From c00a62efd154336845a3c23d3d22ac51599e6693 Mon Sep 17 00:00:00 2001
From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com>
Date: Thu, 18 Apr 2024 02:42:36 -0700
Subject: [PATCH] [Automation] Generate Fluent Lite from datafactory#package-2018-06 (#39784)

* [Automation] Generate Fluent Lite from datafactory#package-2018-06

* Update SsisEnvironmentTests.java

* Update SsisPackageTests.java

* Delete failure test case SsisEnvironmentTests

* Delete failure test case SsisPackageTests

---------

Co-authored-by: Hong Li(MSFT) <74638143+v-hongli1@users.noreply.github.com>
---
 .../CHANGELOG.md | 2207 ++++++++++++++++-
 .../README.md | 2 +-
 .../SAMPLE.md | 1164 ++++-----
 .../azure-resourcemanager-datafactory/pom.xml | 12 +-
 .../datafactory/DataFactoryManager.java | 40 +-
 .../fluent/CredentialOperationsClient.java | 20 +-
 ...ateEndpointConnectionOperationsClient.java | 3 +-
 .../ActivityRunsQueryResponseInner.java | 13 +-
 .../AmazonMwsLinkedServiceTypeProperties.java | 45 +-
 ...azonRdsForLinkedServiceTypeProperties.java | 16 +-
 ...rSqlServerLinkedServiceTypeProperties.java | 16 +-
 ...onRedshiftLinkedServiceTypeProperties.java | 32 +-
 ...CompatibleLinkedServiceTypeProperties.java | 33 +-
 .../models/AmazonS3DatasetTypeProperties.java | 5 +-
 .../AmazonS3LinkedServiceTypeProperties.java | 29 +-
 ...AppFiguresLinkedServiceTypeProperties.java | 15 +-
 .../AppendVariableActivityTypeProperties.java | 11 +-
 .../AsanaLinkedServiceTypeProperties.java | 16 +-
 .../models/AvroDatasetTypeProperties.java | 13 +-
 .../AzPowerShellSetupTypeProperties.java | 5 +-
 ...AzureBatchLinkedServiceTypeProperties.java | 31 +-
 .../AzureBlobDatasetTypeProperties.java | 8 +-
 ...zureBlobFSLinkedServiceTypeProperties.java | 61 +-
 ...lobStorageLinkedServiceTypeProperties.java | 48 +-
 ...ExplorerCommandActivityTypeProperties.java | 11 +-
 ...taExplorerLinkedServiceTypeProperties.java | 31 +-
 ...eAnalyticsLinkedServiceTypeProperties.java | 41 +-
 ...aLakeStoreLinkedServiceTypeProperties.java | 52 +-
 ...abricksDeltaLakeDatasetTypeProperties.java | 6 +-
 ...DetltaLakeLinkedServiceTypeProperties.java | 33 +-
 ...DatabricksLinkedServiceTypeProperties.java | 85 +-
 ...ileStorageLinkedServiceTypeProperties.java | 20 +-
 .../AzureFunctionActivityTypeProperties.java | 26 +-
 ...reFunctionLinkedServiceTypeProperties.java | 22 +-
 ...reKeyVaultLinkedServiceTypeProperties.java | 8 +-
 ...LBatchExecutionActivityTypeProperties.java | 36 +-
 ...ExecutePipelineActivityTypeProperties.java | 58 +-
 .../AzureMLLinkedServiceTypeProperties.java | 47 +-
 ...eMLServiceLinkedServiceTypeProperties.java | 46 +-
 ...LUpdateResourceActivityTypeProperties.java | 21 +-
 ...ureMariaDBLinkedServiceTypeProperties.java | 11 +-
 ...AzureMySqlLinkedServiceTypeProperties.java | 16 +-
 ...PostgreSqlLinkedServiceTypeProperties.java | 11 +-
 ...ePostgreSqlTableDatasetTypeProperties.java | 11 +-
 ...AzureSearchIndexDatasetTypeProperties.java | 5 +-
 ...zureSearchLinkedServiceTypeProperties.java | 16 +-
 ...AzureSqlDWLinkedServiceTypeProperties.java | 41 +-
 ...qlDatabaseLinkedServiceTypeProperties.java | 38 +-
 ...AzureSqlMILinkedServiceTypeProperties.java | 38 +-
 ...ureStorageLinkedServiceTypeProperties.java | 17 +-
 ...eArtifactsLinkedServiceTypeProperties.java | 16 +-
 .../AzureTableDatasetTypeProperties.java | 5 +-
 .../models/BinaryDatasetTypeProperties.java | 5 +-
 .../BlobEventsTriggerTypeProperties.java | 34 +-
 .../models/BlobTriggerTypeProperties.java | 10 +-
 .../CassandraLinkedServiceTypeProperties.java | 16 +-
 .../CassandraTableDatasetTypeProperties.java | 8 +-
.../models/ChainingTriggerTypeProperties.java | 10 +- .../fluent/models/ChangeDataCapture.java | 18 +- .../ChangeDataCaptureResourceInner.java | 5 +- .../models/CmdkeySetupTypeProperties.java | 15 +- ...iceForAppsEntityDatasetTypeProperties.java | 6 +- ...iceForAppsLinkedServiceTypeProperties.java | 68 +- .../ConcurLinkedServiceTypeProperties.java | 35 +- .../models/CopyActivityTypeProperties.java | 19 +- .../CosmosDbLinkedServiceTypeProperties.java | 51 +- ...oDbApiCollectionDatasetTypeProperties.java | 5 +- ...MongoDbApiLinkedServiceTypeProperties.java | 19 +- ...SqlApiCollectionDatasetTypeProperties.java | 5 +- .../CouchbaseLinkedServiceTypeProperties.java | 11 +- .../fluent/models/CreateRunResponseInner.java | 4 +- ...nner.java => CredentialResourceInner.java} | 31 +- .../models/CustomActivityTypeProperties.java | 14 +- .../CustomEventsTriggerTypeProperties.java | 16 +- .../fluent/models/DataFlowResourceInner.java | 5 +- ...keAnalyticsUsqlActivityTypeProperties.java | 22 +- ...abricksNotebookActivityTypeProperties.java | 19 +- ...abricksSparkJarActivityTypeProperties.java | 8 +- ...icksSparkPythonActivityTypeProperties.java | 8 +- .../fluent/models/DatasetResourceInner.java | 5 +- .../DataworldLinkedServiceTypeProperties.java | 16 +- .../Db2LinkedServiceTypeProperties.java | 50 +- .../models/DeleteActivityTypeProperties.java | 11 +- .../DelimitedTextDatasetTypeProperties.java | 30 +- ...mentDbCollectionDatasetTypeProperties.java | 5 +- .../DrillLinkedServiceTypeProperties.java | 11 +- ...DynamicsAXLinkedServiceTypeProperties.java | 43 +- ...namicsAXResourceDatasetTypeProperties.java | 5 +- ...ynamicsCrmEntityDatasetTypeProperties.java | 6 +- ...ynamicsCrmLinkedServiceTypeProperties.java | 80 +- .../DynamicsEntityDatasetTypeProperties.java | 6 +- .../DynamicsLinkedServiceTypeProperties.java | 80 +- .../EloquaLinkedServiceTypeProperties.java | 32 +- ...nvironmentVariableSetupTypeProperties.java | 10 +- .../models/ExcelDatasetTypeProperties.java | 9 +- ...ExecuteDataFlowActivityTypeProperties.java | 33 +- ...ExecutePipelineActivityTypeProperties.java | 5 +- ...cuteSsisPackageActivityTypeProperties.java | 13 +- .../ExposureControlBatchResponseInner.java | 5 +- .../models/FailActivityTypeProperties.java | 16 +- ...FileServerLinkedServiceTypeProperties.java | 16 +- .../FileShareDatasetTypeProperties.java | 11 +- .../models/FilterActivityTypeProperties.java | 10 +- .../models/ForEachActivityTypeProperties.java | 10 +- .../FtpServerLinkedServiceTypeProperties.java | 47 +- .../GetMetadataActivityTypeProperties.java | 5 +- .../models/GlobalParameterResourceInner.java | 5 +- ...gleAdWordsLinkedServiceTypeProperties.java | 58 +- ...leBigQueryLinkedServiceTypeProperties.java | 83 +- ...BigQueryV2LinkedServiceTypeProperties.java | 27 +- ...oudStorageLinkedServiceTypeProperties.java | 30 +- ...ogleSheetsLinkedServiceTypeProperties.java | 16 +- .../GreenplumLinkedServiceTypeProperties.java | 11 +- .../HBaseLinkedServiceTypeProperties.java | 40 +- .../HDInsightHiveActivityTypeProperties.java | 11 +- .../HDInsightLinkedServiceTypeProperties.java | 19 +- ...nsightMapReduceActivityTypeProperties.java | 10 +- ...htOnDemandLinkedServiceTypeProperties.java | 118 +- .../HDInsightSparkActivityTypeProperties.java | 24 +- ...nsightStreamingActivityTypeProperties.java | 25 +- .../HdfsLinkedServiceTypeProperties.java | 22 +- .../HiveLinkedServiceTypeProperties.java | 62 +- .../models/HttpDatasetTypeProperties.java | 17 +- .../HttpLinkedServiceTypeProperties.java | 41 +- .../HubspotLinkedServiceTypeProperties.java 
| 27 +- .../IfConditionActivityTypeProperties.java | 30 +- .../ImpalaLinkedServiceTypeProperties.java | 43 +- .../InformixLinkedServiceTypeProperties.java | 30 +- .../IntegrationRuntimeResourceInner.java | 5 +- ...IntegrationRuntimeStatusResponseInner.java | 5 +- .../JiraLinkedServiceTypeProperties.java | 43 +- .../models/JsonDatasetTypeProperties.java | 10 +- .../LakeHouseLinkedServiceTypeProperties.java | 37 +- .../LakeHouseTableDatasetTypeProperties.java | 8 +- .../LicensedComponentSetupTypeProperties.java | 5 +- .../models/LinkedServiceResourceInner.java | 5 +- .../models/LookupActivityTypeProperties.java | 13 +- .../MagentoLinkedServiceTypeProperties.java | 27 +- ...nagedIntegrationRuntimeTypeProperties.java | 8 +- .../ManagedPrivateEndpointResourceInner.java | 5 +- .../ManagedVirtualNetworkResourceInner.java | 5 +- .../MariaDBLinkedServiceTypeProperties.java | 14 +- .../MarketoLinkedServiceTypeProperties.java | 32 +- ...softAccessLinkedServiceTypeProperties.java | 30 +- ...osoftAccessTableDatasetTypeProperties.java | 6 +- ...bAtlasCollectionDatasetTypeProperties.java | 5 +- ...ngoDbAtlasLinkedServiceTypeProperties.java | 19 +- ...ongoDbCollectionDatasetTypeProperties.java | 5 +- .../MongoDbLinkedServiceTypeProperties.java | 30 +- ...goDbV2CollectionDatasetTypeProperties.java | 5 +- .../MongoDbV2LinkedServiceTypeProperties.java | 13 +- .../MySqlLinkedServiceTypeProperties.java | 14 +- .../NetezzaLinkedServiceTypeProperties.java | 11 +- .../ODataLinkedServiceTypeProperties.java | 75 +- .../OdbcLinkedServiceTypeProperties.java | 30 +- .../Office365DatasetTypeProperties.java | 8 +- .../Office365LinkedServiceTypeProperties.java | 34 +- ...oudStorageLinkedServiceTypeProperties.java | 30 +- .../OracleLinkedServiceTypeProperties.java | 16 +- ...rviceCloudLinkedServiceTypeProperties.java | 43 +- .../models/OrcDatasetTypeProperties.java | 5 +- .../models/ParquetDatasetTypeProperties.java | 5 +- .../PaypalLinkedServiceTypeProperties.java | 32 +- .../PhoenixLinkedServiceTypeProperties.java | 46 +- .../fluent/models/PipelineResourceInner.java | 5 +- .../PipelineRunsQueryResponseInner.java | 13 +- ...PostgreSqlLinkedServiceTypeProperties.java | 16 +- ...stgreSqlV2LinkedServiceTypeProperties.java | 43 +- .../PrestoLinkedServiceTypeProperties.java | 56 +- .../PrivateLinkResourcesWrapperInner.java | 5 +- ...QuickBooksLinkedServiceTypeProperties.java | 14 +- .../QuickbaseLinkedServiceTypeProperties.java | 21 +- ...unTumblingWindowTriggerTypeProperties.java | 15 +- .../ResponsysLinkedServiceTypeProperties.java | 41 +- .../RestResourceDatasetTypeProperties.java | 17 +- ...estServiceLinkedServiceTypeProperties.java | 82 +- ...SalesforceLinkedServiceTypeProperties.java | 18 +- ...etingCloudLinkedServiceTypeProperties.java | 45 +- ...rviceCloudLinkedServiceTypeProperties.java | 18 +- ...iceCloudV2LinkedServiceTypeProperties.java | 23 +- ...iceCloudV2ObjectDatasetTypeProperties.java | 8 +- ...lesforceV2LinkedServiceTypeProperties.java | 23 +- .../SapBWLinkedServiceTypeProperties.java | 40 +- ...orCustomerLinkedServiceTypeProperties.java | 23 +- ...CustomerResourceDatasetTypeProperties.java | 5 +- .../SapEccLinkedServiceTypeProperties.java | 23 +- .../SapEccResourceDatasetTypeProperties.java | 11 +- .../SapHanaLinkedServiceProperties.java | 11 +- .../SapOdpLinkedServiceTypeProperties.java | 46 +- .../SapOdpResourceDatasetTypeProperties.java | 22 +- ...SapOpenHubLinkedServiceTypeProperties.java | 52 +- .../SapOpenHubTableDatasetTypeProperties.java | 14 +- .../SapTableLinkedServiceTypeProperties.java | 38 +- 
...SapTableResourceDatasetTypeProperties.java | 5 +- .../models/ScheduleTriggerTypeProperties.java | 5 +- .../models/ScriptActivityTypeProperties.java | 3 +- ...SelfHostedIntegrationRuntimeNodeInner.java | 4 +- ...ntegrationRuntimeStatusTypeProperties.java | 6 +- ...ostedIntegrationRuntimeTypeProperties.java | 3 +- ...ServiceNowLinkedServiceTypeProperties.java | 32 +- ...rviceNowV2LinkedServiceTypeProperties.java | 21 +- ...SftpServerLinkedServiceTypeProperties.java | 41 +- ...OnlineListLinkedServiceTypeProperties.java | 51 +- .../ShopifyLinkedServiceTypeProperties.java | 27 +- ...SmartsheetLinkedServiceTypeProperties.java | 16 +- .../SnowflakeLinkedServiceTypeProperties.java | 16 +- ...nowflakeLinkedV2ServiceTypeProperties.java | 34 +- .../SparkLinkedServiceTypeProperties.java | 48 +- .../SqlServerLinkedServiceTypeProperties.java | 16 +- ...StoredProcedureActivityTypeProperties.java | 11 +- .../SquareLinkedServiceTypeProperties.java | 25 +- .../models/SsisLogLocationTypeProperties.java | 3 +- .../models/SwitchActivityTypeProperties.java | 14 +- .../SybaseLinkedServiceTypeProperties.java | 21 +- ...SynapseNotebookActivityTypeProperties.java | 16 +- ...SynapseSparkJobActivityTypeProperties.java | 82 +- .../TeamDeskLinkedServiceTypeProperties.java | 21 +- .../TeradataLinkedServiceTypeProperties.java | 11 +- .../models/TriggerQueryResponseInner.java | 13 +- .../fluent/models/TriggerResourceInner.java | 5 +- .../models/TriggerRunsQueryResponseInner.java | 13 +- .../TumblingWindowTriggerTypeProperties.java | 20 +- .../TwilioLinkedServiceTypeProperties.java | 10 +- .../models/UntilActivityTypeProperties.java | 22 +- .../ValidationActivityTypeProperties.java | 40 +- .../VerticaLinkedServiceTypeProperties.java | 11 +- .../models/WaitActivityTypeProperties.java | 5 +- .../WarehouseLinkedServiceTypeProperties.java | 47 +- .../models/WebActivityTypeProperties.java | 48 +- .../models/WebTableDatasetTypeProperties.java | 11 +- .../models/WebhookActivityTypeProperties.java | 30 +- .../XeroLinkedServiceTypeProperties.java | 28 +- .../models/XmlDatasetTypeProperties.java | 10 +- .../ZendeskLinkedServiceTypeProperties.java | 21 +- .../ZohoLinkedServiceTypeProperties.java | 25 +- .../fluent/models/package-info.java | 4 +- .../datafactory/fluent/package-info.java | 4 +- .../ChangeDataCaptureResourceImpl.java | 46 +- .../ChangeDataCapturesClientImpl.java | 4 +- .../ChangeDataCapturesImpl.java | 4 +- .../CredentialOperationsClientImpl.java | 83 +- .../CredentialOperationsImpl.java | 140 +- .../CredentialResourceImpl.java | 49 + .../DataFactoryManagementClientBuilder.java | 6 +- .../DataFactoryManagementClientImpl.java | 11 +- .../implementation/DataFlowResourceImpl.java | 40 +- .../implementation/DatasetResourceImpl.java | 40 +- .../ExposureControlBatchResponseImpl.java | 3 +- .../implementation/ExposureControlsImpl.java | 10 +- .../implementation/FactoryImpl.java | 42 +- .../GlobalParameterResourceImpl.java | 34 +- .../IntegrationRuntimeNodesImpl.java | 17 +- ...IntegrationRuntimeObjectMetadatasImpl.java | 4 +- .../IntegrationRuntimeResourceImpl.java | 104 +- .../IntegrationRuntimesImpl.java | 47 +- .../LinkedServiceResourceImpl.java | 41 +- .../implementation/LinkedServicesImpl.java | 4 +- ...ManagedIdentityCredentialResourceImpl.java | 148 -- .../ManagedPrivateEndpointResourceImpl.java | 34 +- .../ManagedPrivateEndpointsImpl.java | 24 +- .../ManagedVirtualNetworkResourceImpl.java | 38 +- .../ManagedVirtualNetworksImpl.java | 4 +- .../implementation/OperationsClientImpl.java | 4 +- 
.../implementation/PipelineResourceImpl.java | 45 +- .../implementation/PipelinesImpl.java | 6 +- .../PrivateEndPointConnectionsImpl.java | 2 +- ...ndpointConnectionOperationsClientImpl.java | 3 +- ...ivateEndpointConnectionOperationsImpl.java | 14 +- ...PrivateEndpointConnectionResourceImpl.java | 39 +- .../implementation/ResourceManagerUtils.java | 2 - .../TriggerQueryResponseImpl.java | 3 +- .../implementation/TriggerResourceImpl.java | 44 +- .../implementation/TriggersImpl.java | 6 +- .../implementation/package-info.java | 4 +- .../datafactory/models/Activity.java | 31 +- .../models/ActivityDependency.java | 9 +- .../datafactory/models/ActivityPolicy.java | 12 +- .../models/AmazonMwsLinkedService.java | 41 +- .../models/AmazonMwsObjectDataset.java | 20 +- .../datafactory/models/AmazonMwsSource.java | 20 +- .../AmazonRdsForOracleLinkedService.java | 37 +- .../AmazonRdsForOraclePartitionSettings.java | 11 +- .../models/AmazonRdsForOracleSource.java | 33 +- .../AmazonRdsForOracleTableDataset.java | 24 +- .../AmazonRdsForSqlServerLinkedService.java | 37 +- .../models/AmazonRdsForSqlServerSource.java | 34 +- .../AmazonRdsForSqlServerTableDataset.java | 24 +- .../models/AmazonRedshiftLinkedService.java | 45 +- .../models/AmazonRedshiftSource.java | 24 +- .../models/AmazonRedshiftTableDataset.java | 24 +- .../AmazonS3CompatibleLinkedService.java | 49 +- .../models/AmazonS3CompatibleLocation.java | 24 +- .../AmazonS3CompatibleReadSettings.java | 60 +- .../datafactory/models/AmazonS3Dataset.java | 25 +- .../models/AmazonS3LinkedService.java | 41 +- .../datafactory/models/AmazonS3Location.java | 20 +- .../models/AmazonS3ReadSettings.java | 56 +- .../models/AppFiguresLinkedService.java | 29 +- .../models/AppendVariableActivity.java | 33 +- .../models/AsanaLinkedService.java | 33 +- .../datafactory/models/AvroDataset.java | 28 +- .../datafactory/models/AvroFormat.java | 21 +- .../datafactory/models/AvroSink.java | 20 +- .../datafactory/models/AvroSource.java | 23 +- .../datafactory/models/AvroWriteSettings.java | 26 +- .../datafactory/models/AzPowerShellSetup.java | 25 +- .../models/AzureBatchLinkedService.java | 37 +- .../datafactory/models/AzureBlobDataset.java | 28 +- .../models/AzureBlobFSDataset.java | 20 +- .../models/AzureBlobFSLinkedService.java | 69 +- .../models/AzureBlobFSLocation.java | 20 +- .../models/AzureBlobFSReadSettings.java | 68 +- .../datafactory/models/AzureBlobFSSink.java | 23 +- .../datafactory/models/AzureBlobFSSource.java | 39 +- .../models/AzureBlobFSWriteSettings.java | 24 +- .../models/AzureBlobStorageLinkedService.java | 49 +- .../models/AzureBlobStorageLocation.java | 24 +- .../models/AzureBlobStorageReadSettings.java | 60 +- .../models/AzureBlobStorageWriteSettings.java | 24 +- .../AzureDataExplorerCommandActivity.java | 29 +- .../AzureDataExplorerLinkedService.java | 41 +- .../models/AzureDataExplorerSink.java | 20 +- .../models/AzureDataExplorerSource.java | 48 +- .../models/AzureDataExplorerTableDataset.java | 29 +- .../AzureDataLakeAnalyticsLinkedService.java | 45 +- .../models/AzureDataLakeStoreDataset.java | 24 +- .../AzureDataLakeStoreLinkedService.java | 57 +- .../models/AzureDataLakeStoreLocation.java | 25 +- .../AzureDataLakeStoreReadSettings.java | 80 +- .../models/AzureDataLakeStoreSink.java | 20 +- .../models/AzureDataLakeStoreSource.java | 35 +- .../AzureDataLakeStoreWriteSettings.java | 27 +- .../AzureDatabricksDeltaLakeDataset.java | 30 +- ...AzureDatabricksDeltaLakeExportCommand.java | 30 +- ...AzureDatabricksDeltaLakeImportCommand.java | 38 +- 
...AzureDatabricksDeltaLakeLinkedService.java | 45 +- .../models/AzureDatabricksDeltaLakeSink.java | 24 +- .../AzureDatabricksDeltaLakeSource.java | 24 +- .../models/AzureDatabricksLinkedService.java | 61 +- .../models/AzureFileStorageLinkedService.java | 37 +- .../models/AzureFileStorageLocation.java | 25 +- .../models/AzureFileStorageReadSettings.java | 63 +- .../models/AzureFileStorageWriteSettings.java | 25 +- .../models/AzureFunctionActivity.java | 29 +- .../models/AzureFunctionLinkedService.java | 37 +- .../models/AzureKeyVaultLinkedService.java | 29 +- .../models/AzureKeyVaultSecretReference.java | 37 +- .../models/AzureMLBatchExecutionActivity.java | 53 +- .../AzureMLExecutePipelineActivity.java | 69 +- .../models/AzureMLLinkedService.java | 41 +- .../models/AzureMLServiceLinkedService.java | 45 +- .../models/AzureMLUpdateResourceActivity.java | 29 +- .../models/AzureMLWebServiceFile.java | 12 +- .../models/AzureMariaDBLinkedService.java | 37 +- .../models/AzureMariaDBSource.java | 20 +- .../models/AzureMariaDBTableDataset.java | 24 +- .../models/AzureMySqlLinkedService.java | 37 +- .../datafactory/models/AzureMySqlSink.java | 20 +- .../datafactory/models/AzureMySqlSource.java | 20 +- .../models/AzureMySqlTableDataset.java | 25 +- .../models/AzurePostgreSqlLinkedService.java | 37 +- .../models/AzurePostgreSqlSink.java | 20 +- .../models/AzurePostgreSqlSource.java | 20 +- .../models/AzurePostgreSqlTableDataset.java | 32 +- .../datafactory/models/AzureQueueSink.java | 21 +- .../models/AzureSearchIndexDataset.java | 29 +- .../models/AzureSearchIndexSink.java | 20 +- .../models/AzureSearchLinkedService.java | 37 +- .../models/AzureSqlDWLinkedService.java | 49 +- .../models/AzureSqlDWTableDataset.java | 20 +- .../models/AzureSqlDatabaseLinkedService.java | 49 +- .../models/AzureSqlMILinkedService.java | 49 +- .../models/AzureSqlMITableDataset.java | 20 +- .../datafactory/models/AzureSqlSink.java | 34 +- .../datafactory/models/AzureSqlSource.java | 30 +- .../models/AzureSqlTableDataset.java | 20 +- .../models/AzureStorageLinkedService.java | 37 +- .../AzureSynapseArtifactsLinkedService.java | 29 +- .../datafactory/models/AzureTableDataset.java | 25 +- .../datafactory/models/AzureTableSink.java | 20 +- .../datafactory/models/AzureTableSource.java | 20 +- .../AzureTableStorageLinkedService.java | 37 +- .../BigDataPoolParametrizationReference.java | 10 +- .../datafactory/models/BinaryDataset.java | 20 +- .../models/BinaryReadSettings.java | 20 +- .../datafactory/models/BinarySink.java | 20 +- .../datafactory/models/BinarySource.java | 20 +- .../datafactory/models/BlobEventsTrigger.java | 41 +- .../datafactory/models/BlobSink.java | 23 +- .../datafactory/models/BlobSource.java | 39 +- .../datafactory/models/BlobTrigger.java | 25 +- .../models/CassandraLinkedService.java | 33 +- .../datafactory/models/CassandraSource.java | 28 +- .../models/CassandraTableDataset.java | 28 +- .../datafactory/models/ChainingTrigger.java | 35 +- .../models/ChangeDataCaptureListResponse.java | 5 +- .../datafactory/models/CmdkeySetup.java | 25 +- ...CommonDataServiceForAppsEntityDataset.java | 30 +- ...CommonDataServiceForAppsLinkedService.java | 49 +- .../models/CommonDataServiceForAppsSink.java | 35 +- .../CommonDataServiceForAppsSource.java | 30 +- .../datafactory/models/ComponentSetup.java | 25 +- .../models/CompressionReadSettings.java | 23 +- .../models/ConcurLinkedService.java | 41 +- .../models/ConcurObjectDataset.java | 20 +- .../datafactory/models/ConcurSource.java | 20 +- 
.../datafactory/models/ControlActivity.java | 25 +- .../datafactory/models/CopyActivity.java | 25 +- .../datafactory/models/CopySink.java | 44 +- .../datafactory/models/CopySource.java | 33 +- .../datafactory/models/CopyTranslator.java | 25 +- .../models/CosmosDbLinkedService.java | 55 +- .../CosmosDbMongoDbApiCollectionDataset.java | 29 +- .../CosmosDbMongoDbApiLinkedService.java | 29 +- .../models/CosmosDbMongoDbApiSink.java | 24 +- .../models/CosmosDbMongoDbApiSource.java | 37 +- .../CosmosDbSqlApiCollectionDataset.java | 29 +- .../models/CosmosDbSqlApiSink.java | 23 +- .../models/CosmosDbSqlApiSource.java | 39 +- .../models/CouchbaseLinkedService.java | 33 +- .../datafactory/models/CouchbaseSource.java | 20 +- .../models/CouchbaseTableDataset.java | 20 +- .../CreateDataFlowDebugSessionRequest.java | 3 +- .../datafactory/models/Credential.java | 27 +- .../models/CredentialListResponse.java | 12 +- .../models/CredentialOperations.java | 92 +- .../models/CredentialReference.java | 9 +- .../models/CredentialResource.java | 54 + .../datafactory/models/CustomActivity.java | 25 +- .../models/CustomDataSourceLinkedService.java | 29 +- .../datafactory/models/CustomDataset.java | 20 +- .../models/CustomEventsTrigger.java | 25 +- .../datafactory/models/CustomSetupBase.java | 25 +- .../models/DWCopyCommandSettings.java | 32 +- .../datafactory/models/DataFlow.java | 24 +- .../models/DataFlowDebugCommandPayload.java | 5 +- .../models/DataFlowDebugResource.java | 5 +- .../models/DataFlowListResponse.java | 4 +- .../datafactory/models/DataFlowReference.java | 9 +- .../models/DataLakeAnalyticsUsqlActivity.java | 29 +- .../models/DatabricksNotebookActivity.java | 37 +- .../models/DatabricksSparkJarActivity.java | 29 +- .../models/DatabricksSparkPythonActivity.java | 29 +- .../datafactory/models/Dataset.java | 37 +- .../models/DatasetCompression.java | 4 +- .../models/DatasetDebugResource.java | 5 +- .../models/DatasetListResponse.java | 4 +- .../datafactory/models/DatasetLocation.java | 30 +- .../datafactory/models/DatasetReference.java | 4 +- .../models/DatasetStorageFormat.java | 24 +- .../models/DataworldLinkedService.java | 33 +- .../datafactory/models/Db2LinkedService.java | 53 +- .../datafactory/models/Db2Source.java | 20 +- .../datafactory/models/Db2TableDataset.java | 20 +- .../datafactory/models/DeleteActivity.java | 25 +- .../models/DelimitedTextDataset.java | 36 +- .../models/DelimitedTextReadSettings.java | 27 +- .../datafactory/models/DelimitedTextSink.java | 20 +- .../models/DelimitedTextSource.java | 23 +- .../models/DelimitedTextWriteSettings.java | 38 +- .../models/DependencyReference.java | 25 +- .../datafactory/models/DistcpSettings.java | 21 +- .../models/DocumentDbCollectionDataset.java | 29 +- .../models/DocumentDbCollectionSink.java | 27 +- .../models/DocumentDbCollectionSource.java | 30 +- .../models/DrillLinkedService.java | 33 +- .../datafactory/models/DrillSource.java | 20 +- .../datafactory/models/DrillTableDataset.java | 20 +- .../models/DynamicsAXLinkedService.java | 37 +- .../models/DynamicsAXResourceDataset.java | 29 +- .../datafactory/models/DynamicsAXSource.java | 24 +- .../models/DynamicsCrmEntityDataset.java | 30 +- .../models/DynamicsCrmLinkedService.java | 65 +- .../datafactory/models/DynamicsCrmSink.java | 30 +- .../datafactory/models/DynamicsCrmSource.java | 26 +- .../models/DynamicsEntityDataset.java | 26 +- .../models/DynamicsLinkedService.java | 61 +- .../datafactory/models/DynamicsSink.java | 30 +- .../datafactory/models/DynamicsSource.java | 34 +- 
.../models/EloquaLinkedService.java | 41 +- .../models/EloquaObjectDataset.java | 20 +- .../datafactory/models/EloquaSource.java | 20 +- .../models/EncryptionConfiguration.java | 21 +- .../models/EnvironmentVariableSetup.java | 29 +- .../datafactory/models/ExcelDataset.java | 20 +- .../datafactory/models/ExcelSource.java | 23 +- .../models/ExecuteDataFlowActivity.java | 45 +- ...DataFlowActivityTypePropertiesCompute.java | 6 +- .../models/ExecutePipelineActivity.java | 29 +- .../models/ExecuteSsisPackageActivity.java | 29 +- .../ExecuteWranglingDataflowActivity.java | 45 +- .../datafactory/models/ExecutionActivity.java | 24 +- .../datafactory/models/ExportSettings.java | 25 +- .../models/ExposureControlBatchRequest.java | 5 +- .../datafactory/models/Expression.java | 4 +- .../models/FactoryGitHubConfiguration.java | 24 +- .../datafactory/models/FactoryIdentity.java | 4 +- .../models/FactoryListResponse.java | 4 +- .../models/FactoryRepoConfiguration.java | 42 +- .../models/FactoryVstsConfiguration.java | 29 +- .../datafactory/models/FailActivity.java | 29 +- .../models/FileServerLinkedService.java | 37 +- .../models/FileServerLocation.java | 21 +- .../models/FileServerReadSettings.java | 59 +- .../models/FileServerWriteSettings.java | 25 +- .../datafactory/models/FileShareDataset.java | 28 +- .../datafactory/models/FileSystemSink.java | 20 +- .../datafactory/models/FileSystemSource.java | 34 +- .../datafactory/models/FilterActivity.java | 25 +- .../datafactory/models/Flowlet.java | 20 +- .../datafactory/models/ForEachActivity.java | 25 +- .../models/FormatReadSettings.java | 25 +- .../models/FormatWriteSettings.java | 25 +- .../datafactory/models/FtpReadSettings.java | 76 +- .../models/FtpServerLinkedService.java | 58 +- .../datafactory/models/FtpServerLocation.java | 21 +- .../models/GetMetadataActivity.java | 25 +- .../models/GitHubAccessTokenRequest.java | 10 +- .../models/GlobalParameterListResponse.java | 5 +- .../models/GlobalParameterSpecification.java | 10 +- .../models/GoogleAdWordsLinkedService.java | 45 +- .../models/GoogleAdWordsObjectDataset.java | 24 +- .../models/GoogleAdWordsSource.java | 20 +- .../models/GoogleBigQueryLinkedService.java | 73 +- .../models/GoogleBigQueryObjectDataset.java | 24 +- .../models/GoogleBigQuerySource.java | 20 +- .../models/GoogleBigQueryV2LinkedService.java | 37 +- .../models/GoogleBigQueryV2ObjectDataset.java | 24 +- .../models/GoogleBigQueryV2Source.java | 20 +- .../GoogleCloudStorageLinkedService.java | 49 +- .../models/GoogleCloudStorageLocation.java | 24 +- .../GoogleCloudStorageReadSettings.java | 60 +- .../models/GoogleSheetsLinkedService.java | 37 +- .../models/GreenplumLinkedService.java | 33 +- .../datafactory/models/GreenplumSource.java | 20 +- .../models/GreenplumTableDataset.java | 20 +- .../models/HBaseLinkedService.java | 45 +- .../models/HBaseObjectDataset.java | 20 +- .../datafactory/models/HBaseSource.java | 20 +- .../models/HDInsightHiveActivity.java | 33 +- .../models/HDInsightLinkedService.java | 33 +- .../models/HDInsightMapReduceActivity.java | 29 +- .../HDInsightOnDemandLinkedService.java | 83 +- .../models/HDInsightPigActivity.java | 25 +- .../models/HDInsightSparkActivity.java | 33 +- .../models/HDInsightStreamingActivity.java | 29 +- .../datafactory/models/HdfsLinkedService.java | 33 +- .../datafactory/models/HdfsLocation.java | 21 +- .../datafactory/models/HdfsReadSettings.java | 56 +- .../datafactory/models/HdfsSource.java | 31 +- .../datafactory/models/HiveLinkedService.java | 61 +- 
.../datafactory/models/HiveObjectDataset.java | 20 +- .../datafactory/models/HiveSource.java | 20 +- .../datafactory/models/HttpDataset.java | 34 +- .../datafactory/models/HttpLinkedService.java | 44 +- .../datafactory/models/HttpReadSettings.java | 43 +- .../models/HttpServerLocation.java | 28 +- .../datafactory/models/HttpSource.java | 24 +- .../models/HubspotLinkedService.java | 41 +- .../models/HubspotObjectDataset.java | 20 +- .../datafactory/models/HubspotSource.java | 20 +- .../models/IfConditionActivity.java | 41 +- .../models/ImpalaLinkedService.java | 45 +- .../models/ImpalaObjectDataset.java | 20 +- .../datafactory/models/ImpalaSource.java | 20 +- .../datafactory/models/ImportSettings.java | 25 +- .../models/InformixLinkedService.java | 41 +- .../datafactory/models/InformixSink.java | 20 +- .../datafactory/models/InformixSource.java | 20 +- .../models/InformixTableDataset.java | 20 +- .../models/IntegrationRuntime.java | 24 +- .../IntegrationRuntimeComputeProperties.java | 11 +- .../IntegrationRuntimeDataFlowProperties.java | 6 +- .../IntegrationRuntimeDebugResource.java | 5 +- .../IntegrationRuntimeListResponse.java | 5 +- .../models/IntegrationRuntimeReference.java | 5 +- .../IntegrationRuntimeSsisCatalogInfo.java | 3 +- .../models/IntegrationRuntimeStatus.java | 22 +- .../datafactory/models/JiraLinkedService.java | 49 +- .../datafactory/models/JiraObjectDataset.java | 20 +- .../datafactory/models/JiraSource.java | 20 +- .../datafactory/models/JsonDataset.java | 20 +- .../datafactory/models/JsonFormat.java | 39 +- .../datafactory/models/JsonReadSettings.java | 20 +- .../datafactory/models/JsonSink.java | 20 +- .../datafactory/models/JsonSource.java | 23 +- .../datafactory/models/JsonWriteSettings.java | 23 +- .../models/LakeHouseLinkedService.java | 45 +- .../datafactory/models/LakeHouseLocation.java | 21 +- .../models/LakeHouseReadSettings.java | 64 +- .../models/LakeHouseTableDataset.java | 28 +- .../models/LakeHouseTableSink.java | 26 +- .../models/LakeHouseTableSource.java | 31 +- .../models/LakeHouseWriteSettings.java | 21 +- ...kedIntegrationRuntimeKeyAuthorization.java | 29 +- ...edIntegrationRuntimeRbacAuthorization.java | 29 +- .../LinkedIntegrationRuntimeRequest.java | 5 +- .../models/LinkedIntegrationRuntimeType.java | 23 +- .../datafactory/models/LinkedService.java | 27 +- .../models/LinkedServiceDebugResource.java | 5 +- .../models/LinkedServiceListResponse.java | 5 +- .../models/LinkedServiceReference.java | 5 +- .../models/LogLocationSettings.java | 8 +- .../datafactory/models/LogSettings.java | 5 +- .../models/LogStorageSettings.java | 8 +- .../datafactory/models/LookupActivity.java | 25 +- .../models/MagentoLinkedService.java | 41 +- .../models/MagentoObjectDataset.java | 20 +- .../datafactory/models/MagentoSource.java | 20 +- .../models/ManagedIdentityCredential.java | 24 +- .../ManagedIdentityCredentialResource.java | 226 +- .../models/ManagedIntegrationRuntime.java | 37 +- .../ManagedIntegrationRuntimeStatus.java | 29 +- .../models/ManagedPrivateEndpoint.java | 8 +- .../ManagedPrivateEndpointListResponse.java | 5 +- .../ManagedVirtualNetworkListResponse.java | 5 +- .../ManagedVirtualNetworkReference.java | 10 +- .../datafactory/models/MapperConnection.java | 7 +- .../datafactory/models/MappingDataFlow.java | 20 +- .../models/MariaDBLinkedService.java | 33 +- .../datafactory/models/MariaDBSource.java | 20 +- .../models/MariaDBTableDataset.java | 20 +- .../models/MarketoLinkedService.java | 41 +- .../models/MarketoObjectDataset.java | 20 +- 
.../datafactory/models/MarketoSource.java | 20 +- .../models/MicrosoftAccessLinkedService.java | 45 +- .../models/MicrosoftAccessSink.java | 20 +- .../models/MicrosoftAccessSource.java | 23 +- .../models/MicrosoftAccessTableDataset.java | 30 +- .../models/MongoDbAtlasCollectionDataset.java | 29 +- .../models/MongoDbAtlasLinkedService.java | 29 +- .../datafactory/models/MongoDbAtlasSink.java | 24 +- .../models/MongoDbAtlasSource.java | 41 +- .../models/MongoDbCollectionDataset.java | 29 +- .../MongoDbCursorMethodsProperties.java | 20 +- .../models/MongoDbLinkedService.java | 33 +- .../datafactory/models/MongoDbSource.java | 23 +- .../models/MongoDbV2CollectionDataset.java | 29 +- .../models/MongoDbV2LinkedService.java | 25 +- .../datafactory/models/MongoDbV2Sink.java | 24 +- .../datafactory/models/MongoDbV2Source.java | 33 +- .../models/MultiplePipelineTrigger.java | 22 +- .../models/MySqlLinkedService.java | 33 +- .../datafactory/models/MySqlSource.java | 20 +- .../datafactory/models/MySqlTableDataset.java | 20 +- .../models/NetezzaLinkedService.java | 33 +- .../models/NetezzaPartitionSettings.java | 9 +- .../datafactory/models/NetezzaSource.java | 23 +- .../models/NetezzaTableDataset.java | 20 +- .../models/ODataLinkedService.java | 73 +- .../models/ODataResourceDataset.java | 20 +- .../datafactory/models/ODataSource.java | 27 +- .../datafactory/models/OdbcLinkedService.java | 41 +- .../datafactory/models/OdbcSink.java | 20 +- .../datafactory/models/OdbcSource.java | 20 +- .../datafactory/models/OdbcTableDataset.java | 20 +- .../datafactory/models/Office365Dataset.java | 25 +- .../models/Office365LinkedService.java | 33 +- .../datafactory/models/Office365Source.java | 26 +- .../OracleCloudStorageLinkedService.java | 49 +- .../models/OracleCloudStorageLocation.java | 24 +- .../OracleCloudStorageReadSettings.java | 60 +- .../models/OracleLinkedService.java | 33 +- .../models/OraclePartitionSettings.java | 11 +- .../OracleServiceCloudLinkedService.java | 45 +- .../OracleServiceCloudObjectDataset.java | 24 +- .../models/OracleServiceCloudSource.java | 24 +- .../datafactory/models/OracleSink.java | 20 +- .../datafactory/models/OracleSource.java | 37 +- .../models/OracleTableDataset.java | 20 +- .../datafactory/models/OrcDataset.java | 20 +- .../datafactory/models/OrcFormat.java | 21 +- .../datafactory/models/OrcSink.java | 20 +- .../datafactory/models/OrcSource.java | 23 +- .../datafactory/models/OrcWriteSettings.java | 26 +- .../datafactory/models/PackageStore.java | 9 +- .../models/ParameterSpecification.java | 4 +- .../datafactory/models/ParquetDataset.java | 20 +- .../datafactory/models/ParquetFormat.java | 21 +- .../models/ParquetReadSettings.java | 20 +- .../datafactory/models/ParquetSink.java | 20 +- .../datafactory/models/ParquetSource.java | 23 +- .../models/ParquetWriteSettings.java | 26 +- .../models/PaypalLinkedService.java | 41 +- .../models/PaypalObjectDataset.java | 20 +- .../datafactory/models/PaypalSource.java | 20 +- .../models/PhoenixLinkedService.java | 45 +- .../models/PhoenixObjectDataset.java | 20 +- .../datafactory/models/PhoenixSource.java | 20 +- ...ipelineExternalComputeScaleProperties.java | 16 +- .../models/PipelineListResponse.java | 4 +- .../datafactory/models/PipelineReference.java | 5 +- .../datafactory/models/PolybaseSettings.java | 17 +- .../models/PostgreSqlLinkedService.java | 37 +- .../datafactory/models/PostgreSqlSource.java | 20 +- .../models/PostgreSqlTableDataset.java | 20 +- .../models/PostgreSqlV2LinkedService.java | 37 +- 
.../models/PostgreSqlV2Source.java | 20 +- .../models/PostgreSqlV2TableDataset.java | 24 +- .../models/PrestoLinkedService.java | 45 +- .../models/PrestoObjectDataset.java | 20 +- .../datafactory/models/PrestoSource.java | 20 +- ...PrivateEndpointConnectionListResponse.java | 5 +- .../models/QuickBooksLinkedService.java | 37 +- .../models/QuickBooksObjectDataset.java | 24 +- .../datafactory/models/QuickBooksSource.java | 20 +- .../models/QuickbaseLinkedService.java | 33 +- .../RedirectIncompatibleRowSettings.java | 17 +- .../models/RedshiftUnloadSettings.java | 17 +- .../datafactory/models/RelationalSource.java | 23 +- .../models/RelationalTableDataset.java | 20 +- .../models/RerunTumblingWindowTrigger.java | 29 +- .../models/ResponsysLinkedService.java | 41 +- .../models/ResponsysObjectDataset.java | 20 +- .../datafactory/models/ResponsysSource.java | 20 +- .../models/RestResourceDataset.java | 28 +- .../models/RestServiceLinkedService.java | 76 +- .../datafactory/models/RestSink.java | 35 +- .../datafactory/models/RestSource.java | 54 +- .../datafactory/models/RetryPolicy.java | 3 +- .../models/RunFilterParameters.java | 10 +- .../datafactory/models/RunQueryFilter.java | 16 +- .../datafactory/models/RunQueryOrderBy.java | 24 +- .../models/SalesforceLinkedService.java | 37 +- ...SalesforceMarketingCloudLinkedService.java | 53 +- ...SalesforceMarketingCloudObjectDataset.java | 24 +- .../SalesforceMarketingCloudSource.java | 24 +- .../models/SalesforceObjectDataset.java | 24 +- .../SalesforceServiceCloudLinkedService.java | 37 +- .../SalesforceServiceCloudObjectDataset.java | 24 +- .../models/SalesforceServiceCloudSink.java | 33 +- .../models/SalesforceServiceCloudSource.java | 30 +- ...SalesforceServiceCloudV2LinkedService.java | 37 +- ...SalesforceServiceCloudV2ObjectDataset.java | 32 +- .../models/SalesforceServiceCloudV2Sink.java | 33 +- .../SalesforceServiceCloudV2Source.java | 30 +- .../datafactory/models/SalesforceSink.java | 29 +- .../datafactory/models/SalesforceSource.java | 23 +- .../models/SalesforceV2LinkedService.java | 37 +- .../models/SalesforceV2ObjectDataset.java | 24 +- .../datafactory/models/SalesforceV2Sink.java | 29 +- .../models/SalesforceV2Source.java | 23 +- .../models/SapBWLinkedService.java | 41 +- .../datafactory/models/SapBwCubeDataset.java | 21 +- .../datafactory/models/SapBwSource.java | 20 +- .../SapCloudForCustomerLinkedService.java | 41 +- .../SapCloudForCustomerResourceDataset.java | 29 +- .../models/SapCloudForCustomerSink.java | 28 +- .../models/SapCloudForCustomerSource.java | 28 +- .../models/SapEccLinkedService.java | 37 +- .../models/SapEccResourceDataset.java | 31 +- .../datafactory/models/SapEccSource.java | 24 +- .../models/SapHanaLinkedService.java | 33 +- .../models/SapHanaPartitionSettings.java | 11 +- .../datafactory/models/SapHanaSource.java | 23 +- .../models/SapHanaTableDataset.java | 20 +- .../models/SapOdpLinkedService.java | 41 +- .../models/SapOdpResourceDataset.java | 37 +- .../datafactory/models/SapOdpSource.java | 29 +- .../models/SapOpenHubLinkedService.java | 63 +- .../datafactory/models/SapOpenHubSource.java | 40 +- .../models/SapOpenHubTableDataset.java | 25 +- .../models/SapTableLinkedService.java | 33 +- .../models/SapTablePartitionSettings.java | 28 +- .../models/SapTableResourceDataset.java | 29 +- .../datafactory/models/SapTableSource.java | 55 +- .../datafactory/models/ScheduleTrigger.java | 25 +- .../datafactory/models/ScriptAction.java | 12 +- .../datafactory/models/ScriptActivity.java | 25 +- 
.../models/ScriptActivityScriptBlock.java | 22 +- ...riptActivityTypePropertiesLogSettings.java | 5 +- .../datafactory/models/ScriptType.java | 53 - .../datafactory/models/SecretBase.java | 25 +- .../datafactory/models/SecureString.java | 24 +- ...endencyTumblingWindowTriggerReference.java | 32 +- .../models/SelfHostedIntegrationRuntime.java | 34 +- .../SelfHostedIntegrationRuntimeStatus.java | 32 +- .../models/ServiceNowLinkedService.java | 45 +- .../models/ServiceNowObjectDataset.java | 24 +- .../datafactory/models/ServiceNowSource.java | 20 +- .../models/ServiceNowV2LinkedService.java | 37 +- .../models/ServiceNowV2ObjectDataset.java | 24 +- .../models/ServiceNowV2Source.java | 20 +- .../models/ServicePrincipalCredential.java | 29 +- .../ServicePrincipalCredentialResource.java | 76 + .../models/SetVariableActivity.java | 25 +- .../datafactory/models/SftpLocation.java | 21 +- .../datafactory/models/SftpReadSettings.java | 59 +- .../models/SftpServerLinkedService.java | 45 +- .../datafactory/models/SftpWriteSettings.java | 26 +- .../SharePointOnlineListLinkedService.java | 45 +- .../SharePointOnlineListResourceDataset.java | 24 +- .../models/SharePointOnlineListSource.java | 30 +- .../models/ShopifyLinkedService.java | 41 +- .../models/ShopifyObjectDataset.java | 20 +- .../datafactory/models/ShopifySource.java | 20 +- .../datafactory/models/SkipErrorFile.java | 22 +- .../models/SmartsheetLinkedService.java | 37 +- .../datafactory/models/SnowflakeDataset.java | 25 +- .../models/SnowflakeExportCopyCommand.java | 32 +- .../models/SnowflakeImportCopyCommand.java | 32 +- .../models/SnowflakeLinkedService.java | 33 +- .../datafactory/models/SnowflakeSink.java | 20 +- .../datafactory/models/SnowflakeSource.java | 24 +- .../models/SnowflakeV2Dataset.java | 25 +- .../models/SnowflakeV2LinkedService.java | 45 +- .../datafactory/models/SnowflakeV2Sink.java | 20 +- .../datafactory/models/SnowflakeV2Source.java | 25 +- ...ConfigurationParametrizationReference.java | 10 +- .../models/SparkLinkedService.java | 45 +- .../models/SparkObjectDataset.java | 20 +- .../datafactory/models/SparkSource.java | 20 +- .../models/SqlAlwaysEncryptedProperties.java | 8 +- .../datafactory/models/SqlDWSink.java | 32 +- .../datafactory/models/SqlDWSource.java | 41 +- .../models/SqlDWUpsertSettings.java | 11 +- .../datafactory/models/SqlMISink.java | 31 +- .../datafactory/models/SqlMISource.java | 30 +- .../models/SqlPartitionSettings.java | 12 +- .../models/SqlServerLinkedService.java | 33 +- .../datafactory/models/SqlServerSink.java | 31 +- .../datafactory/models/SqlServerSource.java | 30 +- .../SqlServerStoredProcedureActivity.java | 35 +- .../models/SqlServerTableDataset.java | 20 +- .../datafactory/models/SqlSink.java | 31 +- .../datafactory/models/SqlSource.java | 30 +- .../datafactory/models/SqlUpsertSettings.java | 14 +- .../models/SquareLinkedService.java | 41 +- .../models/SquareObjectDataset.java | 20 +- .../datafactory/models/SquareSource.java | 20 +- .../models/SsisAccessCredential.java | 12 +- .../datafactory/models/SsisChildPackage.java | 9 +- .../datafactory/models/SsisEnvironment.java | 20 +- .../models/SsisExecutionCredential.java | 14 +- .../models/SsisExecutionParameter.java | 4 +- .../datafactory/models/SsisFolder.java | 21 +- .../datafactory/models/SsisLogLocation.java | 13 +- .../models/SsisObjectMetadata.java | 24 +- .../datafactory/models/SsisPackage.java | 20 +- .../datafactory/models/SsisProject.java | 20 +- .../models/SsisPropertyOverride.java | 4 +- .../datafactory/models/StagingSettings.java | 
8 +- .../datafactory/models/StoreReadSettings.java | 30 +- .../models/StoreWriteSettings.java | 33 +- .../datafactory/models/SwitchActivity.java | 25 +- .../models/SybaseLinkedService.java | 33 +- .../datafactory/models/SybaseSource.java | 20 +- .../models/SybaseTableDataset.java | 20 +- .../models/SynapseNotebookActivity.java | 29 +- .../models/SynapseNotebookReference.java | 9 +- .../SynapseSparkJobDefinitionActivity.java | 73 +- .../models/SynapseSparkJobReference.java | 9 +- .../datafactory/models/TabularSource.java | 30 +- .../datafactory/models/TabularTranslator.java | 54 +- .../models/TarGZipReadSettings.java | 20 +- .../datafactory/models/TarReadSettings.java | 20 +- .../models/TeamDeskLinkedService.java | 33 +- .../models/TeradataLinkedService.java | 33 +- .../models/TeradataPartitionSettings.java | 9 +- .../datafactory/models/TeradataSource.java | 23 +- .../models/TeradataTableDataset.java | 20 +- .../datafactory/models/TextFormat.java | 51 +- .../datafactory/models/Transformation.java | 4 +- .../datafactory/models/Trigger.java | 36 +- .../models/TriggerDependencyReference.java | 27 +- .../models/TriggerFilterParameters.java | 6 +- .../models/TriggerListResponse.java | 4 +- .../datafactory/models/TriggerReference.java | 8 +- .../models/TumblingWindowTrigger.java | 29 +- ...blingWindowTriggerDependencyReference.java | 27 +- .../models/TwilioLinkedService.java | 25 +- .../models/TypeConversionSettings.java | 3 +- .../datafactory/models/UntilActivity.java | 37 +- .../UpdateIntegrationRuntimeNodeRequest.java | 3 +- .../UpdateIntegrationRuntimeRequest.java | 6 +- .../datafactory/models/UserAccessPolicy.java | 17 +- .../datafactory/models/UserProperty.java | 8 +- .../models/ValidationActivity.java | 47 +- .../models/VariableSpecification.java | 4 +- .../models/VerticaLinkedService.java | 33 +- .../datafactory/models/VerticaSource.java | 20 +- .../models/VerticaTableDataset.java | 20 +- .../datafactory/models/WaitActivity.java | 25 +- .../models/WarehouseLinkedService.java | 45 +- .../datafactory/models/WarehouseSink.java | 29 +- .../datafactory/models/WarehouseSource.java | 33 +- .../models/WarehouseTableDataset.java | 20 +- .../datafactory/models/WebActivity.java | 47 +- .../models/WebActivityAuthentication.java | 9 +- .../models/WebAnonymousAuthentication.java | 25 +- .../models/WebBasicAuthentication.java | 34 +- .../WebClientCertificateAuthentication.java | 34 +- .../datafactory/models/WebLinkedService.java | 25 +- .../WebLinkedServiceTypeProperties.java | 34 +- .../datafactory/models/WebSource.java | 23 +- .../datafactory/models/WebTableDataset.java | 25 +- .../datafactory/models/WebhookActivity.java | 29 +- .../datafactory/models/WranglingDataFlow.java | 20 +- .../datafactory/models/XeroLinkedService.java | 41 +- .../datafactory/models/XeroObjectDataset.java | 20 +- .../datafactory/models/XeroSource.java | 20 +- .../datafactory/models/XmlDataset.java | 20 +- .../datafactory/models/XmlReadSettings.java | 49 +- .../datafactory/models/XmlSource.java | 23 +- .../models/ZendeskLinkedService.java | 33 +- .../models/ZipDeflateReadSettings.java | 20 +- .../datafactory/models/ZohoLinkedService.java | 41 +- .../datafactory/models/ZohoObjectDataset.java | 20 +- .../datafactory/models/ZohoSource.java | 20 +- .../datafactory/models/package-info.java | 4 +- .../datafactory/package-info.java | 4 +- .../src/main/java/module-info.java | 2 +- .../reflect-config.json | 57 +- ...ActivityRunsQueryByPipelineRunSamples.java | 14 +- ...hangeDataCaptureCreateOrUpdateSamples.java | 30 +- 
.../ChangeDataCaptureDeleteSamples.java | 8 +- .../ChangeDataCaptureGetSamples.java | 9 +- ...ChangeDataCaptureListByFactorySamples.java | 7 +- .../ChangeDataCaptureStartSamples.java | 8 +- .../ChangeDataCaptureStatusSamples.java | 8 +- .../ChangeDataCaptureStopSamples.java | 8 +- ...entialOperationsCreateOrUpdateSamples.java | 15 +- .../CredentialOperationsDeleteSamples.java | 9 +- .../CredentialOperationsGetSamples.java | 8 +- ...dentialOperationsListByFactorySamples.java | 7 +- ...ataFlowDebugSessionAddDataFlowSamples.java | 43 +- .../DataFlowDebugSessionCreateSamples.java | 7 +- .../DataFlowDebugSessionDeleteSamples.java | 10 +- ...FlowDebugSessionExecuteCommandSamples.java | 14 +- ...FlowDebugSessionQueryByFactorySamples.java | 7 +- .../DataFlowsCreateOrUpdateSamples.java | 63 +- .../generated/DataFlowsDeleteSamples.java | 8 +- .../generated/DataFlowsGetSamples.java | 8 +- .../DataFlowsListByFactorySamples.java | 7 +- .../DatasetsCreateOrUpdateSamples.java | 51 +- .../generated/DatasetsDeleteSamples.java | 8 +- .../generated/DatasetsGetSamples.java | 8 +- .../DatasetsListByFactorySamples.java | 7 +- ...ontrolGetFeatureValueByFactorySamples.java | 11 +- ...ExposureControlGetFeatureValueSamples.java | 11 +- ...rolQueryFeatureValuesByFactorySamples.java | 16 +- .../FactoriesConfigureFactoryRepoSamples.java | 20 +- .../FactoriesCreateOrUpdateSamples.java | 10 +- .../generated/FactoriesDeleteSamples.java | 8 +- .../FactoriesGetByResourceGroupSamples.java | 8 +- .../FactoriesGetDataPlaneAccessSamples.java | 15 +- .../FactoriesGetGitHubAccessTokenSamples.java | 13 +- .../FactoriesListByResourceGroupSamples.java | 3 +- .../generated/FactoriesListSamples.java | 3 +- .../generated/FactoriesUpdateSamples.java | 9 +- ...GlobalParametersCreateOrUpdateSamples.java | 13 +- .../GlobalParametersDeleteSamples.java | 8 +- .../generated/GlobalParametersGetSamples.java | 8 +- .../GlobalParametersListByFactorySamples.java | 7 +- .../IntegrationRuntimeNodesDeleteSamples.java | 8 +- ...rationRuntimeNodesGetIpAddressSamples.java | 8 +- .../IntegrationRuntimeNodesGetSamples.java | 8 +- .../IntegrationRuntimeNodesUpdateSamples.java | 9 +- ...rationRuntimeObjectMetadataGetSamples.java | 9 +- ...onRuntimeObjectMetadataRefreshSamples.java | 7 +- ...CreateLinkedIntegrationRuntimeSamples.java | 17 +- ...egrationRuntimesCreateOrUpdateSamples.java | 6 +- .../IntegrationRuntimesDeleteSamples.java | 8 +- ...ationRuntimesGetConnectionInfoSamples.java | 8 +- ...ationRuntimesGetMonitoringDataSamples.java | 8 +- .../IntegrationRuntimesGetSamples.java | 8 +- .../IntegrationRuntimesGetStatusSamples.java | 8 +- ...ntegrationRuntimesListAuthKeysSamples.java | 8 +- ...tegrationRuntimesListByFactorySamples.java | 7 +- ...ndNetworkDependenciesEndpointsSamples.java | 8 +- ...ationRuntimesRegenerateAuthKeySamples.java | 11 +- ...IntegrationRuntimesRemoveLinksSamples.java | 11 +- .../IntegrationRuntimesStartSamples.java | 8 +- .../IntegrationRuntimesStopSamples.java | 8 +- ...grationRuntimesSyncCredentialsSamples.java | 8 +- .../IntegrationRuntimesUpdateSamples.java | 9 +- .../IntegrationRuntimesUpgradeSamples.java | 8 +- .../LinkedServicesCreateOrUpdateSamples.java | 32 +- .../LinkedServicesDeleteSamples.java | 9 +- .../generated/LinkedServicesGetSamples.java | 9 +- .../LinkedServicesListByFactorySamples.java | 7 +- ...PrivateEndpointsCreateOrUpdateSamples.java | 9 +- .../ManagedPrivateEndpointsDeleteSamples.java | 8 +- .../ManagedPrivateEndpointsGetSamples.java | 9 +- ...dPrivateEndpointsListByFactorySamples.java | 8 +- 
...dVirtualNetworksCreateOrUpdateSamples.java | 9 +- .../ManagedVirtualNetworksGetSamples.java | 8 +- ...edVirtualNetworksListByFactorySamples.java | 7 +- .../generated/OperationsListSamples.java | 3 +- .../generated/PipelineRunsCancelSamples.java | 9 +- .../generated/PipelineRunsGetSamples.java | 8 +- .../PipelineRunsQueryByFactorySamples.java | 17 +- .../PipelinesCreateOrUpdateSamples.java | 67 +- .../generated/PipelinesCreateRunSamples.java | 16 +- .../generated/PipelinesDeleteSamples.java | 8 +- .../generated/PipelinesGetSamples.java | 8 +- .../PipelinesListByFactorySamples.java | 7 +- ...dPointConnectionsListByFactorySamples.java | 7 +- ...nectionOperationCreateOrUpdateSamples.java | 9 +- ...pointConnectionOperationDeleteSamples.java | 8 +- ...EndpointConnectionOperationGetSamples.java | 8 +- .../PrivateLinkResourcesGetSamples.java | 7 +- .../generated/TriggerRunsCancelSamples.java | 9 +- .../TriggerRunsQueryByFactorySamples.java | 17 +- .../generated/TriggerRunsRerunSamples.java | 9 +- .../TriggersCreateOrUpdateSamples.java | 29 +- .../generated/TriggersDeleteSamples.java | 8 +- ...gersGetEventSubscriptionStatusSamples.java | 8 +- .../generated/TriggersGetSamples.java | 8 +- .../TriggersListByFactorySamples.java | 7 +- .../TriggersQueryByFactorySamples.java | 9 +- .../generated/TriggersStartSamples.java | 7 +- .../generated/TriggersStopSamples.java | 7 +- .../TriggersSubscribeToEventsSamples.java | 8 +- .../TriggersUnsubscribeFromEventsSamples.java | 8 +- .../generated/ActivityDependencyTests.java | 16 +- .../generated/ActivityPolicyTests.java | 17 +- .../generated/ActivityRunTests.java | 10 +- .../datafactory/generated/ActivityTests.java | 50 +- ...aFlowToDebugSessionResponseInnerTests.java | 11 +- .../AmazonMwsObjectDatasetTests.java | 45 +- .../generated/AmazonMwsSourceTests.java | 13 +- ...zonRdsForOraclePartitionSettingsTests.java | 10 +- .../AmazonRdsForOracleSourceTests.java | 22 +- .../AmazonRdsForOracleTableDatasetTests.java | 41 +- ...OracleTableDatasetTypePropertiesTests.java | 4 +- .../AmazonRdsForSqlServerSourceTests.java | 25 +- ...mazonRdsForSqlServerTableDatasetTests.java | 43 +- ...ServerTableDatasetTypePropertiesTests.java | 4 +- .../generated/AmazonRedshiftSourceTests.java | 28 +- .../AmazonRedshiftTableDatasetTests.java | 42 +- ...dshiftTableDatasetTypePropertiesTests.java | 13 +- .../AmazonS3CompatibleLocationTests.java | 8 +- .../AmazonS3CompatibleReadSettingsTests.java | 21 +- .../generated/AmazonS3LocationTests.java | 8 +- .../generated/AmazonS3ReadSettingsTests.java | 20 +- .../AppendVariableActivityTests.java | 57 +- ...ndVariableActivityTypePropertiesTests.java | 11 +- .../generated/ArmIdWrapperTests.java | 2 +- .../generated/AvroFormatTests.java | 4 +- .../datafactory/generated/AvroSinkTests.java | 39 +- .../generated/AvroSourceTests.java | 15 +- .../generated/AvroWriteSettingsTests.java | 16 +- .../generated/AzPowerShellSetupTests.java | 8 +- .../AzPowerShellSetupTypePropertiesTests.java | 8 +- .../generated/AzureBlobDatasetTests.java | 57 +- .../AzureBlobDatasetTypePropertiesTests.java | 18 +- .../generated/AzureBlobFSDatasetTests.java | 45 +- ...AzureBlobFSDatasetTypePropertiesTests.java | 14 +- .../generated/AzureBlobFSLocationTests.java | 7 +- .../AzureBlobFSReadSettingsTests.java | 20 +- .../generated/AzureBlobFSSinkTests.java | 21 +- .../generated/AzureBlobFSSourceTests.java | 13 +- .../AzureBlobFSWriteSettingsTests.java | 13 +- .../AzureBlobStorageLocationTests.java | 7 +- .../AzureBlobStorageReadSettingsTests.java | 21 +- 
.../AzureBlobStorageWriteSettingsTests.java | 13 +- ...AzureDataExplorerCommandActivityTests.java | 94 +- ...rerCommandActivityTypePropertiesTests.java | 7 +- ...ataExplorerDatasetTypePropertiesTests.java | 4 +- .../generated/AzureDataExplorerSinkTests.java | 16 +- .../AzureDataExplorerSourceTests.java | 15 +- .../AzureDataExplorerTableDatasetTests.java | 42 +- .../AzureDataLakeStoreDatasetTests.java | 52 +- ...taLakeStoreDatasetTypePropertiesTests.java | 16 +- .../AzureDataLakeStoreLocationTests.java | 6 +- .../AzureDataLakeStoreReadSettingsTests.java | 23 +- .../AzureDataLakeStoreSinkTests.java | 15 +- .../AzureDataLakeStoreSourceTests.java | 10 +- .../AzureDataLakeStoreWriteSettingsTests.java | 13 +- .../AzureDatabricksDeltaLakeDatasetTests.java | 46 +- ...ksDeltaLakeDatasetTypePropertiesTests.java | 7 +- ...DatabricksDeltaLakeExportCommandTests.java | 7 +- ...DatabricksDeltaLakeImportCommandTests.java | 7 +- .../AzureDatabricksDeltaLakeSinkTests.java | 18 +- .../AzureDatabricksDeltaLakeSourceTests.java | 14 +- .../AzureFileStorageLocationTests.java | 4 +- .../AzureFileStorageReadSettingsTests.java | 21 +- .../AzureFileStorageWriteSettingsTests.java | 13 +- .../generated/AzureFunctionActivityTests.java | 97 +- ...reFunctionActivityTypePropertiesTests.java | 16 +- .../AzureMLBatchExecutionActivityTests.java | 138 +- ...hExecutionActivityTypePropertiesTests.java | 67 +- .../AzureMLExecutePipelineActivityTests.java | 100 +- ...tePipelineActivityTypePropertiesTests.java | 16 +- .../AzureMLUpdateResourceActivityTests.java | 96 +- ...teResourceActivityTypePropertiesTests.java | 17 +- .../generated/AzureMLWebServiceFileTests.java | 12 +- .../generated/AzureMariaDBSourceTests.java | 13 +- .../AzureMariaDBTableDatasetTests.java | 39 +- .../generated/AzureMySqlSinkTests.java | 13 +- .../generated/AzureMySqlSourceTests.java | 12 +- .../AzureMySqlTableDatasetTests.java | 43 +- ...eMySqlTableDatasetTypePropertiesTests.java | 4 +- .../generated/AzurePostgreSqlSinkTests.java | 13 +- .../generated/AzurePostgreSqlSourceTests.java | 13 +- .../AzurePostgreSqlTableDatasetTests.java | 50 +- ...greSqlTableDatasetTypePropertiesTests.java | 12 +- .../generated/AzureQueueSinkTests.java | 12 +- .../AzureSearchIndexDatasetTests.java | 42 +- ...SearchIndexDatasetTypePropertiesTests.java | 4 +- .../generated/AzureSearchIndexSinkTests.java | 11 +- .../AzureSqlDWTableDatasetTests.java | 39 +- ...eSqlDWTableDatasetTypePropertiesTests.java | 8 +- .../AzureSqlMITableDatasetTests.java | 43 +- ...eSqlMITableDatasetTypePropertiesTests.java | 7 +- .../generated/AzureSqlSourceTests.java | 25 +- .../generated/AzureSqlTableDatasetTests.java | 45 +- ...ureSqlTableDatasetTypePropertiesTests.java | 12 +- ...ureSynapseArtifactsLinkedServiceTests.java | 38 +- ...factsLinkedServiceTypePropertiesTests.java | 11 +- .../generated/AzureTableDatasetTests.java | 37 +- .../AzureTableDatasetTypePropertiesTests.java | 5 +- .../generated/AzureTableSourceTests.java | 15 +- ...DataPoolParametrizationReferenceTests.java | 7 +- .../generated/BinaryDatasetTests.java | 47 +- .../BinaryDatasetTypePropertiesTests.java | 10 +- .../generated/BinaryReadSettingsTests.java | 4 +- .../generated/BinarySinkTests.java | 22 +- .../generated/BinarySourceTests.java | 19 +- .../generated/BlobEventsTriggerTests.java | 59 +- .../BlobEventsTriggerTypePropertiesTests.java | 31 +- .../datafactory/generated/BlobSinkTests.java | 21 +- .../generated/BlobSourceTests.java | 12 +- .../generated/BlobTriggerTests.java | 55 +- .../BlobTriggerTypePropertiesTests.java | 22 +- 
.../generated/CassandraSourceTests.java | 19 +- .../generated/ChainingTriggerTests.java | 47 +- .../ChainingTriggerTypePropertiesTests.java | 24 +- .../ChangeDataCaptureFolderTests.java | 8 +- .../ChangeDataCaptureListResponseTests.java | 271 +- .../ChangeDataCaptureResourceInnerTests.java | 287 ++- .../generated/ChangeDataCaptureTests.java | 410 ++- ...esCreateOrUpdateWithResponseMockTests.java | 154 +- ...taCapturesDeleteWithResponseMockTests.java | 36 +- ...eDataCapturesGetWithResponseMockTests.java | 65 +- ...ngeDataCapturesListByFactoryMockTests.java | 62 +- ...ataCapturesStartWithResponseMockTests.java | 37 +- ...taCapturesStatusWithResponseMockTests.java | 39 +- ...DataCapturesStopWithResponseMockTests.java | 36 +- .../generated/CmkIdentityDefinitionTests.java | 8 +- ...nDataServiceForAppsEntityDatasetTests.java | 50 +- ...rAppsEntityDatasetTypePropertiesTests.java | 4 +- .../CommonDataServiceForAppsSourceTests.java | 12 +- .../CompressionReadSettingsTests.java | 5 +- .../generated/ConcurObjectDatasetTests.java | 45 +- .../generated/ConcurSourceTests.java | 12 +- .../ConnectionStatePropertiesTests.java | 4 +- .../generated/ControlActivityTests.java | 65 +- .../CopyActivityLogSettingsTests.java | 6 +- .../generated/CopyActivityTests.java | 232 +- .../CopyActivityTypePropertiesTests.java | 88 +- .../CopyComputeScalePropertiesTests.java | 18 +- .../datafactory/generated/CopySinkTests.java | 12 +- .../generated/CopySourceTests.java | 10 +- .../generated/CopyTranslatorTests.java | 7 +- ...mosDbMongoDbApiCollectionDatasetTests.java | 40 +- ...iCollectionDatasetTypePropertiesTests.java | 7 +- .../CosmosDbMongoDbApiLinkedServiceTests.java | 32 +- ...DbApiLinkedServiceTypePropertiesTests.java | 8 +- .../CosmosDbMongoDbApiSinkTests.java | 13 +- .../CosmosDbMongoDbApiSourceTests.java | 22 +- .../CosmosDbSqlApiCollectionDatasetTests.java | 41 +- ...iCollectionDatasetTypePropertiesTests.java | 4 +- .../generated/CosmosDbSqlApiSinkTests.java | 13 +- .../generated/CosmosDbSqlApiSourceTests.java | 16 +- .../generated/CouchbaseSourceTests.java | 13 +- .../generated/CouchbaseTableDatasetTests.java | 39 +- ...reateDataFlowDebugSessionRequestTests.java | 47 +- ...ataFlowDebugSessionResponseInnerTests.java | 12 +- ...eLinkedIntegrationRuntimeRequestTests.java | 24 +- .../CreateRunResponseInnerTests.java | 8 +- .../CredentialListResponseTests.java | 62 +- ...nsCreateOrUpdateWithResponseMockTests.java | 77 +- ...OperationsDeleteWithResponseMockTests.java | 35 +- ...ialOperationsGetWithResponseMockTests.java | 45 +- ...ntialOperationsListByFactoryMockTests.java | 45 +- .../generated/CredentialReferenceTests.java | 9 +- .../CredentialResourceInnerTests.java | 47 + .../generated/CredentialTests.java | 12 +- .../CustomActivityReferenceObjectTests.java | 37 +- .../generated/CustomActivityTests.java | 144 +- .../CustomActivityTypePropertiesTests.java | 56 +- .../CustomDataSourceLinkedServiceTests.java | 29 +- .../generated/CustomDatasetTests.java | 43 +- .../generated/CustomEventsTriggerTests.java | 54 +- ...ustomEventsTriggerTypePropertiesTests.java | 21 +- .../generated/CustomSetupBaseTests.java | 2 +- .../DWCopyCommandDefaultValueTests.java | 6 +- .../generated/DWCopyCommandSettingsTests.java | 15 +- .../DataFlowDebugCommandPayloadTests.java | 24 +- .../DataFlowDebugCommandRequestTests.java | 30 +- ...ataFlowDebugCommandResponseInnerTests.java | 12 +- ...ataFlowDebugPackageDebugSettingsTests.java | 23 +- .../generated/DataFlowDebugPackageTests.java | 213 +- .../generated/DataFlowDebugResourceTests.java | 22 
+- .../DataFlowDebugSessionInfoInnerTests.java | 53 +- ...sionsAddDataFlowWithResponseMockTests.java | 215 +- .../DataFlowDebugSessionsCreateMockTests.java | 72 +- ...ugSessionsDeleteWithResponseMockTests.java | 37 +- ...wDebugSessionsExecuteCommandMockTests.java | 60 +- ...wDebugSessionsQueryByFactoryMockTests.java | 56 +- .../generated/DataFlowFolderTests.java | 8 +- .../generated/DataFlowListResponseTests.java | 43 +- .../generated/DataFlowReferenceTests.java | 11 +- .../generated/DataFlowResourceInnerTests.java | 22 +- .../generated/DataFlowSinkTests.java | 59 +- .../generated/DataFlowSourceSettingTests.java | 13 +- .../generated/DataFlowSourceTests.java | 49 +- .../generated/DataFlowStagingInfoTests.java | 15 +- .../datafactory/generated/DataFlowTests.java | 16 +- ...wsCreateOrUpdateWithResponseMockTests.java | 59 +- .../DataFlowsDeleteWithResponseMockTests.java | 34 +- .../DataFlowsGetWithResponseMockTests.java | 42 +- .../DataFlowsListByFactoryMockTests.java | 42 +- .../DataLakeAnalyticsUsqlActivityTests.java | 99 +- ...lyticsUsqlActivityTypePropertiesTests.java | 21 +- .../generated/DataMapperMappingTests.java | 179 +- .../DatabricksNotebookActivityTests.java | 92 +- ...ksNotebookActivityTypePropertiesTests.java | 16 +- .../DatabricksSparkJarActivityTests.java | 94 +- ...ksSparkJarActivityTypePropertiesTests.java | 17 +- .../DatabricksSparkPythonActivityTests.java | 104 +- ...parkPythonActivityTypePropertiesTests.java | 15 +- .../generated/DatasetCompressionTests.java | 8 +- .../generated/DatasetDebugResourceTests.java | 50 +- .../generated/DatasetFolderTests.java | 8 +- .../generated/DatasetListResponseTests.java | 109 +- .../generated/DatasetLocationTests.java | 7 +- .../generated/DatasetReferenceTests.java | 12 +- .../generated/DatasetResourceInnerTests.java | 42 +- .../DatasetSchemaDataElementTests.java | 9 +- .../generated/DatasetStorageFormatTests.java | 7 +- .../datafactory/generated/DatasetTests.java | 40 +- ...tsCreateOrUpdateWithResponseMockTests.java | 76 +- .../DatasetsDeleteWithResponseMockTests.java | 35 +- .../DatasetsGetWithResponseMockTests.java | 55 +- .../DatasetsListByFactoryMockTests.java | 51 +- .../datafactory/generated/Db2SourceTests.java | 12 +- .../generated/Db2TableDatasetTests.java | 45 +- .../Db2TableDatasetTypePropertiesTests.java | 11 +- .../generated/DeleteActivityTests.java | 106 +- .../DeleteActivityTypePropertiesTests.java | 38 +- ...eleteDataFlowDebugSessionRequestTests.java | 8 +- .../DelimitedTextReadSettingsTests.java | 8 +- .../generated/DelimitedTextSinkTests.java | 33 +- .../generated/DelimitedTextSourceTests.java | 22 +- .../DelimitedTextWriteSettingsTests.java | 9 +- .../generated/DependencyReferenceTests.java | 3 +- .../generated/DistcpSettingsTests.java | 7 +- .../DocumentDbCollectionDatasetTests.java | 46 +- ...bCollectionDatasetTypePropertiesTests.java | 4 +- .../DocumentDbCollectionSinkTests.java | 15 +- .../DocumentDbCollectionSourceTests.java | 14 +- .../DrillDatasetTypePropertiesTests.java | 11 +- .../generated/DrillSourceTests.java | 12 +- .../generated/DrillTableDatasetTests.java | 44 +- .../DynamicsAXResourceDatasetTests.java | 43 +- ...sAXResourceDatasetTypePropertiesTests.java | 4 +- .../generated/DynamicsAXSourceTests.java | 14 +- .../DynamicsCrmEntityDatasetTests.java | 43 +- ...csCrmEntityDatasetTypePropertiesTests.java | 7 +- .../generated/DynamicsCrmSourceTests.java | 11 +- .../generated/DynamicsEntityDatasetTests.java | 36 +- ...amicsEntityDatasetTypePropertiesTests.java | 7 +- .../generated/DynamicsSourceTests.java | 
11 +- .../generated/EloquaObjectDatasetTests.java | 44 +- .../generated/EloquaSourceTests.java | 13 +- .../generated/EntityReferenceTests.java | 8 +- .../EnvironmentVariableSetupTests.java | 12 +- ...nmentVariableSetupTypePropertiesTests.java | 13 +- .../generated/ExcelDatasetTests.java | 52 +- .../ExcelDatasetTypePropertiesTests.java | 19 +- .../generated/ExcelSourceTests.java | 16 +- .../ExecuteDataFlowActivityTests.java | 139 +- ...lowActivityTypePropertiesComputeTests.java | 7 +- ...teDataFlowActivityTypePropertiesTests.java | 46 +- .../ExecutePipelineActivityPolicyTests.java | 10 +- .../ExecutePipelineActivityTests.java | 82 +- ...tePipelineActivityTypePropertiesTests.java | 15 +- ...PowerQueryActivityTypePropertiesTests.java | 448 ++-- ...ExecuteWranglingDataflowActivityTests.java | 460 ++-- .../generated/ExecutionActivityTests.java | 72 +- .../generated/ExportSettingsTests.java | 4 +- .../ExposureControlBatchRequestTests.java | 15 +- ...xposureControlBatchResponseInnerTests.java | 8 +- .../ExposureControlRequestTests.java | 12 +- .../ExposureControlResponseInnerTests.java | 2 +- ...reValueByFactoryWithResponseMockTests.java | 38 +- ...sGetFeatureValueWithResponseMockTests.java | 38 +- ...eValuesByFactoryWithResponseMockTests.java | 42 +- .../generated/ExpressionTests.java | 8 +- .../generated/ExpressionV2Tests.java | 139 +- ...eByResourceGroupWithResponseMockTests.java | 35 +- .../generated/FactoryIdentityTests.java | 11 +- .../FactoryRepoConfigurationTests.java | 30 +- .../generated/FactoryRepoUpdateTests.java | 37 +- .../FactoryUpdateParametersTests.java | 21 +- .../FactoryUpdatePropertiesTests.java | 8 +- .../FactoryVstsConfigurationTests.java | 46 +- .../generated/FileServerLocationTests.java | 4 +- .../FileServerReadSettingsTests.java | 21 +- .../FileServerWriteSettingsTests.java | 13 +- .../generated/FileShareDatasetTests.java | 51 +- .../FileShareDatasetTypePropertiesTests.java | 20 +- .../generated/FileSystemSinkTests.java | 13 +- .../generated/FileSystemSourceTests.java | 12 +- .../generated/FilterActivityTests.java | 81 +- .../FilterActivityTypePropertiesTests.java | 15 +- .../datafactory/generated/FlowletTests.java | 324 +-- .../generated/FlowletTypePropertiesTests.java | 293 ++- .../generated/ForEachActivityTests.java | 133 +- .../ForEachActivityTypePropertiesTests.java | 158 +- .../generated/FormatReadSettingsTests.java | 5 +- .../generated/FormatWriteSettingsTests.java | 5 +- .../generated/FtpReadSettingsTests.java | 19 +- .../generated/FtpServerLocationTests.java | 4 +- .../GenericDatasetTypePropertiesTests.java | 6 +- ...taFactoryOperationStatusResponseTests.java | 12 +- .../generated/GetMetadataActivityTests.java | 97 +- ...etMetadataActivityTypePropertiesTests.java | 21 +- .../GetSsisObjectMetadataRequestTests.java | 10 +- .../GlobalParameterListResponseTests.java | 44 +- .../GlobalParameterResourceInnerTests.java | 20 +- .../GlobalParameterSpecificationTests.java | 11 +- ...rsCreateOrUpdateWithResponseMockTests.java | 61 +- ...ParametersDeleteWithResponseMockTests.java | 35 +- ...balParametersGetWithResponseMockTests.java | 41 +- ...lobalParametersListByFactoryMockTests.java | 42 +- .../GoogleAdWordsObjectDatasetTests.java | 41 +- .../generated/GoogleAdWordsSourceTests.java | 13 +- ...gleBigQueryDatasetTypePropertiesTests.java | 12 +- .../GoogleBigQueryObjectDatasetTests.java | 42 +- .../generated/GoogleBigQuerySourceTests.java | 13 +- ...eBigQueryV2DatasetTypePropertiesTests.java | 5 +- .../GoogleBigQueryV2ObjectDatasetTests.java | 41 +- 
.../GoogleBigQueryV2SourceTests.java | 13 +- .../GoogleCloudStorageLocationTests.java | 8 +- .../GoogleCloudStorageReadSettingsTests.java | 20 +- .../GreenplumDatasetTypePropertiesTests.java | 11 +- .../generated/GreenplumSourceTests.java | 13 +- .../generated/GreenplumTableDatasetTests.java | 43 +- .../generated/HBaseObjectDatasetTests.java | 41 +- .../generated/HBaseSourceTests.java | 12 +- .../generated/HDInsightHiveActivityTests.java | 147 +- ...nsightHiveActivityTypePropertiesTests.java | 38 +- .../HDInsightMapReduceActivityTests.java | 126 +- ...tMapReduceActivityTypePropertiesTests.java | 51 +- .../generated/HDInsightPigActivityTests.java | 107 +- ...InsightPigActivityTypePropertiesTests.java | 40 +- .../HDInsightSparkActivityTests.java | 106 +- ...sightSparkActivityTypePropertiesTests.java | 31 +- .../HDInsightStreamingActivityTests.java | 115 +- ...tStreamingActivityTypePropertiesTests.java | 44 +- .../generated/HdfsLocationTests.java | 4 +- .../generated/HdfsReadSettingsTests.java | 25 +- .../generated/HdfsSourceTests.java | 14 +- .../HiveDatasetTypePropertiesTests.java | 7 +- .../generated/HiveObjectDatasetTests.java | 45 +- .../generated/HiveSourceTests.java | 13 +- .../generated/HttpDatasetTests.java | 52 +- .../HttpDatasetTypePropertiesTests.java | 16 +- .../generated/HttpReadSettingsTests.java | 13 +- .../generated/HttpServerLocationTests.java | 7 +- .../generated/HttpSourceTests.java | 10 +- .../generated/HubspotObjectDatasetTests.java | 37 +- .../generated/HubspotSourceTests.java | 13 +- .../generated/IfConditionActivityTests.java | 261 +- ...fConditionActivityTypePropertiesTests.java | 203 +- .../ImpalaDatasetTypePropertiesTests.java | 8 +- .../generated/ImpalaObjectDatasetTests.java | 45 +- .../generated/ImpalaSourceTests.java | 12 +- .../generated/ImportSettingsTests.java | 4 +- .../generated/InformixSinkTests.java | 13 +- .../generated/InformixSourceTests.java | 12 +- .../generated/InformixTableDatasetTests.java | 42 +- ...formixTableDatasetTypePropertiesTests.java | 7 +- ...egrationRuntimeComputePropertiesTests.java | 125 +- ...ionRuntimeCustomerVirtualNetworkTests.java | 8 +- ...owPropertiesCustomPropertiesItemTests.java | 12 +- ...grationRuntimeDataFlowPropertiesTests.java | 37 +- ...rationRuntimeDataProxyPropertiesTests.java | 27 +- .../IntegrationRuntimeDebugResourceTests.java | 31 +- .../IntegrationRuntimeListResponseTests.java | 53 +- ...rationRuntimeMonitoringDataInnerTests.java | 14 +- ...grationRuntimeNodeIpAddressInnerTests.java | 2 +- ...grationRuntimeNodeMonitoringDataTests.java | 8 +- ...ntimeNodesDeleteWithResponseMockTests.java | 36 +- ...odesGetIpAddressWithResponseMockTests.java | 37 +- ...nRuntimeNodesGetWithResponseMockTests.java | 36 +- ...ntimeNodesUpdateWithResponseMockTests.java | 38 +- ...jectMetadatasGetWithResponseMockTests.java | 47 +- ...untimeObjectMetadatasRefreshMockTests.java | 53 +- ...workDependenciesCategoryEndpointTests.java | 39 +- ...tworkDependenciesEndpointDetailsTests.java | 8 +- ...boundNetworkDependenciesEndpointTests.java | 15 +- ...pendenciesEndpointsResponseInnerTests.java | 49 +- .../IntegrationRuntimeReferenceTests.java | 11 +- .../IntegrationRuntimeResourceInnerTests.java | 31 +- ...rationRuntimeStatusResponseInnerTests.java | 23 +- .../IntegrationRuntimeStatusTests.java | 20 +- .../generated/IntegrationRuntimeTests.java | 24 +- ...IntegrationRuntimeVNetPropertiesTests.java | 26 +- ...tegrationRuntimeWithResponseMockTests.java | 42 +- ...esCreateOrUpdateWithResponseMockTests.java | 70 +- 
...onRuntimesDeleteWithResponseMockTests.java | 36 +- ...etMonitoringDataWithResponseMockTests.java | 39 +- ...untimesGetStatusWithResponseMockTests.java | 37 +- ...ationRuntimesGetWithResponseMockTests.java | 40 +- ...grationRuntimesListByFactoryMockTests.java | 40 +- ...denciesEndpointsWithResponseMockTests.java | 51 +- ...timesRemoveLinksWithResponseMockTests.java | 40 +- .../IntegrationRuntimesStartMockTests.java | 38 +- .../IntegrationRuntimesStopMockTests.java | 34 +- ...sSyncCredentialsWithResponseMockTests.java | 36 +- ...nRuntimesUpgradeWithResponseMockTests.java | 35 +- .../generated/JiraObjectDatasetTests.java | 36 +- .../generated/JiraSourceTests.java | 12 +- .../generated/JsonDatasetTests.java | 49 +- .../JsonDatasetTypePropertiesTests.java | 13 +- .../generated/JsonFormatTests.java | 12 +- .../generated/JsonReadSettingsTests.java | 4 +- .../datafactory/generated/JsonSinkTests.java | 24 +- .../generated/JsonSourceTests.java | 17 +- .../generated/JsonWriteSettingsTests.java | 6 +- .../generated/LakeHouseLocationTests.java | 4 +- .../generated/LakeHouseReadSettingsTests.java | 19 +- .../generated/LakeHouseTableDatasetTests.java | 44 +- ...eHouseTableDatasetTypePropertiesTests.java | 4 +- .../generated/LakeHouseTableSinkTests.java | 16 +- .../generated/LakeHouseTableSourceTests.java | 13 +- .../LakeHouseWriteSettingsTests.java | 14 +- .../LinkedIntegrationRuntimeRequestTests.java | 9 +- .../LinkedIntegrationRuntimeTests.java | 2 +- .../LinkedServiceDebugResourceTests.java | 44 +- .../LinkedServiceListResponseTests.java | 78 +- .../LinkedServiceReferenceTests.java | 15 +- .../LinkedServiceResourceInnerTests.java | 37 +- .../generated/LinkedServiceTests.java | 42 +- ...esCreateOrUpdateWithResponseMockTests.java | 69 +- ...edServicesDeleteWithResponseMockTests.java | 35 +- ...inkedServicesGetWithResponseMockTests.java | 45 +- .../LinkedServicesListByFactoryMockTests.java | 45 +- .../generated/LogLocationSettingsTests.java | 17 +- .../generated/LogSettingsTests.java | 18 +- .../generated/LogStorageSettingsTests.java | 16 +- .../generated/LookupActivityTests.java | 98 +- .../LookupActivityTypePropertiesTests.java | 20 +- .../generated/MagentoObjectDatasetTests.java | 40 +- .../generated/MagentoSourceTests.java | 13 +- ...dIdentityCredentialResourceInnerTests.java | 35 - ...anagedIdentityCredentialResourceTests.java | 32 + .../ManagedIdentityCredentialTests.java | 16 +- .../ManagedIdentityTypePropertiesTests.java | 8 +- ...nagedPrivateEndpointListResponseTests.java | 46 +- ...agedPrivateEndpointResourceInnerTests.java | 27 +- .../ManagedPrivateEndpointTests.java | 20 +- ...tsCreateOrUpdateWithResponseMockTests.java | 57 +- ...eEndpointsDeleteWithResponseMockTests.java | 36 +- ...vateEndpointsGetWithResponseMockTests.java | 44 +- ...rivateEndpointsListByFactoryMockTests.java | 52 +- ...anagedVirtualNetworkListResponseTests.java | 21 +- .../ManagedVirtualNetworkReferenceTests.java | 9 +- ...nagedVirtualNetworkResourceInnerTests.java | 12 +- .../generated/ManagedVirtualNetworkTests.java | 9 +- ...ksCreateOrUpdateWithResponseMockTests.java | 53 +- ...rtualNetworksGetWithResponseMockTests.java | 39 +- ...VirtualNetworksListByFactoryMockTests.java | 38 +- .../MapperAttributeMappingTests.java | 63 +- .../MapperAttributeMappingsTests.java | 179 +- .../MapperAttributeReferenceTests.java | 21 +- .../MapperConnectionReferenceTests.java | 8 +- .../generated/MapperConnectionTests.java | 29 +- .../MapperDslConnectorPropertiesTests.java | 9 +- .../MapperPolicyRecurrenceTests.java | 12 +- 
.../generated/MapperPolicyTests.java | 18 +- .../MapperSourceConnectionsInfoTests.java | 96 +- .../generated/MapperTablePropertiesTests.java | 29 +- .../generated/MapperTableSchemaTests.java | 14 +- .../generated/MapperTableTests.java | 29 +- .../MapperTargetConnectionsInfoTests.java | 238 +- .../generated/MappingDataFlowTests.java | 329 +-- .../MappingDataFlowTypePropertiesTests.java | 297 +-- .../generated/MariaDBSourceTests.java | 12 +- .../generated/MariaDBTableDatasetTests.java | 43 +- .../generated/MarketoObjectDatasetTests.java | 37 +- .../generated/MarketoSourceTests.java | 13 +- .../generated/MetadataItemTests.java | 4 +- .../generated/MicrosoftAccessSinkTests.java | 13 +- .../generated/MicrosoftAccessSourceTests.java | 12 +- .../MicrosoftAccessTableDatasetTests.java | 42 +- ...AccessTableDatasetTypePropertiesTests.java | 4 +- .../MongoDbAtlasCollectionDatasetTests.java | 38 +- ...sCollectionDatasetTypePropertiesTests.java | 4 +- .../MongoDbAtlasLinkedServiceTests.java | 37 +- ...AtlasLinkedServiceTypePropertiesTests.java | 8 +- .../generated/MongoDbAtlasSinkTests.java | 13 +- .../generated/MongoDbAtlasSourceTests.java | 22 +- .../MongoDbCollectionDatasetTests.java | 48 +- ...bCollectionDatasetTypePropertiesTests.java | 7 +- .../MongoDbCursorMethodsPropertiesTests.java | 10 +- .../generated/MongoDbSourceTests.java | 11 +- .../MongoDbV2CollectionDatasetTests.java | 42 +- ...2CollectionDatasetTypePropertiesTests.java | 7 +- .../MongoDbV2LinkedServiceTests.java | 34 +- ...oDbV2LinkedServiceTypePropertiesTests.java | 7 +- .../generated/MongoDbV2SinkTests.java | 13 +- .../generated/MongoDbV2SourceTests.java | 22 +- .../MultiplePipelineTriggerTests.java | 41 +- .../generated/MySqlSourceTests.java | 13 +- .../generated/MySqlTableDatasetTests.java | 41 +- .../MySqlTableDatasetTypePropertiesTests.java | 4 +- .../NetezzaPartitionSettingsTests.java | 7 +- .../generated/NetezzaSourceTests.java | 19 +- .../generated/NetezzaTableDatasetTests.java | 44 +- ...etezzaTableDatasetTypePropertiesTests.java | 8 +- .../generated/NotebookParameterTests.java | 10 +- .../generated/ODataResourceDatasetTests.java | 39 +- ...ataResourceDatasetTypePropertiesTests.java | 4 +- .../generated/ODataSourceTests.java | 13 +- .../datafactory/generated/OdbcSinkTests.java | 13 +- .../generated/OdbcSourceTests.java | 12 +- .../generated/OdbcTableDatasetTests.java | 39 +- .../OdbcTableDatasetTypePropertiesTests.java | 4 +- .../generated/Office365DatasetTests.java | 45 +- .../Office365DatasetTypePropertiesTests.java | 6 +- .../generated/Office365SourceTests.java | 17 +- .../generated/OperationDisplayTests.java | 6 +- .../generated/OperationInnerTests.java | 28 +- .../generated/OperationListResponseTests.java | 27 +- .../OperationLogSpecificationTests.java | 3 +- .../OperationMetricDimensionTests.java | 3 +- .../OperationMetricSpecificationTests.java | 19 +- .../generated/OperationPropertiesTests.java | 65 +- .../OperationServiceSpecificationTests.java | 19 +- .../generated/OperationsListMockTests.java | 68 +- .../OracleCloudStorageLocationTests.java | 8 +- .../OracleCloudStorageReadSettingsTests.java | 20 +- .../OraclePartitionSettingsTests.java | 9 +- .../OracleServiceCloudObjectDatasetTests.java | 43 +- .../OracleServiceCloudSourceTests.java | 13 +- .../generated/OracleSinkTests.java | 13 +- .../generated/OracleSourceTests.java | 22 +- .../generated/OracleTableDatasetTests.java | 44 +- ...OracleTableDatasetTypePropertiesTests.java | 10 +- .../datafactory/generated/OrcFormatTests.java | 4 +- 
.../datafactory/generated/OrcSinkTests.java | 25 +- .../datafactory/generated/OrcSourceTests.java | 16 +- .../generated/OrcWriteSettingsTests.java | 4 +- .../generated/PackageStoreTests.java | 17 +- .../ParameterSpecificationTests.java | 11 +- .../generated/ParquetFormatTests.java | 4 +- .../generated/ParquetReadSettingsTests.java | 6 +- .../generated/ParquetSinkTests.java | 24 +- .../generated/ParquetSourceTests.java | 18 +- .../generated/ParquetWriteSettingsTests.java | 4 +- .../generated/PaypalObjectDatasetTests.java | 45 +- .../generated/PaypalSourceTests.java | 13 +- .../PhoenixDatasetTypePropertiesTests.java | 6 +- .../generated/PhoenixObjectDatasetTests.java | 48 +- .../generated/PhoenixSourceTests.java | 13 +- .../PipelineElapsedTimeMetricPolicyTests.java | 5 +- ...neExternalComputeScalePropertiesTests.java | 22 +- .../generated/PipelineFolderTests.java | 8 +- .../generated/PipelineListResponseTests.java | 277 ++- .../generated/PipelinePolicyTests.java | 8 +- .../generated/PipelineReferenceTests.java | 15 +- .../generated/PipelineResourceInnerTests.java | 171 +- .../generated/PipelineRunInnerTests.java | 30 +- .../generated/PipelineRunInvokedByTests.java | 2 +- ...pelineRunsCancelWithResponseMockTests.java | 37 +- .../PipelineRunsGetWithResponseMockTests.java | 37 +- .../datafactory/generated/PipelineTests.java | 93 +- ...esCreateOrUpdateWithResponseMockTests.java | 165 +- ...pelinesCreateRunWithResponseMockTests.java | 49 +- .../PipelinesDeleteWithResponseMockTests.java | 34 +- .../PipelinesGetWithResponseMockTests.java | 58 +- .../PipelinesListByFactoryMockTests.java | 57 +- .../generated/PolybaseSettingsTests.java | 8 +- .../generated/PostgreSqlSourceTests.java | 13 +- .../PostgreSqlTableDatasetTests.java | 43 +- ...greSqlTableDatasetTypePropertiesTests.java | 11 +- .../generated/PostgreSqlV2SourceTests.java | 13 +- .../PostgreSqlV2TableDatasetTests.java | 44 +- ...eSqlV2TableDatasetTypePropertiesTests.java | 4 +- .../generated/PowerQuerySinkMappingTests.java | 124 +- .../generated/PowerQuerySinkTests.java | 67 +- .../generated/PowerQuerySourceTests.java | 55 +- .../PowerQueryTypePropertiesTests.java | 151 +- .../PrestoDatasetTypePropertiesTests.java | 7 +- .../generated/PrestoObjectDatasetTests.java | 43 +- .../generated/PrestoSourceTests.java | 13 +- ...ointConnectionsListByFactoryMockTests.java | 44 +- ...teEndpointConnectionListResponseTests.java | 39 +- ...nsCreateOrUpdateWithResponseMockTests.java | 60 +- ...OperationsDeleteWithResponseMockTests.java | 36 +- ...ionOperationsGetWithResponseMockTests.java | 45 +- ...eEndpointConnectionResourceInnerTests.java | 27 +- .../generated/PrivateEndpointTests.java | 8 +- ...onnectionApprovalRequestResourceTests.java | 31 +- ...ateLinkConnectionApprovalRequestTests.java | 25 +- .../PrivateLinkConnectionStateTests.java | 20 +- .../PrivateLinkResourcePropertiesTests.java | 2 +- .../generated/PrivateLinkResourceTests.java | 8 +- ...LinkResourcesGetWithResponseMockTests.java | 39 +- ...PrivateLinkResourcesWrapperInnerTests.java | 11 +- .../generated/PurviewConfigurationTests.java | 8 +- ...eryDataFlowDebugSessionsResponseTests.java | 82 +- .../QuickBooksObjectDatasetTests.java | 40 +- .../generated/QuickBooksSourceTests.java | 13 +- .../RecurrenceScheduleOccurrenceTests.java | 15 +- .../generated/RecurrenceScheduleTests.java | 45 +- .../RedirectIncompatibleRowSettingsTests.java | 8 +- .../RedshiftUnloadSettingsTests.java | 17 +- .../generated/RelationalSourceTests.java | 12 +- .../RelationalTableDatasetTests.java | 37 +- 
...tionalTableDatasetTypePropertiesTests.java | 7 +- .../RemotePrivateEndpointConnectionTests.java | 19 +- .../RerunTumblingWindowTriggerTests.java | 29 +- ...blingWindowTriggerTypePropertiesTests.java | 22 +- .../ResponsysObjectDatasetTests.java | 41 +- .../generated/ResponsysSourceTests.java | 13 +- .../generated/RestResourceDatasetTests.java | 47 +- ...estResourceDatasetTypePropertiesTests.java | 13 +- .../datafactory/generated/RestSinkTests.java | 19 +- .../generated/RestSourceTests.java | 19 +- .../generated/RetryPolicyTests.java | 8 +- .../generated/RunQueryFilterTests.java | 20 +- .../generated/RunQueryOrderByTests.java | 10 +- ...forceMarketingCloudObjectDatasetTests.java | 42 +- .../SalesforceMarketingCloudSourceTests.java | 13 +- .../SalesforceObjectDatasetTests.java | 44 +- ...forceObjectDatasetTypePropertiesTests.java | 7 +- ...esforceServiceCloudObjectDatasetTests.java | 41 +- ...CloudObjectDatasetTypePropertiesTests.java | 4 +- .../SalesforceServiceCloudSinkTests.java | 20 +- .../SalesforceServiceCloudSourceTests.java | 14 +- ...forceServiceCloudV2ObjectDatasetTests.java | 39 +- ...oudV2ObjectDatasetTypePropertiesTests.java | 6 +- .../SalesforceServiceCloudV2SinkTests.java | 17 +- .../SalesforceServiceCloudV2SourceTests.java | 13 +- .../generated/SalesforceSinkTests.java | 16 +- .../generated/SalesforceSourceTests.java | 14 +- .../SalesforceV2ObjectDatasetTests.java | 41 +- ...rceV2ObjectDatasetTypePropertiesTests.java | 7 +- .../generated/SalesforceV2SinkTests.java | 16 +- .../generated/SalesforceV2SourceTests.java | 14 +- .../generated/SapBwCubeDatasetTests.java | 41 +- .../generated/SapBwSourceTests.java | 12 +- ...pCloudForCustomerResourceDatasetTests.java | 39 +- ...merResourceDatasetTypePropertiesTests.java | 4 +- .../SapCloudForCustomerSinkTests.java | 15 +- .../SapCloudForCustomerSourceTests.java | 14 +- .../generated/SapEccResourceDatasetTests.java | 42 +- ...EccResourceDatasetTypePropertiesTests.java | 4 +- .../generated/SapEccSourceTests.java | 14 +- .../SapHanaPartitionSettingsTests.java | 6 +- .../generated/SapHanaSourceTests.java | 18 +- .../generated/SapHanaTableDatasetTests.java | 43 +- ...apHanaTableDatasetTypePropertiesTests.java | 4 +- .../generated/SapOdpResourceDatasetTests.java | 47 +- ...OdpResourceDatasetTypePropertiesTests.java | 4 +- .../generated/SapOdpSourceTests.java | 17 +- .../generated/SapOpenHubSourceTests.java | 17 +- .../SapOpenHubTableDatasetTests.java | 44 +- ...penHubTableDatasetTypePropertiesTests.java | 7 +- .../SapTablePartitionSettingsTests.java | 9 +- .../SapTableResourceDatasetTests.java | 44 +- ...bleResourceDatasetTypePropertiesTests.java | 4 +- .../generated/SapTableSourceTests.java | 29 +- .../ScheduleTriggerRecurrenceTests.java | 70 +- .../generated/ScheduleTriggerTests.java | 116 +- .../ScheduleTriggerTypePropertiesTests.java | 86 +- .../generated/ScriptActionTests.java | 19 +- .../ScriptActivityParameterTests.java | 22 +- .../ScriptActivityScriptBlockTests.java | 50 +- .../generated/ScriptActivityTests.java | 141 +- ...ctivityTypePropertiesLogSettingsTests.java | 25 +- .../ScriptActivityTypePropertiesTests.java | 92 +- .../generated/SecretBaseTests.java | 2 +- .../SecureInputOutputPolicyTests.java | 8 +- .../generated/SecureStringTests.java | 8 +- ...cyTumblingWindowTriggerReferenceTests.java | 16 +- ...ostedIntegrationRuntimeNodeInnerTests.java | 21 +- ...lfHostedIntegrationRuntimeStatusTests.java | 99 +- ...ationRuntimeStatusTypePropertiesTests.java | 83 +- .../ServiceNowObjectDatasetTests.java | 41 +- 
.../generated/ServiceNowSourceTests.java | 13 +- .../ServiceNowV2ObjectDatasetTests.java | 45 +- .../generated/ServiceNowV2SourceTests.java | 89 +- .../generated/SetVariableActivityTests.java | 78 +- ...etVariableActivityTypePropertiesTests.java | 19 +- .../generated/SftpLocationTests.java | 4 +- .../generated/SftpReadSettingsTests.java | 20 +- .../generated/SftpWriteSettingsTests.java | 16 +- ...tOnlineListDatasetTypePropertiesTests.java | 4 +- ...rePointOnlineListResourceDatasetTests.java | 41 +- .../SharePointOnlineListSourceTests.java | 12 +- .../generated/ShopifyObjectDatasetTests.java | 39 +- .../generated/ShopifySourceTests.java | 12 +- .../generated/SkipErrorFileTests.java | 8 +- .../generated/SnowflakeDatasetTests.java | 41 +- .../SnowflakeDatasetTypePropertiesTests.java | 4 +- .../SnowflakeExportCopyCommandTests.java | 9 +- .../SnowflakeImportCopyCommandTests.java | 9 +- .../generated/SnowflakeSinkTests.java | 21 +- .../generated/SnowflakeSourceTests.java | 20 +- .../generated/SnowflakeV2DatasetTests.java | 46 +- .../generated/SnowflakeV2SinkTests.java | 19 +- .../generated/SnowflakeV2SourceTests.java | 19 +- ...gurationParametrizationReferenceTests.java | 9 +- .../SparkDatasetTypePropertiesTests.java | 8 +- .../generated/SparkObjectDatasetTests.java | 46 +- .../generated/SparkSourceTests.java | 12 +- .../generated/SqlDWSourceTests.java | 23 +- .../generated/SqlMISourceTests.java | 25 +- .../generated/SqlPartitionSettingsTests.java | 7 +- .../generated/SqlServerSourceTests.java | 26 +- ...SqlServerStoredProcedureActivityTests.java | 72 +- ...dProcedureActivityTypePropertiesTests.java | 8 +- .../generated/SqlServerTableDatasetTests.java | 42 +- ...ServerTableDatasetTypePropertiesTests.java | 12 +- .../datafactory/generated/SqlSourceTests.java | 23 +- .../generated/SquareObjectDatasetTests.java | 40 +- .../generated/SquareSourceTests.java | 12 +- .../generated/SsisChildPackageTests.java | 16 +- .../SsisEnvironmentReferenceTests.java | 24 +- .../generated/SsisEnvironmentTests.java | 58 - .../SsisExecutionParameterTests.java | 4 +- .../generated/SsisFolderTests.java | 19 +- ...sObjectMetadataListResponseInnerTests.java | 37 +- ...bjectMetadataStatusResponseInnerTests.java | 25 +- .../generated/SsisObjectMetadataTests.java | 17 +- .../generated/SsisPackageTests.java | 68 - .../generated/SsisParameterTests.java | 54 +- .../generated/SsisProjectTests.java | 179 +- .../generated/SsisPropertyOverrideTests.java | 4 +- .../generated/SsisVariableTests.java | 40 +- .../generated/StagingSettingsTests.java | 14 +- .../generated/StoreReadSettingsTests.java | 8 +- .../generated/StoreWriteSettingsTests.java | 14 +- .../SubResourceDebugResourceTests.java | 8 +- .../generated/SwitchActivityTests.java | 369 +-- .../SwitchActivityTypePropertiesTests.java | 384 +-- .../generated/SwitchCaseTests.java | 89 +- .../generated/SybaseSourceTests.java | 13 +- .../generated/SybaseTableDatasetTests.java | 41 +- ...SybaseTableDatasetTypePropertiesTests.java | 6 +- .../SynapseNotebookActivityTests.java | 108 +- ...seNotebookActivityTypePropertiesTests.java | 32 +- .../SynapseNotebookReferenceTests.java | 7 +- .../SynapseSparkJobReferenceTests.java | 7 +- .../generated/TabularSourceTests.java | 12 +- .../generated/TabularTranslatorTests.java | 20 +- .../generated/TarGZipReadSettingsTests.java | 5 +- .../generated/TarReadSettingsTests.java | 4 +- .../TeradataPartitionSettingsTests.java | 7 +- .../generated/TeradataSourceTests.java | 19 +- .../generated/TeradataTableDatasetTests.java | 44 +- 
...radataTableDatasetTypePropertiesTests.java | 5 +- .../generated/TextFormatTests.java | 18 +- .../generated/TransformationTests.java | 42 +- .../TriggerDependencyReferenceTests.java | 8 +- .../generated/TriggerListResponseTests.java | 45 +- .../TriggerPipelineReferenceTests.java | 14 +- .../generated/TriggerReferenceTests.java | 10 +- .../generated/TriggerResourceInnerTests.java | 18 +- .../generated/TriggerRunTests.java | 34 +- ...riggerRunsCancelWithResponseMockTests.java | 37 +- ...TriggerRunsRerunWithResponseMockTests.java | 36 +- ...SubscriptionOperationStatusInnerTests.java | 2 +- .../datafactory/generated/TriggerTests.java | 12 +- ...rsCreateOrUpdateWithResponseMockTests.java | 56 +- .../TriggersDeleteWithResponseMockTests.java | 34 +- ...bscriptionStatusWithResponseMockTests.java | 44 +- .../TriggersGetWithResponseMockTests.java | 40 +- .../TriggersListByFactoryMockTests.java | 40 +- .../generated/TriggersStartMockTests.java | 34 +- .../generated/TriggersStopMockTests.java | 34 +- .../TriggersSubscribeToEventsMockTests.java | 38 +- ...riggersUnsubscribeFromEventsMockTests.java | 38 +- ...WindowTriggerDependencyReferenceTests.java | 24 +- .../generated/TumblingWindowTriggerTests.java | 62 +- ...blingWindowTriggerTypePropertiesTests.java | 41 +- .../TypeConversionSettingsTests.java | 12 +- .../generated/UntilActivityTests.java | 172 +- .../UntilActivityTypePropertiesTests.java | 148 +- ...ateIntegrationRuntimeNodeRequestTests.java | 8 +- .../UpdateIntegrationRuntimeRequestTests.java | 15 +- .../generated/UserAccessPolicyTests.java | 30 +- .../generated/UserPropertyTests.java | 10 +- .../generated/ValidationActivityTests.java | 77 +- ...ValidationActivityTypePropertiesTests.java | 17 +- .../generated/VariableSpecificationTests.java | 11 +- .../VerticaDatasetTypePropertiesTests.java | 10 +- .../generated/VerticaSourceTests.java | 13 +- .../generated/VerticaTableDatasetTests.java | 46 +- .../generated/WaitActivityTests.java | 48 +- .../WaitActivityTypePropertiesTests.java | 6 +- .../generated/WarehouseSinkTests.java | 30 +- .../generated/WarehouseSourceTests.java | 24 +- .../generated/WarehouseTableDatasetTests.java | 43 +- ...ehouseTableDatasetTypePropertiesTests.java | 5 +- .../WebAnonymousAuthenticationTests.java | 4 +- .../generated/WebLinkedServiceTests.java | 44 +- .../WebLinkedServiceTypePropertiesTests.java | 9 +- .../datafactory/generated/WebSourceTests.java | 10 +- .../generated/WebTableDatasetTests.java | 45 +- .../WebTableDatasetTypePropertiesTests.java | 4 +- .../generated/WranglingDataFlowTests.java | 82 +- .../generated/XeroObjectDatasetTests.java | 41 +- .../generated/XeroSourceTests.java | 12 +- .../generated/XmlDatasetTests.java | 49 +- .../XmlDatasetTypePropertiesTests.java | 14 +- .../generated/XmlReadSettingsTests.java | 10 +- .../datafactory/generated/XmlSourceTests.java | 24 +- .../ZipDeflateReadSettingsTests.java | 4 +- .../generated/ZohoObjectDatasetTests.java | 44 +- .../generated/ZohoSourceTests.java | 12 +- 1819 files changed, 35090 insertions(+), 22952 deletions(-) rename sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/{ManagedIdentityCredentialResourceInner.java => CredentialResourceInner.java} (67%) create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialResourceImpl.java delete mode 100644 
sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedIdentityCredentialResourceImpl.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialResource.java delete mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptType.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredentialResource.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java delete mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceInnerTests.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceTests.java delete mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java delete mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md index c8a77309e31fc..f147fbb5039b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md +++ b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md @@ -1,14 +1,2213 @@ # Release History -## 1.0.0-beta.28 (Unreleased) +## 1.0.0-beta.28 (2024-04-18) -### Features Added +- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt). 
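For context on the entry point these changelog entries apply to, here is a minimal, illustrative sketch of authenticating `DataFactoryManager` (the package's management client). It assumes the standard `azure-identity` `DefaultAzureCredentialBuilder` flow and the public Azure cloud; it is an editor-provided example under those assumptions, not part of the generated changelog.

```java
import com.azure.core.credential.TokenCredential;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.resourcemanager.datafactory.DataFactoryManager;

public final class DataFactoryManagerSample {
    public static void main(String[] args) {
        // Target the public Azure cloud; tenant and subscription are resolved from the environment.
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);

        // DefaultAzureCredential picks up environment variables, managed identity, or a developer sign-in.
        TokenCredential credential = new DefaultAzureCredentialBuilder()
            .authorityHost(profile.getEnvironment().getActiveDirectoryEndpoint())
            .build();

        // Entry point for the DataFactory management operations affected by the changes below.
        DataFactoryManager manager = DataFactoryManager.authenticate(credential, profile);
    }
}
```

The breaking and added APIs listed below (for example on `models.CredentialOperations` and `models.ManagedIdentityCredentialResource`) are reached through this manager instance.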
### Breaking Changes -### Bugs Fixed +* `models.ManagedIdentityCredentialResource$Update` was removed + +* `models.ManagedIdentityCredentialResource$DefinitionStages` was removed + +* `models.ManagedIdentityCredentialResource$Definition` was removed + +* `models.ManagedIdentityCredentialResource$UpdateStages` was removed + +* `models.ScriptType` was removed + +#### `models.CredentialOperations` was modified + +* `define(java.lang.String)` was removed +* `getById(java.lang.String)` was removed +* `deleteById(java.lang.String)` was removed +* `getByIdWithResponse(java.lang.String,java.lang.String,com.azure.core.util.Context)` was removed +* `deleteByIdWithResponse(java.lang.String,com.azure.core.util.Context)` was removed +* `models.ManagedIdentityCredentialResource get(java.lang.String,java.lang.String,java.lang.String)` -> `models.CredentialResource get(java.lang.String,java.lang.String,java.lang.String)` + +#### `models.ScriptActivityScriptBlock` was modified + +* `models.ScriptType type()` -> `java.lang.Object type()` +* `withType(models.ScriptType)` was removed + +#### `models.ManagedIdentityCredentialResource` was modified + +* `refresh()` was removed +* `update()` was removed +* `etag()` was removed +* `innerModel()` was removed +* `type()` was removed +* `resourceGroupName()` was removed +* `id()` was removed +* `refresh(com.azure.core.util.Context)` was removed +* `name()` was removed +* `models.ManagedIdentityCredential properties()` -> `models.ManagedIdentityCredential properties()` + +### Features Added + +* `models.CredentialResource` was added + +* `models.ServicePrincipalCredentialResource` was added + +#### `models.CosmosDbMongoDbApiSource` was modified + +* `type()` was added + +#### `models.SqlMISink` was modified + +* `type()` was added + +#### `models.CredentialOperations` was modified + +* `createOrUpdate(java.lang.String,java.lang.String,java.lang.String,fluent.models.CredentialResourceInner)` was added +* `createOrUpdateWithResponse(java.lang.String,java.lang.String,java.lang.String,fluent.models.CredentialResourceInner,java.lang.String,com.azure.core.util.Context)` was added + +#### `models.QuickBooksSource` was modified + +* `type()` was added + +#### `models.SapEccLinkedService` was modified + +* `type()` was added + +#### `models.SmartsheetLinkedService` was modified + +* `type()` was added + +#### `models.TarReadSettings` was modified + +* `type()` was added + +#### `models.TwilioLinkedService` was modified + +* `type()` was added + +#### `models.Activity` was modified + +* `type()` was added + +#### `models.HBaseLinkedService` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreLocation` was modified + +* `type()` was added + +#### `models.MicrosoftAccessTableDataset` was modified + +* `type()` was added + +#### `models.DrillLinkedService` was modified + +* `type()` was added + +#### `models.GoogleBigQuerySource` was modified + +* `type()` was added + +#### `models.ServiceNowObjectDataset` was modified + +* `type()` was added + +#### `models.LinkedIntegrationRuntimeKeyAuthorization` was modified + +* `authorizationType()` was added + +#### `models.WarehouseTableDataset` was modified + +* `type()` was added + +#### `models.CopySink` was modified + +* `type()` was added + +#### `models.SnowflakeLinkedService` was modified + +* `type()` was added + +#### `models.FileShareDataset` was modified + +* `type()` was added + +#### `models.SapOdpLinkedService` was modified + +* `type()` was added + +#### `models.WebLinkedServiceTypeProperties` was 
modified + +* `authenticationType()` was added + +#### `models.DelimitedTextSource` was modified + +* `type()` was added + +#### `models.Trigger` was modified + +* `type()` was added + +#### `models.MongoDbAtlasLinkedService` was modified + +* `type()` was added + +#### `models.IntegrationRuntime` was modified + +* `type()` was added + +#### `models.WebBasicAuthentication` was modified + +* `authenticationType()` was added + +#### `models.EloquaObjectDataset` was modified + +* `type()` was added + +#### `models.CustomDataset` was modified + +* `type()` was added + +#### `models.FileServerWriteSettings` was modified + +* `type()` was added + +#### `models.OdbcTableDataset` was modified + +* `type()` was added + +#### `models.TumblingWindowTrigger` was modified + +* `type()` was added + +#### `models.AzureFileStorageLocation` was modified + +* `type()` was added + +#### `models.SapOpenHubLinkedService` was modified + +* `type()` was added + +#### `models.AzureFileStorageWriteSettings` was modified + +* `type()` was added + +#### `models.SsisProject` was modified + +* `type()` was added + +#### `models.SapHanaTableDataset` was modified + +* `type()` was added + +#### `models.SalesforceSource` was modified + +* `type()` was added + +#### `models.SqlServerSink` was modified + +* `type()` was added + +#### `models.CosmosDbSqlApiSink` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudSource` was modified + +* `type()` was added + +#### `models.HubspotObjectDataset` was modified + +* `type()` was added + +#### `models.NetezzaSource` was modified + +* `type()` was added + +#### `models.AmazonMwsObjectDataset` was modified + +* `type()` was added + +#### `models.VerticaLinkedService` was modified + +* `type()` was added + +#### `models.DatasetStorageFormat` was modified + +* `type()` was added + +#### `models.HDInsightOnDemandLinkedService` was modified + +* `type()` was added + +#### `models.OrcFormat` was modified + +* `type()` was added + +#### `models.TumblingWindowTriggerDependencyReference` was modified + +* `type()` was added + +#### `models.AzureTableSink` was modified + +* `type()` was added + +#### `models.DatasetLocation` was modified + +* `type()` was added + +#### `models.PostgreSqlV2TableDataset` was modified + +* `type()` was added + +#### `models.AzureTableStorageLinkedService` was modified + +* `type()` was added + +#### `models.MySqlSource` was modified + +* `type()` was added + +#### `models.FileServerLinkedService` was modified + +* `type()` was added + +#### `models.SqlMISource` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreReadSettings` was modified + +* `type()` was added + +#### `models.SelfDependencyTumblingWindowTriggerReference` was modified + +* `type()` was added + +#### `models.ScriptActivity` was modified + +* `type()` was added + +#### `models.ParquetFormat` was modified + +* `type()` was added + +#### `models.InformixTableDataset` was modified + +* `type()` was added + +#### `models.GoogleBigQueryObjectDataset` was modified + +* `type()` was added + +#### `models.OracleServiceCloudObjectDataset` was modified + +* `type()` was added + +#### `models.MongoDbCollectionDataset` was modified + +* `type()` was added + +#### `models.HDInsightSparkActivity` was modified + +* `type()` was added + +#### `models.AmazonS3LinkedService` was modified + +* `type()` was added + +#### `models.NetezzaLinkedService` was modified + +* `type()` was added + +#### `models.DocumentDbCollectionSink` was modified + +* `type()` was added + +#### 
`models.JsonFormat` was modified + +* `type()` was added -### Other Changes +#### `models.DependencyReference` was modified + +* `type()` was added + +#### `models.ManagedIdentityCredential` was modified + +* `type()` was added + +#### `models.FormatReadSettings` was modified + +* `type()` was added + +#### `models.MagentoLinkedService` was modified + +* `type()` was added + +#### `models.InformixLinkedService` was modified + +* `type()` was added + +#### `models.EnvironmentVariableSetup` was modified + +* `type()` was added + +#### `models.WebActivity` was modified + +* `type()` was added + +#### `models.AzureMariaDBTableDataset` was modified + +* `type()` was added + +#### `models.VerticaTableDataset` was modified + +* `type()` was added + +#### `models.PaypalSource` was modified + +* `type()` was added + +#### `models.ParquetSink` was modified + +* `type()` was added + +#### `models.ServicePrincipalCredential` was modified + +* `type()` was added + +#### `models.HttpReadSettings` was modified + +* `type()` was added + +#### `models.GetMetadataActivity` was modified + +* `type()` was added + +#### `models.SalesforceObjectDataset` was modified + +* `type()` was added + +#### `models.SalesforceV2Source` was modified + +* `type()` was added + +#### `models.ZohoSource` was modified + +* `type()` was added + +#### `models.GoogleAdWordsObjectDataset` was modified + +* `type()` was added + +#### `models.FilterActivity` was modified + +* `type()` was added + +#### `models.Dataset` was modified + +* `type()` was added + +#### `models.Office365Source` was modified + +* `type()` was added + +#### `models.HttpLinkedService` was modified + +* `type()` was added + +#### `models.SapOpenHubSource` was modified + +* `type()` was added + +#### `models.HttpServerLocation` was modified + +* `type()` was added + +#### `models.GreenplumLinkedService` was modified + +* `type()` was added + +#### `models.ParquetWriteSettings` was modified + +* `type()` was added + +#### `models.PostgreSqlLinkedService` was modified + +* `type()` was added + +#### `models.ForEachActivity` was modified + +* `type()` was added + +#### `models.BinaryDataset` was modified + +* `type()` was added + +#### `models.CommonDataServiceForAppsLinkedService` was modified + +* `type()` was added + +#### `models.ExecuteWranglingDataflowActivity` was modified + +* `type()` was added + +#### `models.SquareLinkedService` was modified + +* `type()` was added + +#### `models.FactoryRepoConfiguration` was modified + +* `type()` was added + +#### `models.LakeHouseTableDataset` was modified + +* `type()` was added + +#### `models.AmazonS3CompatibleLocation` was modified + +* `type()` was added + +#### `models.AzurePostgreSqlTableDataset` was modified + +* `type()` was added + +#### `models.SnowflakeDataset` was modified + +* `type()` was added + +#### `models.HttpSource` was modified + +* `type()` was added + +#### `models.DynamicsCrmSink` was modified + +* `type()` was added + +#### `models.AzureBlobFSReadSettings` was modified + +* `type()` was added + +#### `models.HiveSource` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudV2LinkedService` was modified + +* `type()` was added + +#### `models.BinarySource` was modified + +* `type()` was added + +#### `models.AmazonMwsSource` was modified + +* `type()` was added + +#### `models.CassandraTableDataset` was modified + +* `type()` was added + +#### `models.SalesforceSink` was modified + +* `type()` was added + +#### `models.MicrosoftAccessSink` was modified + +* `type()` was 
added + +#### `models.DelimitedTextWriteSettings` was modified + +* `type()` was added + +#### `models.InformixSink` was modified + +* `type()` was added + +#### `models.AzureTableSource` was modified + +* `type()` was added + +#### `models.AzureDatabricksDeltaLakeExportCommand` was modified + +* `type()` was added + +#### `models.AppFiguresLinkedService` was modified + +* `type()` was added + +#### `models.AmazonRdsForOracleSource` was modified + +* `type()` was added + +#### `models.ComponentSetup` was modified + +* `type()` was added + +#### `models.AzureBatchLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbAtlasCollectionDataset` was modified + +* `type()` was added + +#### `models.DatabricksNotebookActivity` was modified + +* `type()` was added + +#### `models.AzureDataExplorerTableDataset` was modified + +* `type()` was added + +#### `models.DocumentDbCollectionDataset` was modified + +* `type()` was added + +#### `models.GoogleBigQueryV2Source` was modified + +* `type()` was added + +#### `models.PostgreSqlV2Source` was modified + +* `type()` was added + +#### `models.SapHanaLinkedService` was modified + +* `type()` was added + +#### `models.WaitActivity` was modified + +* `type()` was added + +#### `models.OracleCloudStorageLinkedService` was modified + +* `type()` was added + +#### `models.CopyActivity` was modified + +* `type()` was added + +#### `models.SnowflakeSink` was modified + +* `type()` was added + +#### `models.CmdkeySetup` was modified + +* `type()` was added + +#### `models.WarehouseSource` was modified + +* `type()` was added + +#### `models.AzurePostgreSqlSource` was modified + +* `type()` was added + +#### `models.IfConditionActivity` was modified + +* `type()` was added + +#### `models.SqlDWSink` was modified + +* `type()` was added + +#### `models.PrestoSource` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreSource` was modified + +* `type()` was added + +#### `models.AzureSynapseArtifactsLinkedService` was modified + +* `type()` was added + +#### `models.TeamDeskLinkedService` was modified + +* `type()` was added + +#### `models.AsanaLinkedService` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudV2ObjectDataset` was modified + +* `type()` was added + +#### `models.ShopifyObjectDataset` was modified + +* `type()` was added + +#### `models.ExecutionActivity` was modified + +* `type()` was added + +#### `models.SapTableResourceDataset` was modified + +* `type()` was added + +#### `models.WebhookActivity` was modified + +* `type()` was added + +#### `models.SapCloudForCustomerSource` was modified + +* `type()` was added + +#### `models.Office365Dataset` was modified + +* `type()` was added + +#### `models.MicrosoftAccessSource` was modified + +* `type()` was added + +#### `models.ServiceNowV2LinkedService` was modified + +* `type()` was added + +#### `models.ValidationActivity` was modified + +* `type()` was added + +#### `models.LakeHouseLinkedService` was modified + +* `type()` was added + +#### `models.ConcurLinkedService` was modified + +* `type()` was added + +#### `models.SquareObjectDataset` was modified + +* `type()` was added + +#### `models.BlobSink` was modified + +* `type()` was added + +#### `models.JsonWriteSettings` was modified + +* `type()` was added + +#### `models.AvroSink` was modified + +* `type()` was added + +#### `models.CustomActivity` was modified + +* `type()` was added + +#### 
`models.OracleTableDataset` was modified + +* `type()` was added + +#### `models.AzureMariaDBSource` was modified + +* `type()` was added + +#### `models.XeroObjectDataset` was modified + +* `type()` was added + +#### `models.CustomEventsTrigger` was modified + +* `type()` was added + +#### `models.SynapseNotebookActivity` was modified + +* `type()` was added + +#### `models.AmazonRedshiftTableDataset` was modified + +* `type()` was added + +#### `models.MariaDBLinkedService` was modified + +* `type()` was added + +#### `models.XeroLinkedService` was modified + +* `type()` was added + +#### `models.SnowflakeSource` was modified + +* `type()` was added + +#### `models.AzurePostgreSqlSink` was modified + +* `type()` was added + +#### `models.AzureSqlDWTableDataset` was modified + +* `type()` was added + +#### `models.OrcWriteSettings` was modified + +* `type()` was added + +#### `models.ServiceNowV2ObjectDataset` was modified + +* `type()` was added + +#### `models.GoogleAdWordsSource` was modified + +* `type()` was added + +#### `models.HiveObjectDataset` was modified + +* `type()` was added + +#### `models.SapOdpResourceDataset` was modified + +* `type()` was added + +#### `models.WarehouseSink` was modified + +* `type()` was added + +#### `models.TeradataTableDataset` was modified + +* `type()` was added + +#### `models.ConcurObjectDataset` was modified + +* `type()` was added + +#### `models.MultiplePipelineTrigger` was modified + +* `type()` was added + +#### `models.ServiceNowV2Source` was modified + +* `type()` was added + +#### `models.CouchbaseTableDataset` was modified + +* `type()` was added + +#### `models.OdbcSink` was modified + +* `type()` was added + +#### `models.MariaDBSource` was modified + +* `type()` was added + +#### `models.AzureFileStorageReadSettings` was modified + +* `type()` was added + +#### `models.RestSource` was modified + +* `type()` was added + +#### `models.FtpServerLocation` was modified + +* `type()` was added + +#### `models.AzureMLServiceLinkedService` was modified + +* `type()` was added + +#### `models.SapEccSource` was modified + +* `type()` was added + +#### `models.FtpReadSettings` was modified + +* `type()` was added + +#### `models.SapOpenHubTableDataset` was modified + +* `type()` was added + +#### `models.CassandraSource` was modified + +* `type()` was added + +#### `models.RerunTumblingWindowTrigger` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreDataset` was modified + +* `type()` was added + +#### `models.SapTableSource` was modified + +* `type()` was added + +#### `models.CommonDataServiceForAppsSource` was modified + +* `type()` was added + +#### `models.ScriptActivityScriptBlock` was modified + +* `withType(java.lang.Object)` was added + +#### `models.OracleSource` was modified + +* `type()` was added + +#### `models.LakeHouseWriteSettings` was modified + +* `type()` was added + +#### `models.DynamicsAXLinkedService` was modified + +* `type()` was added + +#### `models.AzureBlobFSWriteSettings` was modified + +* `type()` was added + +#### `models.FileServerReadSettings` was modified + +* `type()` was added + +#### `models.HdfsSource` was modified + +* `type()` was added + +#### `models.TabularTranslator` was modified + +* `type()` was added + +#### `models.CopySource` was modified + +* `type()` was added + +#### `models.MongoDbV2LinkedService` was modified + +* `type()` was added + +#### `models.MySqlTableDataset` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudV2Source` was modified + +* 
`type()` was added + +#### `models.PrestoLinkedService` was modified + +* `type()` was added + +#### `models.CosmosDbMongoDbApiCollectionDataset` was modified + +* `type()` was added + +#### `models.WebSource` was modified + +* `type()` was added + +#### `models.DynamicsCrmEntityDataset` was modified + +* `type()` was added + +#### `models.AzureMLBatchExecutionActivity` was modified + +* `type()` was added + +#### `models.AmazonRdsForOracleLinkedService` was modified + +* `type()` was added + +#### `models.Db2TableDataset` was modified + +* `type()` was added + +#### `models.DrillSource` was modified + +* `type()` was added + +#### `models.AzureStorageLinkedService` was modified + +* `type()` was added + +#### `models.ManagedIdentityCredentialResource` was modified + +* `validate()` was added +* `withId(java.lang.String)` was added +* `withProperties(models.ManagedIdentityCredential)` was added +* `properties()` was added + +#### `models.SetVariableActivity` was modified + +* `type()` was added + +#### `models.SalesforceLinkedService` was modified + +* `type()` was added + +#### `models.RestResourceDataset` was modified + +* `type()` was added + +#### `models.SapBWLinkedService` was modified + +* `type()` was added + +#### `models.SsisFolder` was modified + +* `type()` was added + +#### `models.SharePointOnlineListResourceDataset` was modified + +* `type()` was added + +#### `models.ZohoLinkedService` was modified + +* `type()` was added + +#### `models.SapTableLinkedService` was modified + +* `type()` was added + +#### `models.ServiceNowSource` was modified + +* `type()` was added + +#### `models.MongoDbSource` was modified + +* `type()` was added + +#### `models.JiraLinkedService` was modified + +* `type()` was added + +#### `models.PostgreSqlTableDataset` was modified + +* `type()` was added + +#### `models.LakeHouseReadSettings` was modified + +* `type()` was added + +#### `models.JsonReadSettings` was modified + +* `type()` was added + +#### `models.DynamicsAXResourceDataset` was modified + +* `type()` was added + +#### `models.InformixSource` was modified + +* `type()` was added + +#### `models.AzureFileStorageLinkedService` was modified + +* `type()` was added + +#### `models.SparkObjectDataset` was modified + +* `type()` was added + +#### `models.AvroDataset` was modified + +* `type()` was added + +#### `models.ParquetSource` was modified + +* `type()` was added + +#### `models.SftpWriteSettings` was modified + +* `type()` was added + +#### `models.ScheduleTrigger` was modified + +* `type()` was added + +#### `models.ConcurSource` was modified + +* `type()` was added + +#### `models.DataLakeAnalyticsUsqlActivity` was modified + +* `type()` was added + +#### `models.AmazonRdsForSqlServerSource` was modified + +* `type()` was added + +#### `models.ImportSettings` was modified + +* `type()` was added + +#### `models.FactoryGitHubConfiguration` was modified + +* `type()` was added + +#### `models.OrcSink` was modified + +* `type()` was added + +#### `models.PhoenixSource` was modified + +* `type()` was added + +#### `models.CosmosDbMongoDbApiSink` was modified + +* `type()` was added + +#### `models.JiraObjectDataset` was modified + +* `type()` was added + +#### `models.BinaryReadSettings` was modified + +* `type()` was added + +#### `models.DataFlow` was modified + +* `type()` was added + +#### `models.ParquetDataset` was modified + +* `type()` was added + +#### `models.AzureSqlDatabaseLinkedService` was modified + +* `type()` was added + +#### `models.AzureSqlDWLinkedService` was 
modified + +* `type()` was added + +#### `models.SsisPackage` was modified + +* `type()` was added + +#### `models.AzureMLExecutePipelineActivity` was modified + +* `type()` was added + +#### `models.SalesforceMarketingCloudSource` was modified + +* `type()` was added + +#### `models.WranglingDataFlow` was modified + +* `type()` was added + +#### `models.DatabricksSparkJarActivity` was modified + +* `type()` was added + +#### `models.ShopifyLinkedService` was modified + +* `type()` was added + +#### `models.TeradataSource` was modified + +* `type()` was added + +#### `models.DrillTableDataset` was modified + +* `type()` was added + +#### `models.PrestoObjectDataset` was modified + +* `type()` was added + +#### `models.GreenplumSource` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudSink` was modified + +* `type()` was added + +#### `models.SalesforceV2LinkedService` was modified + +* `type()` was added + +#### `models.TeradataLinkedService` was modified + +* `type()` was added + +#### `models.SapBwSource` was modified + +* `type()` was added + +#### `models.OdbcSource` was modified + +* `type()` was added + +#### `models.ManagedIntegrationRuntime` was modified + +* `type()` was added + +#### `models.DelimitedTextSink` was modified + +* `type()` was added + +#### `models.DynamicsCrmSource` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreWriteSettings` was modified + +* `type()` was added + +#### `models.SapHanaSource` was modified + +* `type()` was added + +#### `models.DelimitedTextReadSettings` was modified + +* `type()` was added + +#### `models.SparkLinkedService` was modified + +* `type()` was added + +#### `models.SapCloudForCustomerSink` was modified + +* `type()` was added + +#### `models.OracleServiceCloudLinkedService` was modified + +* `type()` was added + +#### `models.SnowflakeExportCopyCommand` was modified + +* `type()` was added + +#### `models.AzureBlobStorageLinkedService` was modified + +* `type()` was added + +#### `models.SnowflakeV2Sink` was modified + +* `type()` was added + +#### `models.ODataResourceDataset` was modified + +* `type()` was added + +#### `models.AzureBlobFSSource` was modified + +* `type()` was added + +#### `models.BlobEventsTrigger` was modified + +* `type()` was added + +#### `models.AzureBlobStorageLocation` was modified + +* `type()` was added + +#### `models.TriggerDependencyReference` was modified + +* `type()` was added + +#### `models.SquareSource` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreLinkedService` was modified + +* `type()` was added + +#### `models.CustomDataSourceLinkedService` was modified + +* `type()` was added + +#### `models.WebAnonymousAuthentication` was modified + +* `authenticationType()` was added + +#### `models.ServiceNowLinkedService` was modified + +* `type()` was added + +#### `models.OrcDataset` was modified + +* `type()` was added + +#### `models.SqlServerTableDataset` was modified + +* `type()` was added + +#### `models.DynamicsSource` was modified + +* `type()` was added + +#### `models.AzureMySqlTableDataset` was modified + +* `type()` was added + +#### `models.AzureSearchLinkedService` was modified + +* `type()` was added + +#### `models.HBaseObjectDataset` was modified + +* `type()` was added + +#### `models.AmazonS3Location` was modified + +* `type()` was added + +#### `models.DynamicsSink` was modified + +* `type()` was added + +#### `models.StoreWriteSettings` was modified + +* `type()` was added + +#### `models.ExcelDataset` was 
modified + +* `type()` was added + +#### `models.JsonDataset` was modified + +* `type()` was added + +#### `models.ExecuteSsisPackageActivity` was modified + +* `type()` was added + +#### `models.ImpalaSource` was modified + +* `type()` was added + +#### `models.SybaseTableDataset` was modified + +* `type()` was added + +#### `models.SsisObjectMetadata` was modified + +* `type()` was added + +#### `models.UntilActivity` was modified + +* `type()` was added + +#### `models.AzureBlobFSLocation` was modified + +* `type()` was added + +#### `models.DeleteActivity` was modified + +* `type()` was added + +#### `models.AzureMySqlLinkedService` was modified + +* `type()` was added + +#### `models.LakeHouseLocation` was modified + +* `type()` was added + +#### `models.Credential` was modified + +* `type()` was added + +#### `models.AzureQueueSink` was modified + +* `type()` was added + +#### `models.JsonSink` was modified + +* `type()` was added + +#### `models.SynapseSparkJobDefinitionActivity` was modified + +* `type()` was added + +#### `models.HubspotLinkedService` was modified + +* `type()` was added + +#### `models.AzureSqlMITableDataset` was modified + +* `type()` was added + +#### `models.SalesforceV2ObjectDataset` was modified + +* `type()` was added + +#### `models.ZohoObjectDataset` was modified + +* `type()` was added + +#### `models.SapOdpSource` was modified + +* `type()` was added + +#### `models.SnowflakeV2LinkedService` was modified + +* `type()` was added + +#### `models.AzureTableDataset` was modified + +* `type()` was added + +#### `models.SapEccResourceDataset` was modified + +* `type()` was added + +#### `models.ControlActivity` was modified + +* `type()` was added + +#### `models.AmazonS3CompatibleReadSettings` was modified + +* `type()` was added + +#### `models.CommonDataServiceForAppsEntityDataset` was modified + +* `type()` was added + +#### `models.MappingDataFlow` was modified + +* `type()` was added + +#### `models.AzureBlobStorageWriteSettings` was modified + +* `type()` was added + +#### `models.HDInsightPigActivity` was modified + +* `type()` was added + +#### `models.SapBwCubeDataset` was modified + +* `type()` was added + +#### `models.CustomSetupBase` was modified + +* `type()` was added + +#### `models.SybaseSource` was modified + +* `type()` was added + +#### `models.JiraSource` was modified + +* `type()` was added + +#### `models.SftpLocation` was modified + +* `type()` was added + +#### `models.AzureBlobDataset` was modified + +* `type()` was added + +#### `models.DynamicsLinkedService` was modified + +* `type()` was added + +#### `models.WebTableDataset` was modified + +* `type()` was added + +#### `models.CassandraLinkedService` was modified + +* `type()` was added + +#### `models.AvroFormat` was modified + +* `type()` was added + +#### `models.SftpReadSettings` was modified + +* `type()` was added + +#### `models.ODataLinkedService` was modified + +* `type()` was added + +#### `models.GoogleAdWordsLinkedService` was modified + +* `type()` was added + +#### `models.HDInsightLinkedService` was modified + +* `type()` was added + +#### `models.GoogleCloudStorageLocation` was modified + +* `type()` was added + +#### `models.AzureBlobStorageReadSettings` was modified + +* `type()` was added + +#### `models.ResponsysSource` was modified + +* `type()` was added + +#### `models.RestServiceLinkedService` was modified + +* `type()` was added + +#### `models.CosmosDbLinkedService` was modified + +* `type()` was added + +#### `models.AzureBlobFSLinkedService` was 
modified + +* `type()` was added + +#### `models.JsonSource` was modified + +* `type()` was added + +#### `models.SapCloudForCustomerResourceDataset` was modified + +* `type()` was added + +#### `models.SparkSource` was modified + +* `type()` was added + +#### `models.RestSink` was modified + +* `type()` was added + +#### `models.CopyTranslator` was modified + +* `type()` was added + +#### `models.MongoDbAtlasSink` was modified + +* `type()` was added + +#### `models.HBaseSource` was modified + +* `type()` was added + +#### `models.SelfHostedIntegrationRuntime` was modified + +* `type()` was added + +#### `models.BinarySink` was modified + +* `type()` was added + +#### `models.AmazonS3ReadSettings` was modified + +* `type()` was added + +#### `models.OracleCloudStorageLocation` was modified + +* `type()` was added + +#### `models.VerticaSource` was modified + +* `type()` was added + +#### `models.SqlServerSource` was modified + +* `type()` was added + +#### `models.SwitchActivity` was modified + +* `type()` was added + +#### `models.AzureDataLakeAnalyticsLinkedService` was modified + +* `type()` was added + +#### `models.MariaDBTableDataset` was modified + +* `type()` was added + +#### `models.BlobSource` was modified + +* `type()` was added + +#### `models.HiveLinkedService` was modified + +* `type()` was added + +#### `models.ExportSettings` was modified + +* `type()` was added + +#### `models.SnowflakeV2Dataset` was modified + +* `type()` was added + +#### `models.StoreReadSettings` was modified + +* `type()` was added + +#### `models.AzureDatabricksDeltaLakeSink` was modified + +* `type()` was added + +#### `models.AzureDatabricksDeltaLakeLinkedService` was modified + +* `type()` was added + +#### `models.HttpDataset` was modified + +* `type()` was added + +#### `models.HdfsLocation` was modified + +* `type()` was added + +#### `models.LakeHouseTableSource` was modified + +* `type()` was added + +#### `models.AmazonRedshiftSource` was modified + +* `type()` was added + +#### `models.GoogleSheetsLinkedService` was modified + +* `type()` was added + +#### `models.MarketoSource` was modified + +* `type()` was added + +#### `models.ZipDeflateReadSettings` was modified + +* `type()` was added + +#### `models.EloquaLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbV2CollectionDataset` was modified + +* `type()` was added + +#### `models.CompressionReadSettings` was modified + +* `type()` was added + +#### `models.AzureMLUpdateResourceActivity` was modified + +* `type()` was added + +#### `models.NetezzaTableDataset` was modified + +* `type()` was added + +#### `models.SqlDWSource` was modified + +* `type()` was added + +#### `models.AzureFunctionActivity` was modified + +* `type()` was added + +#### `models.IntegrationRuntimeStatus` was modified + +* `type()` was added + +#### `models.AzPowerShellSetup` was modified + +* `type()` was added + +#### `models.ExecutePipelineActivity` was modified + +* `type()` was added + +#### `models.XeroSource` was modified + +* `type()` was added + +#### `models.AzureMySqlSource` was modified + +* `type()` was added + +#### `models.ODataSource` was modified + +* `type()` was added + +#### `models.AzureSearchIndexDataset` was modified + +* `type()` was added + +#### `models.AzureDatabricksDeltaLakeImportCommand` was modified + +* `type()` was added + +#### `models.HDInsightHiveActivity` was modified + +* `type()` was added + +#### `models.SapCloudForCustomerLinkedService` was modified + +* `type()` was added + +#### 
`models.GoogleCloudStorageLinkedService` was modified + +* `type()` was added + +#### `models.FailActivity` was modified + +* `type()` was added + +#### `models.XmlSource` was modified + +* `type()` was added + +#### `models.OdbcLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbAtlasSource` was modified + +* `type()` was added + +#### `models.TabularSource` was modified + +* `type()` was added + +#### `models.FileSystemSink` was modified + +* `type()` was added + +#### `models.LinkedService` was modified + +* `type()` was added + +#### `models.WarehouseLinkedService` was modified + +* `type()` was added + +#### `models.RelationalSource` was modified + +* `type()` was added + +#### `models.HdfsLinkedService` was modified + +* `type()` was added + +#### `models.SalesforceMarketingCloudObjectDataset` was modified + +* `type()` was added + +#### `models.SnowflakeV2Source` was modified + +* `type()` was added + +#### `models.TextFormat` was modified + +* `type()` was added + +#### `models.PaypalObjectDataset` was modified + +* `type()` was added + +#### `models.AzurePostgreSqlLinkedService` was modified + +* `type()` was added + +#### `models.SharePointOnlineListSource` was modified + +* `type()` was added + +#### `models.BlobTrigger` was modified + +* `type()` was added + +#### `models.OracleSink` was modified + +* `type()` was added + +#### `models.SnowflakeImportCopyCommand` was modified + +* `type()` was added + +#### `models.WebClientCertificateAuthentication` was modified + +* `authenticationType()` was added + +#### `models.AzureDatabricksDeltaLakeDataset` was modified + +* `type()` was added + +#### `models.AzureMLLinkedService` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudV2Sink` was modified + +* `type()` was added + +#### `models.OracleLinkedService` was modified + +* `type()` was added + +#### `models.SqlSink` was modified + +* `type()` was added + +#### `models.AzureFunctionLinkedService` was modified + +* `type()` was added + +#### `models.AzureSqlSink` was modified + +* `type()` was added + +#### `models.AzureBlobFSDataset` was modified + +* `type()` was added + +#### `models.PostgreSqlSource` was modified + +* `type()` was added + +#### `models.HdfsReadSettings` was modified + +* `type()` was added + +#### `models.AmazonS3CompatibleLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbV2Source` was modified + +* `type()` was added + +#### `models.DelimitedTextDataset` was modified + +* `type()` was added + +#### `models.PaypalLinkedService` was modified + +* `type()` was added + +#### `models.ParquetReadSettings` was modified + +* `type()` was added + +#### `models.SalesforceMarketingCloudLinkedService` was modified + +* `type()` was added + +#### `models.AzureDataExplorerSource` was modified + +* `type()` was added + +#### `models.TarGZipReadSettings` was modified + +* `type()` was added + +#### `models.AzureSqlTableDataset` was modified + +* `type()` was added + +#### `models.FormatWriteSettings` was modified + +* `type()` was added + +#### `models.AzureDatabricksLinkedService` was modified + +* `type()` was added + +#### `models.GoogleBigQueryV2LinkedService` was modified + +* `type()` was added + +#### `models.AmazonS3Dataset` was modified + +* `type()` was added + +#### `models.SelfHostedIntegrationRuntimeStatus` was modified + +* `type()` was added + +#### `models.PostgreSqlV2LinkedService` was modified + +* `type()` was added + +#### `models.PhoenixObjectDataset` was modified + +* `type()` was 
added + +#### `models.MarketoObjectDataset` was modified + +* `type()` was added + +#### `models.SqlSource` was modified + +* `type()` was added + +#### `models.QuickbaseLinkedService` was modified + +* `type()` was added + +#### `models.ResponsysObjectDataset` was modified + +* `type()` was added + +#### `models.AzureDatabricksDeltaLakeSource` was modified + +* `type()` was added + +#### `models.MicrosoftAccessLinkedService` was modified + +* `type()` was added + +#### `models.CommonDataServiceForAppsSink` was modified + +* `type()` was added + +#### `models.DatabricksSparkPythonActivity` was modified + +* `type()` was added + +#### `models.OracleServiceCloudSource` was modified + +* `type()` was added + +#### `models.CosmosDbSqlApiCollectionDataset` was modified + +* `type()` was added + +#### `models.SecretBase` was modified + +* `type()` was added + +#### `models.AzureMariaDBLinkedService` was modified + +* `type()` was added + +#### `models.OrcSource` was modified + +* `type()` was added + +#### `models.LinkedIntegrationRuntimeRbacAuthorization` was modified + +* `authorizationType()` was added + +#### `models.ExecuteDataFlowActivity` was modified + +* `type()` was added + +#### `models.SqlServerLinkedService` was modified + +* `type()` was added + +#### `models.DynamicsEntityDataset` was modified + +* `type()` was added + +#### `models.DocumentDbCollectionSource` was modified + +* `type()` was added + +#### `models.AzureBlobFSSink` was modified + +* `type()` was added + +#### `models.AmazonRedshiftLinkedService` was modified + +* `type()` was added + +#### `models.CosmosDbSqlApiSource` was modified + +* `type()` was added + +#### `models.ResponsysLinkedService` was modified + +* `type()` was added + +#### `models.ImpalaLinkedService` was modified + +* `type()` was added + +#### `models.FileSystemSource` was modified + +* `type()` was added + +#### `models.FileServerLocation` was modified + +* `type()` was added + +#### `models.SftpServerLinkedService` was modified + +* `type()` was added + +#### `models.AzureKeyVaultSecretReference` was modified + +* `type()` was added + +#### `models.MySqlLinkedService` was modified + +* `type()` was added + +#### `models.MagentoSource` was modified + +* `type()` was added + +#### `models.LakeHouseTableSink` was modified + +* `type()` was added + +#### `models.CouchbaseSource` was modified + +* `type()` was added + +#### `models.HDInsightStreamingActivity` was modified + +* `type()` was added + +#### `models.PhoenixLinkedService` was modified + +* `type()` was added + +#### `models.Db2LinkedService` was modified + +* `type()` was added + +#### `models.DynamicsCrmLinkedService` was modified + +* `type()` was added + +#### `models.LookupActivity` was modified + +* `type()` was added + +#### `models.FtpServerLinkedService` was modified + +* `type()` was added + +#### `models.XmlDataset` was modified + +* `type()` was added + +#### `models.QuickBooksLinkedService` was modified + +* `type()` was added + +#### `models.GreenplumTableDataset` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudLinkedService` was modified + +* `type()` was added + +#### `models.SecureString` was modified + +* `type()` was added + +#### `models.AvroSource` was modified + +* `type()` was added + +#### `models.GoogleCloudStorageReadSettings` was modified + +* `type()` was added + +#### `models.SalesforceServiceCloudObjectDataset` was modified + +* `type()` was added + +#### `models.SalesforceV2Sink` was modified + +* `type()` was added + +#### 
`models.AzureDataExplorerLinkedService` was modified + +* `type()` was added + +#### `models.Db2Source` was modified + +* `type()` was added + +#### `models.HubspotSource` was modified + +* `type()` was added + +#### `models.CouchbaseLinkedService` was modified + +* `type()` was added + +#### `models.MagentoObjectDataset` was modified + +* `type()` was added + +#### `models.SharePointOnlineListLinkedService` was modified + +* `type()` was added + +#### `models.AppendVariableActivity` was modified + +* `type()` was added + +#### `models.SybaseLinkedService` was modified + +* `type()` was added + +#### `models.SsisEnvironment` was modified + +* `type()` was added + +#### `models.FactoryVstsConfiguration` was modified + +* `type()` was added + +#### `models.Flowlet` was modified + +* `type()` was added + +#### `models.HDInsightMapReduceActivity` was modified + +* `type()` was added + +#### `models.AmazonMwsLinkedService` was modified + +* `type()` was added + +#### `models.MongoDbV2Sink` was modified + +* `type()` was added + +#### `models.QuickBooksObjectDataset` was modified + +* `type()` was added + +#### `models.CosmosDbMongoDbApiLinkedService` was modified + +* `type()` was added + +#### `models.RelationalTableDataset` was modified + +* `type()` was added + +#### `models.GoogleBigQueryLinkedService` was modified + +* `type()` was added + +#### `models.AmazonRdsForOracleTableDataset` was modified + +* `type()` was added + +#### `models.SqlServerStoredProcedureActivity` was modified + +* `type()` was added + +#### `models.AzureSqlSource` was modified + +* `type()` was added + +#### `models.DataworldLinkedService` was modified + +* `type()` was added + +#### `models.ExcelSource` was modified + +* `type()` was added + +#### `models.AzureKeyVaultLinkedService` was modified + +* `type()` was added + +#### `models.ManagedIntegrationRuntimeStatus` was modified + +* `type()` was added + +#### `models.OracleCloudStorageReadSettings` was modified + +* `type()` was added + +#### `models.AzureSearchIndexSink` was modified + +* `type()` was added + +#### `models.AzureDataExplorerCommandActivity` was modified + +* `type()` was added + +#### `models.AzureSqlMILinkedService` was modified + +* `type()` was added + +#### `models.LinkedIntegrationRuntimeType` was modified + +* `authorizationType()` was added + +#### `models.ImpalaObjectDataset` was modified + +* `type()` was added + +#### `models.AmazonRdsForSqlServerTableDataset` was modified + +* `type()` was added + +#### `models.AzureDataExplorerSink` was modified + +* `type()` was added + +#### `models.AzureMySqlSink` was modified + +* `type()` was added + +#### `models.Office365LinkedService` was modified + +* `type()` was added + +#### `models.XmlReadSettings` was modified + +* `type()` was added + +#### `models.EloquaSource` was modified + +* `type()` was added + +#### `models.AzureDataLakeStoreSink` was modified + +* `type()` was added + +#### `models.AvroWriteSettings` was modified + +* `type()` was added + +#### `models.ChainingTrigger` was modified + +* `type()` was added + +#### `models.AmazonRdsForSqlServerLinkedService` was modified + +* `type()` was added + +#### `models.WebLinkedService` was modified + +* `type()` was added + +#### `models.ZendeskLinkedService` was modified + +* `type()` was added + +#### `models.MarketoLinkedService` was modified + +* `type()` was added + +#### `models.ShopifySource` was modified + +* `type()` was added + +#### `models.GoogleBigQueryV2ObjectDataset` was modified + +* `type()` was added + +#### 
`models.DynamicsAXSource` was modified
+
+* `type()` was added
+
 ## 1.0.0-beta.27 (2024-03-14)
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/README.md b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
index ffb342b468abd..aaeaa8bb15257 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/README.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
@@ -32,7 +32,7 @@ Various documentation is available to help you get started
 <dependency>
     <groupId>com.azure.resourcemanager</groupId>
     <artifactId>azure-resourcemanager-datafactory</artifactId>
-    <version>1.0.0-beta.27</version>
+    <version>1.0.0-beta.28</version>
 </dependency>
 ```
 [//]: # ({x-version-update-end})
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md b/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
index 5f741e5933eac..8563cde04405c 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
@@ -182,8 +182,7 @@ import java.time.OffsetDateTime;
  */
 public final class ActivityRunsQueryByPipelineRunSamples {
     /*
-     * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
-     * ActivityRuns_QueryByPipelineRun.json
+     * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ActivityRuns_QueryByPipelineRun.json
      */
     /**
      * Sample code: ActivityRuns_QueryByPipelineRun.
@@ -192,11 +191,12 @@ public final class ActivityRunsQueryByPipelineRunSamples {
      */
     public static void
         activityRunsQueryByPipelineRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
-        manager.activityRuns().queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName",
-            "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
-            new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
-                .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")),
-            com.azure.core.util.Context.NONE);
+        manager.activityRuns()
+            .queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName",
+                "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
+                new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
+                    .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")),
+                com.azure.core.util.Context.NONE);
     }
 }
 ```
@@ -215,8 +215,7 @@ import java.util.List;
  */
 public final class ChangeDataCaptureCreateOrUpdateSamples {
     /*
-     * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
-     * ChangeDataCapture_Create.json
+     * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Create.json
      */
     /**
      * Sample code: ChangeDataCapture_Create.
      * @param manager Entry point to DataFactoryManager.
*/ public static void changeDataCaptureCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().define("exampleChangeDataCapture") + manager.changeDataCaptures() + .define("exampleChangeDataCapture") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withSourceConnectionsInfo((List) null) - .withTargetConnectionsInfo((List) null).withPolicy((MapperPolicy) null) + .withTargetConnectionsInfo((List) null) + .withPolicy((MapperPolicy) null) .withDescription( "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database with automapped and non-automapped mappings.") - .withAllowVNetOverride(false).create(); + .withAllowVNetOverride(false) + .create(); } /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Update.json */ /** * Sample code: ChangeDataCapture_Update. @@ -243,11 +244,16 @@ public final class ChangeDataCaptureCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - ChangeDataCaptureResource resource = manager.changeDataCaptures().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleChangeDataCapture", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withDescription( - "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.") - .withAllowVNetOverride(false).withStatus("Stopped").apply(); + ChangeDataCaptureResource resource = manager.changeDataCaptures() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withDescription( + "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.") + .withAllowVNetOverride(false) + .withStatus("Stopped") + .apply(); } } ``` @@ -260,8 +266,7 @@ public final class ChangeDataCaptureCreateOrUpdateSamples { */ public final class ChangeDataCaptureDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Delete.json */ /** * Sample code: ChangeDataCapture_Delete. @@ -269,8 +274,9 @@ public final class ChangeDataCaptureDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void changeDataCaptureDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } ``` @@ -283,9 +289,7 @@ public final class ChangeDataCaptureDeleteSamples { */ public final class ChangeDataCaptureGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get.json */ /** * Sample code: ChangeDataCapture_Get. @@ -293,8 +297,9 @@ public final class ChangeDataCaptureGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", null, com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null, + com.azure.core.util.Context.NONE); } } ``` @@ -307,8 +312,7 @@ public final class ChangeDataCaptureGetSamples { */ public final class ChangeDataCaptureListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_ListByFactory.json */ /** * Sample code: ChangeDataCapture_ListByFactory. @@ -317,8 +321,8 @@ public final class ChangeDataCaptureListByFactorySamples { */ public static void changeDataCaptureListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -331,8 +335,7 @@ public final class ChangeDataCaptureListByFactorySamples { */ public final class ChangeDataCaptureStartSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Start.json */ /** * Sample code: ChangeDataCapture_Start. @@ -340,8 +343,9 @@ public final class ChangeDataCaptureStartSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().startWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .startWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } ``` @@ -354,8 +358,7 @@ public final class ChangeDataCaptureStartSamples { */ public final class ChangeDataCaptureStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Status.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Status.json */ /** * Sample code: ChangeDataCapture_Start. @@ -363,8 +366,9 @@ public final class ChangeDataCaptureStatusSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().statusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .statusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } ``` @@ -377,8 +381,7 @@ public final class ChangeDataCaptureStatusSamples { */ public final class ChangeDataCaptureStopSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Stop.json */ /** * Sample code: ChangeDataCapture_Stop. @@ -386,8 +389,9 @@ public final class ChangeDataCaptureStopSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().stopWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .stopWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } ``` @@ -395,6 +399,7 @@ public final class ChangeDataCaptureStopSamples { ### CredentialOperations_CreateOrUpdate ```java +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; /** @@ -402,9 +407,7 @@ import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; */ public final class CredentialOperationsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create.json */ /** * Sample code: Credentials_Create. @@ -412,11 +415,11 @@ public final class CredentialOperationsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void credentialsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().define("exampleCredential") - .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new ManagedIdentityCredential().withResourceId( - "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami")) - .create(); + manager.credentialOperations() + .createOrUpdateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", + new CredentialResourceInner().withProperties(new ManagedIdentityCredential().withResourceId( + "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami")), + null, com.azure.core.util.Context.NONE); } } ``` @@ -429,9 +432,7 @@ public final class CredentialOperationsCreateOrUpdateSamples { */ public final class CredentialOperationsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.json */ /** * Sample code: Credentials_Delete. @@ -439,8 +440,9 @@ public final class CredentialOperationsDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void credentialsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleCredential", com.azure.core.util.Context.NONE); + manager.credentialOperations() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", + com.azure.core.util.Context.NONE); } } ``` @@ -453,8 +455,7 @@ public final class CredentialOperationsDeleteSamples { */ public final class CredentialOperationsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json */ /** * Sample code: Credentials_Get. @@ -462,8 +463,9 @@ public final class CredentialOperationsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void credentialsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleCredential", null, com.azure.core.util.Context.NONE); + manager.credentialOperations() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", null, + com.azure.core.util.Context.NONE); } } ``` @@ -476,8 +478,7 @@ public final class CredentialOperationsGetSamples { */ public final class CredentialOperationsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Credentials_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_ListByFactory.json */ /** * Sample code: Credentials_ListByFactory. 
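The 1.0.0-beta.28 changelog above adds a `type()` accessor to most polymorphic models (and `authenticationType()`/`authorizationType()` to a few others). A minimal sketch of reading that discriminator on a credential, assuming `type()` returns the discriminator as a `String` and that the printed value is `ManagedIdentity` (both are assumptions for illustration, not confirmed by the patch):

```java
import com.azure.resourcemanager.datafactory.models.Credential;
import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential;

public final class CredentialTypeSketch {
    public static void main(String[] args) {
        // ManagedIdentityCredential is a Credential subtype; per the changelog, Credential
        // now exposes its discriminator through type(). The exact discriminator string is
        // an assumption here.
        Credential credential = new ManagedIdentityCredential();
        System.out.println(credential.type());
    }
}
```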
@@ -485,8 +486,8 @@ public final class CredentialOperationsListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void credentialsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.credentialOperations() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -519,8 +520,7 @@ import java.util.Map; */ public final class DataFlowDebugSessionAddDataFlowSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_AddDataFlow.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_AddDataFlow.json */ /** * Sample code: DataFlowDebugSession_AddDataFlow. @@ -529,13 +529,17 @@ public final class DataFlowDebugSessionAddDataFlowSamples { */ public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.dataFlowDebugSessions().addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", - new DataFlowDebugPackage().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") - .withDataFlow(new DataFlowDebugResource().withName("dataflow1").withProperties(new MappingDataFlow() - .withSources(Arrays.asList(new DataFlowSource().withName("source1") - .withDataset(new DatasetReference().withReferenceName("DelimitedText2")))) - .withSinks(Arrays.asList()).withTransformations(Arrays.asList()).withScript( - "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1"))) + manager.dataFlowDebugSessions() + .addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", new DataFlowDebugPackage() + .withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") + .withDataFlow(new DataFlowDebugResource().withName("dataflow1") + .withProperties(new MappingDataFlow() + .withSources(Arrays.asList(new DataFlowSource().withName("source1") + .withDataset(new DatasetReference().withReferenceName("DelimitedText2")))) + .withSinks(Arrays.asList()) + .withTransformations(Arrays.asList()) + .withScript( + "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1"))) .withDatasets(Arrays.asList(new DatasetDebugResource().withName("dataset1") .withProperties(new DelimitedTextDataset() .withSchema(SerializerFactory.createDefaultManagementSerializerAdapter() @@ -544,7 +548,10 @@ public final class DataFlowDebugSessionAddDataFlowSamples { .withAnnotations(Arrays.asList()) .withLocation(new AzureBlobStorageLocation().withFileName("Ansiencoding.csv") .withContainer("dataflow-sample-data")) - .withColumnDelimiter(",").withQuoteChar("\"").withEscapeChar("\\").withFirstRowAsHeader(true)))) + .withColumnDelimiter(",") + .withQuoteChar("\"") + .withEscapeChar("\\") + .withFirstRowAsHeader(true)))) .withLinkedServices(Arrays.asList(new LinkedServiceDebugResource().withName("linkedService1") .withProperties(new AzureBlobStorageLinkedService().withAnnotations(Arrays.asList()) .withConnectionString( @@ -552,16 +559,17 @@ public final class DataFlowDebugSessionAddDataFlowSamples { .withEncryptedCredential("fakeTokenPlaceholder")))) .withDebugSettings(new DataFlowDebugPackageDebugSettings() 
.withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("source1").withRowLimit(1000) + new DataFlowSourceSetting().withSourceName("source1") + .withRowLimit(1000) .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting() - .withSourceName("source2").withRowLimit(222).withAdditionalProperties(mapOf()))) + new DataFlowSourceSetting().withSourceName("source2") + .withRowLimit(222) + .withAdditionalProperties(mapOf()))) .withParameters(mapOf("sourcePath", "Toy")) - .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class, - SerializerEncoding.JSON))) - .withAdditionalProperties(mapOf()), - com.azure.core.util.Context.NONE); + .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class, + SerializerEncoding.JSON))) + .withAdditionalProperties(mapOf()), com.azure.core.util.Context.NONE); } // Use "Map.of" if available @@ -595,8 +603,7 @@ import java.util.Map; */ public final class DataFlowDebugSessionCreateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Create.json */ /** * Sample code: DataFlowDebugSession_Create. @@ -611,7 +618,9 @@ public final class DataFlowDebugSessionCreateSamples { .withProperties(new ManagedIntegrationRuntime() .withComputeProperties(new IntegrationRuntimeComputeProperties().withLocation("AutoResolve") .withDataFlowProperties(new IntegrationRuntimeDataFlowProperties() - .withComputeType(DataFlowComputeType.GENERAL).withCoreCount(48).withTimeToLive(10) + .withComputeType(DataFlowComputeType.GENERAL) + .withCoreCount(48) + .withTimeToLive(10) .withAdditionalProperties(mapOf())) .withAdditionalProperties(mapOf())))), com.azure.core.util.Context.NONE); @@ -641,8 +650,7 @@ import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRe */ public final class DataFlowDebugSessionDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Delete.json */ /** * Sample code: DataFlowDebugSession_Delete. @@ -650,9 +658,10 @@ public final class DataFlowDebugSessionDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void dataFlowDebugSessionDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"), - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", + new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"), + com.azure.core.util.Context.NONE); } } ``` @@ -669,8 +678,7 @@ import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandType; */ public final class DataFlowDebugSessionExecuteCommandSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_ExecuteCommand.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_ExecuteCommand.json */ /** * Sample code: DataFlowDebugSession_ExecuteCommand. @@ -679,11 +687,12 @@ public final class DataFlowDebugSessionExecuteCommandSamples { */ public static void dataFlowDebugSessionExecuteCommand(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().executeCommand("exampleResourceGroup", "exampleFactoryName", - new DataFlowDebugCommandRequest().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") - .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY) - .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("source1").withRowLimits(100)), - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .executeCommand("exampleResourceGroup", "exampleFactoryName", + new DataFlowDebugCommandRequest().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") + .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY) + .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("source1").withRowLimits(100)), + com.azure.core.util.Context.NONE); } } ``` @@ -696,8 +705,7 @@ public final class DataFlowDebugSessionExecuteCommandSamples { */ public final class DataFlowDebugSessionQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_QueryByFactory.json */ /** * Sample code: DataFlowDebugSession_QueryByFactory. 
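The `queryByFactory` call shown in the next sample returns a `PagedIterable`, so the active debug sessions can be iterated directly. A hedged sketch, assuming the element type is `models.DataFlowDebugSessionInfo` and that it exposes a `sessionId()` accessor (neither is confirmed by this patch):

```java
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import com.azure.resourcemanager.datafactory.models.DataFlowDebugSessionInfo;

public final class DataFlowDebugSessionListSketch {
    public static void listSessions(DataFactoryManager manager) {
        // Iterate the paged result and print each debug session id (accessor name assumed).
        for (DataFlowDebugSessionInfo session : manager.dataFlowDebugSessions()
            .queryByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE)) {
            System.out.println(session.sessionId());
        }
    }
}
```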
@@ -706,8 +714,8 @@ public final class DataFlowDebugSessionQueryByFactorySamples { */ public static void dataFlowDebugSessionQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().queryByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .queryByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -727,8 +735,7 @@ import java.util.Arrays; */ public final class DataFlowsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json */ /** * Sample code: DataFlows_Create. @@ -736,7 +743,9 @@ public final class DataFlowsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().define("exampleDataFlow").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.dataFlows() + .define("exampleDataFlow") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new MappingDataFlow().withDescription( "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") .withSources(Arrays.asList( @@ -764,8 +773,7 @@ public final class DataFlowsCreateOrUpdateSamples { } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json */ /** * Sample code: DataFlows_Update. @@ -773,31 +781,34 @@ public final class DataFlowsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void dataFlowsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - DataFlowResource resource = manager.dataFlows().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleDataFlow", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new MappingDataFlow().withDescription( - "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") - .withSources(Arrays.asList( - new DataFlowSource().withName("USDCurrency") - .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")), - new DataFlowSource().withName("CADSource") - .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD")))) - .withSinks(Arrays.asList( - new DataFlowSink().withName("USDSink") - .withDataset(new DatasetReference().withReferenceName("USDOutput")), - new DataFlowSink().withName("CADSink") - .withDataset(new DatasetReference().withReferenceName("CADOutput")))) - .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,", "Country as string,", - "DateTime1 as string,", "CurrentConversionRate as double", "),", "allowSchemaDrift: false,", - "validateSchema: false) ~> USDCurrency", "source(output(", "PreviousConversionRate as double,", - "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),", - "allowSchemaDrift: true,", "validateSchema: false) ~> CADSource", - "USDCurrency, CADSource union(byName: true)~> Union", - "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn", - "NewCurrencyColumn split(Country == 'USD',", - "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)", - "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink", - "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"))) + DataFlowResource resource = manager.dataFlows() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new MappingDataFlow().withDescription( + "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") + .withSources(Arrays.asList( + new DataFlowSource().withName("USDCurrency") + .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")), + new DataFlowSource().withName("CADSource") + .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD")))) + .withSinks(Arrays.asList( + new DataFlowSink().withName("USDSink") + .withDataset(new DatasetReference().withReferenceName("USDOutput")), + new DataFlowSink().withName("CADSink") + .withDataset(new DatasetReference().withReferenceName("CADOutput")))) + .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,", + "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),", + "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(", + "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,", + "CurrentConversionRate as double", "),", "allowSchemaDrift: true,", + "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union", + "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn", + "NewCurrencyColumn split(Country == 'USD',", + "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)", + "ConditionalSplit1@USD 
sink(saveMode:'overwrite' ) ~> USDSink", + "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"))) .apply(); } } @@ -811,8 +822,7 @@ public final class DataFlowsCreateOrUpdateSamples { */ public final class DataFlowsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json */ /** * Sample code: DataFlows_Delete. @@ -820,8 +830,9 @@ public final class DataFlowsDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", - com.azure.core.util.Context.NONE); + manager.dataFlows() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", + com.azure.core.util.Context.NONE); } } ``` @@ -834,8 +845,7 @@ public final class DataFlowsDeleteSamples { */ public final class DataFlowsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json */ /** * Sample code: DataFlows_Get. @@ -843,8 +853,9 @@ public final class DataFlowsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, - com.azure.core.util.Context.NONE); + manager.dataFlows() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, + com.azure.core.util.Context.NONE); } } ``` @@ -857,8 +868,7 @@ public final class DataFlowsGetSamples { */ public final class DataFlowsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlows_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_ListByFactory.json */ /** * Sample code: DataFlows_ListByFactory. @@ -866,8 +876,8 @@ public final class DataFlowsListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.dataFlows() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -892,8 +902,7 @@ import java.util.Map; */ public final class DatasetsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json */ /** * Sample code: Datasets_Create. 
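The dataset samples below pass Data Factory expressions such as `@dataset().MyFolderPath` as plain `Object` values produced by deserializing a small JSON snippet. A hedged sketch of a helper that encapsulates that pattern; the helper name is hypothetical, while the serializer calls are the same ones used in the samples:

```java
import com.azure.core.management.serializer.SerializerFactory;
import com.azure.core.util.serializer.SerializerEncoding;
import java.io.IOException;

public final class AdfExpressionSketch {
    // Wraps an ADF expression string in the {"type":"Expression","value":...} shape that the
    // generated models accept for Object-typed properties such as folderPath and fileName.
    // The expression is assumed not to need additional JSON escaping.
    public static Object expression(String value) throws IOException {
        return SerializerFactory.createDefaultManagementSerializerAdapter()
            .deserialize("{\"type\":\"Expression\",\"value\":\"" + value + "\"}", Object.class,
                SerializerEncoding.JSON);
    }
}
```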
@@ -902,24 +911,25 @@ public final class DatasetsCreateOrUpdateSamples { */ public static void datasetsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.datasets().define("exampleDataset").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.datasets() + .define("exampleDataset") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new AzureBlobDataset() .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) - .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, - SerializerEncoding.JSON)) - .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, - SerializerEncoding.JSON)) + .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, + SerializerEncoding.JSON)) + .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, + SerializerEncoding.JSON)) .withFormat(new TextFormat())) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json */ /** * Sample code: Datasets_Update. 
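The update samples follow a get-then-apply flow: fetch the resource wrapper, open its update builder, and apply the changes. A compact sketch of that flow using the same resource names and calls as the Datasets_Update sample below:

```java
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import com.azure.resourcemanager.datafactory.models.AzureBlobDataset;
import com.azure.resourcemanager.datafactory.models.DatasetResource;
import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;

public final class DatasetUpdateFlowSketch {
    public static void updateDescription(DataFactoryManager manager) {
        // Fetch the existing dataset, then mutate it through the update builder.
        DatasetResource resource = manager.datasets()
            .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null,
                com.azure.core.util.Context.NONE)
            .getValue();
        resource.update()
            .withProperties(new AzureBlobDataset().withDescription("Example description")
                .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")))
            .apply();
    }
}
```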
@@ -928,18 +938,23 @@ public final class DatasetsCreateOrUpdateSamples { */ public static void datasetsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - DatasetResource resource = manager.datasets().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleDataset", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new AzureBlobDataset().withDescription("Example description") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) - .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), - "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) - .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, - SerializerEncoding.JSON)) - .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, SerializerEncoding.JSON)) - .withFormat(new TextFormat())).apply(); + DatasetResource resource = manager.datasets() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new AzureBlobDataset().withDescription("Example description") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) + .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), + "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) + .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, + SerializerEncoding.JSON)) + .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, + SerializerEncoding.JSON)) + .withFormat(new TextFormat())) + .apply(); } // Use "Map.of" if available @@ -964,8 +979,7 @@ public final class DatasetsCreateOrUpdateSamples { */ public final class DatasetsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json */ /** * Sample code: Datasets_Delete. @@ -973,8 +987,9 @@ public final class DatasetsDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void datasetsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", - com.azure.core.util.Context.NONE); + manager.datasets() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", + com.azure.core.util.Context.NONE); } } ``` @@ -987,8 +1002,7 @@ public final class DatasetsDeleteSamples { */ public final class DatasetsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json */ /** * Sample code: Datasets_Get. @@ -996,8 +1010,9 @@ public final class DatasetsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void datasetsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, - com.azure.core.util.Context.NONE); + manager.datasets() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, + com.azure.core.util.Context.NONE); } } ``` @@ -1010,8 +1025,7 @@ public final class DatasetsGetSamples { */ public final class DatasetsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Datasets_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_ListByFactory.json */ /** * Sample code: Datasets_ListByFactory. @@ -1019,8 +1033,8 @@ public final class DatasetsListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void datasetsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.datasets() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -1035,8 +1049,7 @@ import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; */ public final class ExposureControlGetFeatureValueSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_GetFeatureValue.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValue.json */ /** * Sample code: ExposureControl_GetFeatureValue. 
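`getFeatureValueWithResponse`, shown in the next sample, wraps its payload in a `Response`. A brief sketch of unwrapping it, assuming the payload type is `models.ExposureControlResponse` with `featureName()` and `value()` accessors (the accessor names are assumptions, not confirmed by this patch):

```java
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
import com.azure.resourcemanager.datafactory.models.ExposureControlResponse;

public final class ExposureControlValueSketch {
    public static void printFeatureValue(DataFactoryManager manager) {
        // getValue() unwraps the REST response; the accessors on the payload are assumed names.
        ExposureControlResponse result = manager.exposureControls()
            .getFeatureValueWithResponse("WestEurope",
                new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac")
                    .withFeatureType("Feature"),
                com.azure.core.util.Context.NONE)
            .getValue();
        System.out.println(result.featureName() + " = " + result.value());
    }
}
```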
@@ -1045,9 +1058,11 @@ public final class ExposureControlGetFeatureValueSamples { */ public static void exposureControlGetFeatureValue(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().getFeatureValueWithResponse("WestEurope", - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac").withFeatureType("Feature"), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .getFeatureValueWithResponse("WestEurope", + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + com.azure.core.util.Context.NONE); } } ``` @@ -1062,8 +1077,7 @@ import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; */ public final class ExposureControlGetFeatureValueByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_GetFeatureValueByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValueByFactory.json */ /** * Sample code: ExposureControl_GetFeatureValueByFactory. @@ -1072,9 +1086,11 @@ public final class ExposureControlGetFeatureValueByFactorySamples { */ public static void exposureControlGetFeatureValueByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().getFeatureValueByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac").withFeatureType("Feature"), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .getFeatureValueByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + com.azure.core.util.Context.NONE); } } ``` @@ -1091,8 +1107,7 @@ import java.util.Arrays; */ public final class ExposureControlQueryFeatureValuesByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_QueryFeatureValuesByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_QueryFeatureValuesByFactory.json */ /** * Sample code: ExposureControl_QueryFeatureValuesByFactory. 
@@ -1101,12 +1116,13 @@ public final class ExposureControlQueryFeatureValuesByFactorySamples { */ public static void exposureControlQueryFeatureValuesByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") - .withFeatureType("Feature"), - new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))), + com.azure.core.util.Context.NONE); } } ``` @@ -1122,8 +1138,7 @@ import com.azure.resourcemanager.datafactory.models.FactoryVstsConfiguration; */ public final class FactoriesConfigureFactoryRepoSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_ConfigureFactoryRepo.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ConfigureFactoryRepo.json */ /** * Sample code: Factories_ConfigureFactoryRepo. @@ -1131,12 +1146,17 @@ public final class FactoriesConfigureFactoryRepoSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesConfigureFactoryRepo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId( - "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName") - .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF").withRepositoryName("repo") - .withCollaborationBranch("master").withRootFolder("/").withLastCommitId("").withProjectName("project") - .withTenantId("")), - com.azure.core.util.Context.NONE); + manager.factories() + .configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId( + "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName") + .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF") + .withRepositoryName("repo") + .withCollaborationBranch("master") + .withRootFolder("/") + .withLastCommitId("") + .withProjectName("project") + .withTenantId("")), + com.azure.core.util.Context.NONE); } } ``` @@ -1149,8 +1169,7 @@ public final class FactoriesConfigureFactoryRepoSamples { */ public final class FactoriesCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_CreateOrUpdate.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_CreateOrUpdate.json */ /** * Sample code: Factories_CreateOrUpdate. 
@@ -1158,8 +1177,11 @@ public final class FactoriesCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesCreateOrUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().define("exampleFactoryName").withRegion("East US") - .withExistingResourceGroup("exampleResourceGroup").create(); + manager.factories() + .define("exampleFactoryName") + .withRegion("East US") + .withExistingResourceGroup("exampleResourceGroup") + .create(); } } ``` @@ -1172,8 +1194,7 @@ public final class FactoriesCreateOrUpdateSamples { */ public final class FactoriesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json */ /** * Sample code: Factories_Delete. @@ -1181,8 +1202,9 @@ public final class FactoriesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.factories() + .deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", + com.azure.core.util.Context.NONE); } } ``` @@ -1195,8 +1217,7 @@ public final class FactoriesDeleteSamples { */ public final class FactoriesGetByResourceGroupSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json */ /** * Sample code: Factories_Get. @@ -1204,8 +1225,9 @@ public final class FactoriesGetByResourceGroupSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, - com.azure.core.util.Context.NONE); + manager.factories() + .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, + com.azure.core.util.Context.NONE); } } ``` @@ -1220,8 +1242,7 @@ import com.azure.resourcemanager.datafactory.models.UserAccessPolicy; */ public final class FactoriesGetDataPlaneAccessSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_GetDataPlaneAccess.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetDataPlaneAccess.json */ /** * Sample code: Factories_GetDataPlaneAccess. @@ -1229,10 +1250,14 @@ public final class FactoriesGetDataPlaneAccessSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void factoriesGetDataPlaneAccess(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName", - new UserAccessPolicy().withPermissions("r").withAccessResourcePath("").withProfileName("DefaultProfile") - .withStartTime("2018-11-10T02:46:20.2659347Z").withExpireTime("2018-11-10T09:46:20.2659347Z"), - com.azure.core.util.Context.NONE); + manager.factories() + .getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName", + new UserAccessPolicy().withPermissions("r") + .withAccessResourcePath("") + .withProfileName("DefaultProfile") + .withStartTime("2018-11-10T02:46:20.2659347Z") + .withExpireTime("2018-11-10T09:46:20.2659347Z"), + com.azure.core.util.Context.NONE); } } ``` @@ -1247,8 +1272,7 @@ import com.azure.resourcemanager.datafactory.models.GitHubAccessTokenRequest; */ public final class FactoriesGetGitHubAccessTokenSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_GetGitHubAccessToken.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetGitHubAccessToken.json */ /** * Sample code: Factories_GetGitHubAccessToken. @@ -1256,10 +1280,12 @@ public final class FactoriesGetGitHubAccessTokenSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesGetGitHubAccessToken(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName", - new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder").withGitHubClientId("some") - .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"), - com.azure.core.util.Context.NONE); + manager.factories() + .getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName", + new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder") + .withGitHubClientId("some") + .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"), + com.azure.core.util.Context.NONE); } } ``` @@ -1272,8 +1298,7 @@ public final class FactoriesGetGitHubAccessTokenSamples { */ public final class FactoriesListSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json */ /** * Sample code: Factories_List. @@ -1294,8 +1319,7 @@ public final class FactoriesListSamples { */ public final class FactoriesListByResourceGroupSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_ListByResourceGroup.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ListByResourceGroup.json */ /** * Sample code: Factories_ListByResourceGroup. 
@@ -1320,8 +1344,7 @@ import java.util.Map; */ public final class FactoriesUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json */ /** * Sample code: Factories_Update. @@ -1329,8 +1352,10 @@ public final class FactoriesUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - Factory resource = manager.factories().getByResourceGroupWithResponse("exampleResourceGroup", - "exampleFactoryName", null, com.azure.core.util.Context.NONE).getValue(); + Factory resource = manager.factories() + .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update().withTags(mapOf("exampleTag", "exampleValue")).apply(); } @@ -1360,8 +1385,7 @@ import java.util.Map; */ public final class GlobalParametersCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Create.json */ /** * Sample code: GlobalParameters_Create. @@ -1369,13 +1393,15 @@ public final class GlobalParametersCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().define("default").withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties((Map) null).create(); + manager.globalParameters() + .define("default") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") + .withProperties((Map) null) + .create(); } /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Update.json */ /** * Sample code: GlobalParameters_Update. @@ -1399,8 +1425,7 @@ public final class GlobalParametersCreateOrUpdateSamples { */ public final class GlobalParametersDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Delete.json */ /** * Sample code: GlobalParameters_Delete. @@ -1408,8 +1433,9 @@ public final class GlobalParametersDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void globalParametersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default", + com.azure.core.util.Context.NONE); } } ``` @@ -1422,9 +1448,7 @@ public final class GlobalParametersDeleteSamples { */ public final class GlobalParametersGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get.json */ /** * Sample code: GlobalParameters_Get. @@ -1432,8 +1456,8 @@ public final class GlobalParametersGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE); } } ``` @@ -1446,8 +1470,7 @@ public final class GlobalParametersGetSamples { */ public final class GlobalParametersListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_ListByFactory.json */ /** * Sample code: GlobalParameters_ListByFactory. @@ -1455,8 +1478,8 @@ public final class GlobalParametersListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -1469,8 +1492,7 @@ public final class GlobalParametersListByFactorySamples { */ public final class IntegrationRuntimeNodesDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Delete.json */ /** * Sample code: IntegrationRuntimesNodes_Delete. 
@@ -1479,8 +1501,9 @@ public final class IntegrationRuntimeNodesDeleteSamples { */ public static void integrationRuntimesNodesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + com.azure.core.util.Context.NONE); } } ``` @@ -1493,8 +1516,7 @@ public final class IntegrationRuntimeNodesDeleteSamples { */ public final class IntegrationRuntimeNodesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Get.json */ /** * Sample code: IntegrationRuntimeNodes_Get. @@ -1502,8 +1524,9 @@ public final class IntegrationRuntimeNodesGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimeNodesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + com.azure.core.util.Context.NONE); } } ``` @@ -1516,8 +1539,7 @@ public final class IntegrationRuntimeNodesGetSamples { */ public final class IntegrationRuntimeNodesGetIpAddressSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_GetIpAddress.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_GetIpAddress.json */ /** * Sample code: IntegrationRuntimeNodes_GetIpAddress. @@ -1526,8 +1548,9 @@ public final class IntegrationRuntimeNodesGetIpAddressSamples { */ public static void integrationRuntimeNodesGetIpAddress(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + "Node_1", com.azure.core.util.Context.NONE); } } ``` @@ -1542,8 +1565,7 @@ import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNode */ public final class IntegrationRuntimeNodesUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Update.json */ /** * Sample code: IntegrationRuntimeNodes_Update. @@ -1551,9 +1573,9 @@ public final class IntegrationRuntimeNodesUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimeNodesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().updateWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), - com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .updateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), com.azure.core.util.Context.NONE); } } ``` @@ -1568,8 +1590,7 @@ import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest */ public final class IntegrationRuntimeObjectMetadataGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeObjectMetadata_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Get.json */ /** * Sample code: IntegrationRuntimeObjectMetadata_Get. @@ -1578,9 +1599,9 @@ public final class IntegrationRuntimeObjectMetadataGetSamples { */ public static void integrationRuntimeObjectMetadataGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeObjectMetadatas().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "testactivityv2", new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimeObjectMetadatas() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "testactivityv2", + new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), com.azure.core.util.Context.NONE); } } ``` @@ -1593,8 +1614,7 @@ public final class IntegrationRuntimeObjectMetadataGetSamples { */ public final class IntegrationRuntimeObjectMetadataRefreshSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeObjectMetadata_Refresh.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Refresh.json */ /** * Sample code: IntegrationRuntimeObjectMetadata_Refresh. 
@@ -1603,8 +1623,8 @@ public final class IntegrationRuntimeObjectMetadataRefreshSamples { */ public static void integrationRuntimeObjectMetadataRefresh(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeObjectMetadatas().refresh("exampleResourceGroup", "exampleFactoryName", - "testactivityv2", com.azure.core.util.Context.NONE); + manager.integrationRuntimeObjectMetadatas() + .refresh("exampleResourceGroup", "exampleFactoryName", "testactivityv2", com.azure.core.util.Context.NONE); } } ``` @@ -1619,8 +1639,7 @@ import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRunti */ public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_CreateLinkedIntegrationRuntime.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json */ /** * Sample code: IntegrationRuntimes_CreateLinkedIntegrationRuntime. @@ -1629,12 +1648,14 @@ public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples { */ public static void integrationRuntimesCreateLinkedIntegrationRuntime( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", - new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83") - .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283") - .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981").withDataFactoryLocation("West US"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", "exampleFactoryName", + "exampleIntegrationRuntime", + new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83") + .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283") + .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981") + .withDataFactoryLocation("West US"), + com.azure.core.util.Context.NONE); } } ``` @@ -1649,8 +1670,7 @@ import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime */ public final class IntegrationRuntimesCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Create.json */ /** * Sample code: IntegrationRuntimes_Create. @@ -1658,7 +1678,8 @@ public final class IntegrationRuntimesCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().define("exampleIntegrationRuntime") + manager.integrationRuntimes() + .define("exampleIntegrationRuntime") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new SelfHostedIntegrationRuntime().withDescription("A selfhosted integration runtime")) .create(); @@ -1674,8 +1695,7 @@ public final class IntegrationRuntimesCreateOrUpdateSamples { */ public final class IntegrationRuntimesDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Delete.json */ /** * Sample code: IntegrationRuntimes_Delete. @@ -1683,8 +1703,9 @@ public final class IntegrationRuntimesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1697,8 +1718,7 @@ public final class IntegrationRuntimesDeleteSamples { */ public final class IntegrationRuntimesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Get.json */ /** * Sample code: IntegrationRuntimes_Get. @@ -1706,8 +1726,9 @@ public final class IntegrationRuntimesGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", null, com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null, + com.azure.core.util.Context.NONE); } } ``` @@ -1720,8 +1741,7 @@ public final class IntegrationRuntimesGetSamples { */ public final class IntegrationRuntimesGetConnectionInfoSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetConnectionInfo.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetConnectionInfo.json */ /** * Sample code: IntegrationRuntimes_GetConnectionInfo. 
@@ -1730,8 +1750,9 @@ public final class IntegrationRuntimesGetConnectionInfoSamples { */ public static void integrationRuntimesGetConnectionInfo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1744,8 +1765,7 @@ public final class IntegrationRuntimesGetConnectionInfoSamples { */ public final class IntegrationRuntimesGetMonitoringDataSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetMonitoringData.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetMonitoringData.json */ /** * Sample code: IntegrationRuntimes_GetMonitoringData. @@ -1754,8 +1774,9 @@ public final class IntegrationRuntimesGetMonitoringDataSamples { */ public static void integrationRuntimesGetMonitoringData(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1768,8 +1789,7 @@ public final class IntegrationRuntimesGetMonitoringDataSamples { */ public final class IntegrationRuntimesGetStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetStatus.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetStatus.json */ /** * Sample code: IntegrationRuntimes_GetStatus. @@ -1777,8 +1797,9 @@ public final class IntegrationRuntimesGetStatusSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesGetStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1791,8 +1812,7 @@ public final class IntegrationRuntimesGetStatusSamples { */ public final class IntegrationRuntimesListAuthKeysSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListAuthKeys.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListAuthKeys.json */ /** * Sample code: IntegrationRuntimes_ListAuthKeys. 
@@ -1801,8 +1821,9 @@ public final class IntegrationRuntimesListAuthKeysSamples { */ public static void integrationRuntimesListAuthKeys(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1815,8 +1836,7 @@ public final class IntegrationRuntimesListAuthKeysSamples { */ public final class IntegrationRuntimesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListByFactory.json */ /** * Sample code: IntegrationRuntimes_ListByFactory. @@ -1825,8 +1845,8 @@ public final class IntegrationRuntimesListByFactorySamples { */ public static void integrationRuntimesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -1839,8 +1859,7 @@ public final class IntegrationRuntimesListByFactorySamples { */ public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json */ /** * Sample code: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints. @@ -1849,8 +1868,9 @@ public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSa */ public static void integrationRuntimesOutboundNetworkDependenciesEndpoints( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", "exampleFactoryName", + "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); } } ``` @@ -1866,8 +1886,7 @@ import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeRegenerate */ public final class IntegrationRuntimesRegenerateAuthKeySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_RegenerateAuthKey.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RegenerateAuthKey.json */ /** * Sample code: IntegrationRuntimes_RegenerateAuthKey. 
@@ -1876,10 +1895,10 @@ public final class IntegrationRuntimesRegenerateAuthKeySamples { */ public static void integrationRuntimesRegenerateAuthKey(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", - new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2), + com.azure.core.util.Context.NONE); } } ``` @@ -1894,8 +1913,7 @@ import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequ */ public final class IntegrationRuntimesRemoveLinksSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_RemoveLinks.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RemoveLinks.json */ /** * Sample code: IntegrationRuntimes_Upgrade. @@ -1903,10 +1921,10 @@ public final class IntegrationRuntimesRemoveLinksSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", - new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"), + com.azure.core.util.Context.NONE); } } ``` @@ -1919,8 +1937,7 @@ public final class IntegrationRuntimesRemoveLinksSamples { */ public final class IntegrationRuntimesStartSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Start.json */ /** * Sample code: IntegrationRuntimes_Start. @@ -1928,8 +1945,9 @@ public final class IntegrationRuntimesStartSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().start("exampleResourceGroup", "exampleFactoryName", - "exampleManagedIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .start("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1942,8 +1960,7 @@ public final class IntegrationRuntimesStartSamples { */ public final class IntegrationRuntimesStopSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Stop.json */ /** * Sample code: IntegrationRuntimes_Stop. @@ -1951,8 +1968,9 @@ public final class IntegrationRuntimesStopSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().stop("exampleResourceGroup", "exampleFactoryName", - "exampleManagedIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .stop("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1965,8 +1983,7 @@ public final class IntegrationRuntimesStopSamples { */ public final class IntegrationRuntimesSyncCredentialsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_SyncCredentials.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_SyncCredentials.json */ /** * Sample code: IntegrationRuntimes_SyncCredentials. @@ -1975,8 +1992,9 @@ public final class IntegrationRuntimesSyncCredentialsSamples { */ public static void integrationRuntimesSyncCredentials(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -1992,8 +2010,7 @@ import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; */ public final class IntegrationRuntimesUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Update.json */ /** * Sample code: IntegrationRuntimes_Update. @@ -2001,8 +2018,10 @@ public final class IntegrationRuntimesUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - IntegrationRuntimeResource resource = manager.integrationRuntimes().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", null, com.azure.core.util.Context.NONE).getValue(); + IntegrationRuntimeResource resource = manager.integrationRuntimes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update().withAutoUpdate(IntegrationRuntimeAutoUpdate.OFF).withUpdateDelayOffset("\"PT3H\"").apply(); } } @@ -2016,8 +2035,7 @@ public final class IntegrationRuntimesUpdateSamples { */ public final class IntegrationRuntimesUpgradeSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Upgrade.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Upgrade.json */ /** * Sample code: IntegrationRuntimes_Upgrade. @@ -2025,8 +2043,9 @@ public final class IntegrationRuntimesUpgradeSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } ``` @@ -2045,9 +2064,7 @@ import java.io.IOException; */ public final class LinkedServicesCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create.json */ /** * Sample code: LinkedServices_Create. @@ -2056,19 +2073,19 @@ public final class LinkedServicesCreateOrUpdateSamples { */ public static void linkedServicesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.linkedServices().define("exampleLinkedService") + manager.linkedServices() + .define("exampleLinkedService") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new AzureStorageLinkedService() - .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( + .withProperties(new AzureStorageLinkedService().withConnectionString(SerializerFactory + .createDefaultManagementSerializerAdapter() + .deserialize( "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", Object.class, SerializerEncoding.JSON))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update.json */ /** * Sample code: LinkedServices_Update. 
@@ -2077,12 +2094,16 @@ public final class LinkedServicesCreateOrUpdateSamples { */ public static void linkedServicesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - LinkedServiceResource resource = manager.linkedServices().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleLinkedService", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new AzureStorageLinkedService().withDescription("Example description") - .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", - Object.class, SerializerEncoding.JSON))) + LinkedServiceResource resource = manager.linkedServices() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new AzureStorageLinkedService().withDescription("Example description") + .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize( + "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", + Object.class, SerializerEncoding.JSON))) .apply(); } } @@ -2096,9 +2117,7 @@ public final class LinkedServicesCreateOrUpdateSamples { */ public final class LinkedServicesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete.json */ /** * Sample code: LinkedServices_Delete. @@ -2106,8 +2125,9 @@ public final class LinkedServicesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void linkedServicesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleLinkedService", com.azure.core.util.Context.NONE); + manager.linkedServices() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", + com.azure.core.util.Context.NONE); } } ``` @@ -2120,9 +2140,7 @@ public final class LinkedServicesDeleteSamples { */ public final class LinkedServicesGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get.json */ /** * Sample code: LinkedServices_Get. @@ -2130,8 +2148,9 @@ public final class LinkedServicesGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void linkedServicesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", - null, com.azure.core.util.Context.NONE); + manager.linkedServices() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null, + com.azure.core.util.Context.NONE); } } ``` @@ -2144,8 +2163,7 @@ public final class LinkedServicesGetSamples { */ public final class LinkedServicesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * LinkedServices_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_ListByFactory.json */ /** * Sample code: LinkedServices_ListByFactory. @@ -2153,8 +2171,8 @@ public final class LinkedServicesListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void linkedServicesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.linkedServices() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -2172,8 +2190,7 @@ import java.util.Map; */ public final class ManagedPrivateEndpointsCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Create.json */ /** * Sample code: ManagedVirtualNetworks_Create. @@ -2181,10 +2198,12 @@ public final class ManagedPrivateEndpointsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().define("exampleManagedPrivateEndpointName") + manager.managedPrivateEndpoints() + .define("exampleManagedPrivateEndpointName") .withExistingManagedVirtualNetwork("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName") - .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList()).withGroupId("blob") + .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList()) + .withGroupId("blob") .withPrivateLinkResourceId( "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage") .withAdditionalProperties(mapOf())) @@ -2213,8 +2232,7 @@ public final class ManagedPrivateEndpointsCreateOrUpdateSamples { */ public final class ManagedPrivateEndpointsDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Delete.json */ /** * Sample code: ManagedVirtualNetworks_Delete. @@ -2222,8 +2240,9 @@ public final class ManagedPrivateEndpointsDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void managedVirtualNetworksDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE); } } ``` @@ -2236,8 +2255,7 @@ public final class ManagedPrivateEndpointsDeleteSamples { */ public final class ManagedPrivateEndpointsGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Get.json */ /** * Sample code: ManagedPrivateEndpoints_Get. @@ -2245,9 +2263,9 @@ public final class ManagedPrivateEndpointsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedPrivateEndpointsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", null, - com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + "exampleManagedPrivateEndpointName", null, com.azure.core.util.Context.NONE); } } ``` @@ -2260,8 +2278,7 @@ public final class ManagedPrivateEndpointsGetSamples { */ public final class ManagedPrivateEndpointsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_ListByFactory.json */ /** * Sample code: ManagedPrivateEndpoints_ListByFactory. @@ -2270,8 +2287,9 @@ public final class ManagedPrivateEndpointsListByFactorySamples { */ public static void managedPrivateEndpointsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().listByFactory("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .listByFactory("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + com.azure.core.util.Context.NONE); } } ``` @@ -2288,8 +2306,7 @@ import java.util.Map; */ public final class ManagedVirtualNetworksCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Create.json */ /** * Sample code: ManagedVirtualNetworks_Create. @@ -2297,9 +2314,11 @@ public final class ManagedVirtualNetworksCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().define("exampleManagedVirtualNetworkName") + manager.managedVirtualNetworks() + .define("exampleManagedVirtualNetworkName") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf())).create(); + .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf())) + .create(); } // Use "Map.of" if available @@ -2324,8 +2343,7 @@ public final class ManagedVirtualNetworksCreateOrUpdateSamples { */ public final class ManagedVirtualNetworksGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Get.json */ /** * Sample code: ManagedVirtualNetworks_Get. @@ -2333,8 +2351,9 @@ public final class ManagedVirtualNetworksGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedVirtualNetworksGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", null, com.azure.core.util.Context.NONE); + manager.managedVirtualNetworks() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", null, + com.azure.core.util.Context.NONE); } } ``` @@ -2347,8 +2366,7 @@ public final class ManagedVirtualNetworksGetSamples { */ public final class ManagedVirtualNetworksListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_ListByFactory.json */ /** * Sample code: ManagedVirtualNetworks_ListByFactory. @@ -2357,8 +2375,8 @@ public final class ManagedVirtualNetworksListByFactorySamples { */ public static void managedVirtualNetworksListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.managedVirtualNetworks() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -2371,8 +2389,7 @@ public final class ManagedVirtualNetworksListByFactorySamples { */ public final class OperationsListSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json */ /** * Sample code: Operations_List. @@ -2393,9 +2410,7 @@ public final class OperationsListSamples { */ public final class PipelineRunsCancelSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel. 
- * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.json */ /** * Sample code: PipelineRuns_Cancel. @@ -2403,8 +2418,9 @@ public final class PipelineRunsCancelSamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelineRunsCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().cancelWithResponse("exampleResourceGroup", "exampleFactoryName", - "16ac5348-ff82-4f95-a80d-638c1d47b721", null, com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "16ac5348-ff82-4f95-a80d-638c1d47b721", + null, com.azure.core.util.Context.NONE); } } ``` @@ -2417,8 +2433,7 @@ public final class PipelineRunsCancelSamples { */ public final class PipelineRunsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json */ /** * Sample code: PipelineRuns_Get. @@ -2426,8 +2441,9 @@ public final class PipelineRunsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelineRunsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", + com.azure.core.util.Context.NONE); } } ``` @@ -2447,8 +2463,7 @@ import java.util.Arrays; */ public final class PipelineRunsQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * PipelineRuns_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_QueryByFactory.json */ /** * Sample code: PipelineRuns_QueryByFactory. @@ -2456,12 +2471,14 @@ public final class PipelineRunsQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. 
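Taken together with the `Pipelines_CreateRun` sample later in this file, the `PipelineRuns_Get` call above supports a simple run-and-poll loop. A hedged sketch follows; the terminal status strings and the ten-second sleep are assumptions for illustration, not something the SDK mandates.

```java
// Illustrative sketch (not part of this patch): start a pipeline run and poll its
// status using the same calls shown in the Pipelines_CreateRun and PipelineRuns_Get
// samples. Resource names, status strings, and the sleep interval are placeholders.
import com.azure.core.util.Context;
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import com.azure.resourcemanager.datafactory.models.CreateRunResponse;
import com.azure.resourcemanager.datafactory.models.PipelineRun;

public final class RunAndPollSketch {
    public static void runAndWait(DataFactoryManager manager) throws InterruptedException {
        CreateRunResponse run = manager.pipelines()
            .createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline",
                null, null, null, null, null, Context.NONE)
            .getValue();
        PipelineRun status;
        do {
            Thread.sleep(10_000);
            status = manager.pipelineRuns()
                .getWithResponse("exampleResourceGroup", "exampleFactoryName", run.runId(), Context.NONE)
                .getValue();
        } while ("Queued".equals(status.status()) || "InProgress".equals(status.status()));
        System.out.println("Pipeline run " + run.runId() + " finished with status " + status.status());
    }
}
```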
*/ public static void pipelineRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) - .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) - .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME) - .withOperator(RunQueryFilterOperator.EQUALS).withValues(Arrays.asList("examplePipeline")))), - com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) + .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) + .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME) + .withOperator(RunQueryFilterOperator.EQUALS) + .withValues(Arrays.asList("examplePipeline")))), + com.azure.core.util.Context.NONE); } } ``` @@ -2494,8 +2511,7 @@ import java.util.Map; */ public final class PipelinesCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json */ /** * Sample code: Pipelines_Create. @@ -2504,35 +2520,39 @@ public final class PipelinesCreateOrUpdateSamples { */ public static void pipelinesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.pipelines().define("examplePipeline").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.pipelines() + .define("examplePipeline") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity") .withIsSequential(true) .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") - .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset").withParameters( - mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) + .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters( + mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") .withParameters(mapOf("MyFileName", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, - SerializerEncoding.JSON), + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, + SerializerEncoding.JSON), "MyFolderPath", "examplecontainer")))) - .withSource(new BlobSource()).withSink(new BlobSink()).withDataIntegrationUnits(32))))) + .withSource(new BlobSource()) + .withSink(new BlobSink()) + .withDataIntegrationUnits(32))))) .withParameters(mapOf("JobId", new ParameterSpecification().withType(ParameterType.STRING), "OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY))) 
.withVariables(mapOf("TestVariableArray", new VariableSpecification().withType(VariableType.ARRAY))) .withRunDimensions(mapOf("JobId", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class, - SerializerEncoding.JSON))) + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class, + SerializerEncoding.JSON))) .withPolicy(new PipelinePolicy() .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00"))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json */ /** * Sample code: Pipelines_Update. @@ -2541,20 +2561,28 @@ public final class PipelinesCreateOrUpdateSamples { */ public static void pipelinesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - PipelineResource resource = manager.pipelines().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "examplePipeline", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withDescription("Example description").withActivities(Arrays.asList(new ForEachActivity() - .withName("ExampleForeachActivity").withIsSequential(true) - .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) - .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") - .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") - .withParameters(mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) - .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") - .withParameters(mapOf("MyFileName", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, SerializerEncoding.JSON), - "MyFolderPath", "examplecontainer")))) - .withSource(new BlobSource()).withSink(new BlobSink()).withDataIntegrationUnits(32))))) + PipelineResource resource = manager.pipelines() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withDescription("Example description") + .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity") + .withIsSequential(true) + .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) + .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") + .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters( + mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) + .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters(mapOf("MyFileName", + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, + SerializerEncoding.JSON), + "MyFolderPath", "examplecontainer")))) + .withSource(new BlobSource()) + .withSink(new BlobSink()) + .withDataIntegrationUnits(32))))) .withParameters(mapOf("OutputBlobNameList", new 
ParameterSpecification().withType(ParameterType.ARRAY))) .withPolicy(new PipelinePolicy() .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00"))) @@ -2589,9 +2617,7 @@ import java.util.Map; */ public final class PipelinesCreateRunSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json */ /** * Sample code: Pipelines_CreateRun. @@ -2600,11 +2626,13 @@ public final class PipelinesCreateRunSamples { */ public static void pipelinesCreateRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.pipelines().createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, - null, null, null, - mapOf("OutputBlobNameList", SerializerFactory.createDefaultManagementSerializerAdapter() - .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)), - com.azure.core.util.Context.NONE); + manager.pipelines() + .createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, null, null, + null, + mapOf("OutputBlobNameList", + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)), + com.azure.core.util.Context.NONE); } // Use "Map.of" if available @@ -2629,8 +2657,7 @@ public final class PipelinesCreateRunSamples { */ public final class PipelinesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json */ /** * Sample code: Pipelines_Delete. @@ -2638,8 +2665,9 @@ public final class PipelinesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelinesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", - com.azure.core.util.Context.NONE); + manager.pipelines() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", + com.azure.core.util.Context.NONE); } } ``` @@ -2652,8 +2680,7 @@ public final class PipelinesDeleteSamples { */ public final class PipelinesGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json */ /** * Sample code: Pipelines_Get. @@ -2661,8 +2688,9 @@ public final class PipelinesGetSamples { * @param manager Entry point to DataFactoryManager. 
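The generated samples above call a private `mapOf(...)` varargs helper (flagged by the generator's "Use \"Map.of\" if available" comment); its body sits outside these hunks. The sketch below is an assumption about its typical generated shape, shown only so the samples read as self-contained.

```java
// Sketch of the varargs map helper the generated samples call. Its exact body is
// outside this hunk, so treat this as an assumption about the usual generated shape.
// On Java 9+ the samples suggest using Map.of(...) directly instead.
import java.util.HashMap;
import java.util.Map;

final class MapOfSketch {
    @SuppressWarnings("unchecked")
    static <T> Map<String, T> mapOf(Object... inputs) {
        Map<String, T> map = new HashMap<>();
        for (int i = 0; i < inputs.length; i += 2) {
            String key = (String) inputs[i];
            T value = (T) inputs[i + 1];
            map.put(key, value);
        }
        return map;
    }

    private MapOfSketch() {
    }
}
```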
*/ public static void pipelinesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, - com.azure.core.util.Context.NONE); + manager.pipelines() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, + com.azure.core.util.Context.NONE); } } ``` @@ -2675,8 +2703,7 @@ public final class PipelinesGetSamples { */ public final class PipelinesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Pipelines_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_ListByFactory.json */ /** * Sample code: Pipelines_ListByFactory. @@ -2684,8 +2711,8 @@ public final class PipelinesListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelinesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.pipelines() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -2698,8 +2725,7 @@ public final class PipelinesListByFactorySamples { */ public final class PrivateEndPointConnectionsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * PrivateEndPointConnections_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PrivateEndPointConnections_ListByFactory.json */ /** * Sample code: privateEndPointConnections_ListByFactory. @@ -2708,8 +2734,8 @@ public final class PrivateEndPointConnectionsListByFactorySamples { */ public static void privateEndPointConnectionsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndPointConnections().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.privateEndPointConnections() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -2726,8 +2752,7 @@ import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; */ public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ApproveRejectPrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ApproveRejectPrivateEndpointConnection.json */ /** * Sample code: Approves or rejects a private endpoint connection for a factory. 
@@ -2736,11 +2761,13 @@ public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples { */ public static void approvesOrRejectsAPrivateEndpointConnectionForAFactory( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().define("connection") + manager.privateEndpointConnectionOperations() + .define("connection") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new PrivateLinkConnectionApprovalRequest() .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("Approved") - .withDescription("Approved by admin.").withActionsRequired("")) + .withDescription("Approved by admin.") + .withActionsRequired("")) .withPrivateEndpoint(new PrivateEndpoint().withId( "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"))) .create(); @@ -2756,8 +2783,7 @@ public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples { */ public final class PrivateEndpointConnectionOperationDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DeletePrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DeletePrivateEndpointConnection.json */ /** * Sample code: Delete a private endpoint connection for a datafactory. @@ -2766,8 +2792,9 @@ public final class PrivateEndpointConnectionOperationDeleteSamples { */ public static void deleteAPrivateEndpointConnectionForADatafactory( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "connection", com.azure.core.util.Context.NONE); + manager.privateEndpointConnectionOperations() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", + com.azure.core.util.Context.NONE); } } ``` @@ -2780,8 +2807,7 @@ public final class PrivateEndpointConnectionOperationDeleteSamples { */ public final class PrivateEndpointConnectionOperationGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GetPrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateEndpointConnection.json */ /** * Sample code: Get a private endpoint connection for a datafactory. 
@@ -2790,8 +2816,9 @@ public final class PrivateEndpointConnectionOperationGetSamples { */ public static void getAPrivateEndpointConnectionForADatafactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "connection", null, com.azure.core.util.Context.NONE); + manager.privateEndpointConnectionOperations() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", null, + com.azure.core.util.Context.NONE); } } ``` @@ -2804,8 +2831,7 @@ public final class PrivateEndpointConnectionOperationGetSamples { */ public final class PrivateLinkResourcesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GetPrivateLinkResources.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateLinkResources.json */ /** * Sample code: Get private link resources of a site. @@ -2814,8 +2840,8 @@ public final class PrivateLinkResourcesGetSamples { */ public static void getPrivateLinkResourcesOfASite(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateLinkResources().getWithResponse("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.privateLinkResources() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -2828,9 +2854,7 @@ public final class PrivateLinkResourcesGetSamples { */ public final class TriggerRunsCancelSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.json */ /** * Sample code: Triggers_Cancel. @@ -2838,8 +2862,9 @@ public final class TriggerRunsCancelSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.triggerRuns() + .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); } } ``` @@ -2859,8 +2884,7 @@ import java.util.Arrays; */ public final class TriggerRunsQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * TriggerRuns_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_QueryByFactory.json */ /** * Sample code: TriggerRuns_QueryByFactory. @@ -2868,12 +2892,14 @@ public final class TriggerRunsQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggerRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) - .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) - .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) - .withOperator(RunQueryFilterOperator.EQUALS).withValues(Arrays.asList("exampleTrigger")))), - com.azure.core.util.Context.NONE); + manager.triggerRuns() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) + .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) + .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) + .withOperator(RunQueryFilterOperator.EQUALS) + .withValues(Arrays.asList("exampleTrigger")))), + com.azure.core.util.Context.NONE); } } ``` @@ -2886,9 +2912,7 @@ public final class TriggerRunsQueryByFactorySamples { */ public final class TriggerRunsRerunSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun.json */ /** * Sample code: Triggers_Rerun. @@ -2896,8 +2920,9 @@ public final class TriggerRunsRerunSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersRerun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().rerunWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.triggerRuns() + .rerunWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); } } ``` @@ -2924,8 +2949,7 @@ import java.util.Map; */ public final class TriggersCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json */ /** * Sample code: Triggers_Create. 
@@ -2934,9 +2958,8 @@ public final class TriggersCreateOrUpdateSamples { */ public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager - .triggers().define( - "exampleTrigger") + manager.triggers() + .define("exampleTrigger") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new ScheduleTrigger() .withPipelines(Arrays.asList(new TriggerPipelineReference() @@ -2945,15 +2968,16 @@ public final class TriggersCreateOrUpdateSamples { SerializerFactory.createDefaultManagementSerializerAdapter() .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON))))) .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) - .withInterval(4).withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z")) - .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z")).withTimeZone("UTC") + .withInterval(4) + .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z")) + .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z")) + .withTimeZone("UTC") .withAdditionalProperties(mapOf()))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json */ /** * Sample code: Triggers_Update. @@ -2962,8 +2986,10 @@ public final class TriggersCreateOrUpdateSamples { */ public static void triggersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - TriggerResource resource = manager.triggers().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleTrigger", null, com.azure.core.util.Context.NONE).getValue(); + TriggerResource resource = manager.triggers() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update() .withProperties(new ScheduleTrigger().withDescription("Example description") .withPipelines(Arrays.asList(new TriggerPipelineReference() @@ -2972,8 +2998,10 @@ public final class TriggersCreateOrUpdateSamples { SerializerFactory.createDefaultManagementSerializerAdapter() .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON))))) .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) - .withInterval(4).withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z")) - .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z")).withTimeZone("UTC") + .withInterval(4) + .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z")) + .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z")) + .withTimeZone("UTC") .withAdditionalProperties(mapOf()))) .apply(); } @@ -3000,8 +3028,7 @@ public final class TriggersCreateOrUpdateSamples { */ public final class TriggersDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json */ /** * Sample code: Triggers_Delete. @@ -3009,8 +3036,9 @@ public final class TriggersDeleteSamples { * @param manager Entry point to DataFactoryManager. 
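The `Triggers_Create` and `Triggers_Update` samples above define or modify a `ScheduleTrigger`, but defining a trigger does not activate it; the `Triggers_Start` sample later in this file does that. A minimal sketch, reusing the same placeholder names:

```java
// Illustrative sketch (not part of this patch): a newly created or updated trigger
// remains stopped until started explicitly, as in the Triggers_Start sample below.
// Resource names reuse the example placeholders.
import com.azure.core.util.Context;
import com.azure.resourcemanager.datafactory.DataFactoryManager;

public final class StartTriggerSketch {
    public static void startTrigger(DataFactoryManager manager) {
        manager.triggers()
            .start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", Context.NONE);
    }
}
```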
*/ public static void triggersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } ``` @@ -3023,8 +3051,7 @@ public final class TriggersDeleteSamples { */ public final class TriggersGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json */ /** * Sample code: Triggers_Get. @@ -3032,8 +3059,9 @@ public final class TriggersGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, - com.azure.core.util.Context.NONE); + manager.triggers() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, + com.azure.core.util.Context.NONE); } } ``` @@ -3046,8 +3074,7 @@ public final class TriggersGetSamples { */ public final class TriggersGetEventSubscriptionStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_GetEventSubscriptionStatus.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_GetEventSubscriptionStatus.json */ /** * Sample code: Triggers_GetEventSubscriptionStatus. @@ -3056,8 +3083,9 @@ public final class TriggersGetEventSubscriptionStatusSamples { */ public static void triggersGetEventSubscriptionStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().getEventSubscriptionStatusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleTrigger", com.azure.core.util.Context.NONE); + manager.triggers() + .getEventSubscriptionStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } ``` @@ -3070,8 +3098,7 @@ public final class TriggersGetEventSubscriptionStatusSamples { */ public final class TriggersListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_ListByFactory.json */ /** * Sample code: Triggers_ListByFactory. @@ -3079,8 +3106,8 @@ public final class TriggersListByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.triggers() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } ``` @@ -3095,8 +3122,7 @@ import com.azure.resourcemanager.datafactory.models.TriggerFilterParameters; */ public final class TriggersQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_QueryByFactory.json */ /** * Sample code: Triggers_QueryByFactory. @@ -3104,8 +3130,10 @@ public final class TriggersQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new TriggerFilterParameters().withParentTriggerName("exampleTrigger"), com.azure.core.util.Context.NONE); + manager.triggers() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new TriggerFilterParameters().withParentTriggerName("exampleTrigger"), + com.azure.core.util.Context.NONE); } } ``` @@ -3118,8 +3146,7 @@ public final class TriggersQueryByFactorySamples { */ public final class TriggersStartSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json */ /** * Sample code: Triggers_Start. @@ -3127,8 +3154,8 @@ public final class TriggersStartSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE); } } ``` @@ -3141,8 +3168,7 @@ public final class TriggersStartSamples { */ public final class TriggersStopSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json */ /** * Sample code: Triggers_Stop. @@ -3150,8 +3176,8 @@ public final class TriggersStopSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE); } } ``` @@ -3164,8 +3190,7 @@ public final class TriggersStopSamples { */ public final class TriggersSubscribeToEventsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_SubscribeToEvents.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_SubscribeToEvents.json */ /** * Sample code: Triggers_SubscribeToEvents. @@ -3173,8 +3198,9 @@ public final class TriggersSubscribeToEventsSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersSubscribeToEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } ``` @@ -3187,8 +3213,7 @@ public final class TriggersSubscribeToEventsSamples { */ public final class TriggersUnsubscribeFromEventsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_UnsubscribeFromEvents.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_UnsubscribeFromEvents.json */ /** * Sample code: Triggers_UnsubscribeFromEvents. @@ -3196,8 +3221,9 @@ public final class TriggersUnsubscribeFromEventsSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersUnsubscribeFromEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } ``` diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml index 41dc5dce9958c..2a3c4b5712373 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml +++ b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml @@ -82,6 +82,12 @@ 5.9.3 test + + org.slf4j + slf4j-simple + 1.7.36 + test + org.mockito mockito-core @@ -100,12 +106,6 @@ 1.14.12 test - - org.slf4j - slf4j-simple - 1.7.36 - test - com.azure.resourcemanager azure-resourcemanager-storage diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java index a69029c9a678a..4fdb7a2af40e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java @@ -11,8 +11,8 @@ import com.azure.core.http.HttpPipelinePosition; import com.azure.core.http.policy.AddDatePolicy; import com.azure.core.http.policy.AddHeadersFromContextPolicy; -import com.azure.core.http.policy.HttpLogOptions; import com.azure.core.http.policy.HttpLoggingPolicy; +import com.azure.core.http.policy.HttpLogOptions; import com.azure.core.http.policy.HttpPipelinePolicy; import com.azure.core.http.policy.HttpPolicyProviders; import com.azure.core.http.policy.RequestIdPolicy; @@ -43,8 +43,8 @@ import com.azure.resourcemanager.datafactory.implementation.OperationsImpl; import com.azure.resourcemanager.datafactory.implementation.PipelineRunsImpl; import com.azure.resourcemanager.datafactory.implementation.PipelinesImpl; -import com.azure.resourcemanager.datafactory.implementation.PrivateEndPointConnectionsImpl; import com.azure.resourcemanager.datafactory.implementation.PrivateEndpointConnectionOperationsImpl; +import com.azure.resourcemanager.datafactory.implementation.PrivateEndPointConnectionsImpl; import com.azure.resourcemanager.datafactory.implementation.PrivateLinkResourcesImpl; import com.azure.resourcemanager.datafactory.implementation.TriggerRunsImpl; import com.azure.resourcemanager.datafactory.implementation.TriggersImpl; @@ -66,8 +66,8 @@ import com.azure.resourcemanager.datafactory.models.Operations; import com.azure.resourcemanager.datafactory.models.PipelineRuns; import com.azure.resourcemanager.datafactory.models.Pipelines; -import com.azure.resourcemanager.datafactory.models.PrivateEndPointConnections; import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionOperations; +import com.azure.resourcemanager.datafactory.models.PrivateEndPointConnections; import com.azure.resourcemanager.datafactory.models.PrivateLinkResources; import com.azure.resourcemanager.datafactory.models.TriggerRuns; import com.azure.resourcemanager.datafactory.models.Triggers; @@ -136,8 +136,10 @@ private DataFactoryManager(HttpPipeline httpPipeline, AzureProfile 
profile, Dura Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null."); Objects.requireNonNull(profile, "'profile' cannot be null."); this.clientObject = new DataFactoryManagementClientBuilder().pipeline(httpPipeline) - .endpoint(profile.getEnvironment().getResourceManagerEndpoint()).subscriptionId(profile.getSubscriptionId()) - .defaultPollInterval(defaultPollInterval).buildClient(); + .endpoint(profile.getEnvironment().getResourceManagerEndpoint()) + .subscriptionId(profile.getSubscriptionId()) + .defaultPollInterval(defaultPollInterval) + .buildClient(); } /** @@ -288,12 +290,19 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile Objects.requireNonNull(profile, "'profile' cannot be null."); StringBuilder userAgentBuilder = new StringBuilder(); - userAgentBuilder.append("azsdk-java").append("-").append("com.azure.resourcemanager.datafactory") - .append("/").append("1.0.0-beta.27"); + userAgentBuilder.append("azsdk-java") + .append("-") + .append("com.azure.resourcemanager.datafactory") + .append("/") + .append("1.0.0-beta.28"); if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) { - userAgentBuilder.append(" (").append(Configuration.getGlobalConfiguration().get("java.version")) - .append("; ").append(Configuration.getGlobalConfiguration().get("os.name")).append("; ") - .append(Configuration.getGlobalConfiguration().get("os.version")).append("; auto-generated)"); + userAgentBuilder.append(" (") + .append(Configuration.getGlobalConfiguration().get("java.version")) + .append("; ") + .append(Configuration.getGlobalConfiguration().get("os.name")) + .append("; ") + .append(Configuration.getGlobalConfiguration().get("os.version")) + .append("; auto-generated)"); } else { userAgentBuilder.append(" (auto-generated)"); } @@ -312,18 +321,21 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile policies.add(new UserAgentPolicy(userAgentBuilder.toString())); policies.add(new AddHeadersFromContextPolicy()); policies.add(new RequestIdPolicy()); - policies.addAll(this.policies.stream().filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_CALL) + policies.addAll(this.policies.stream() + .filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_CALL) .collect(Collectors.toList())); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(retryPolicy); policies.add(new AddDatePolicy()); policies.add(new ArmChallengeAuthenticationPolicy(credential, scopes.toArray(new String[0]))); policies.addAll(this.policies.stream() - .filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_RETRY).collect(Collectors.toList())); + .filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_RETRY) + .collect(Collectors.toList())); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpPipeline httpPipeline = new HttpPipelineBuilder().httpClient(httpClient) - .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); + .policies(policies.toArray(new HttpPipelinePolicy[0])) + .build(); return new DataFactoryManager(httpPipeline, profile, defaultPollInterval); } } @@ -537,7 +549,7 @@ public ManagedPrivateEndpoints managedPrivateEndpoints() { } /** - * Gets the resource collection API of CredentialOperations. It manages ManagedIdentityCredentialResource. + * Gets the resource collection API of CredentialOperations. * * @return Resource collection API of CredentialOperations. 
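The `DataFactoryManager` changes above restructure how the user agent string and HTTP pipeline are assembled inside `authenticate(...)`. Callers normally reach that path through the manager's `Configurable` entry point; the sketch below assumes the usual Fluent Lite `configure()` methods (`withLogOptions`, `withDefaultPollInterval`), and the specific log level and poll interval are illustrative only.

```java
// Illustrative sketch (not part of this patch): the log options and poll interval
// threaded through the pipeline-building code above usually come from the manager's
// Configurable entry point, roughly like this.
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.policy.HttpLogDetailLevel;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import java.time.Duration;

public final class ConfiguredManagerSketch {
    public static DataFactoryManager build() {
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        TokenCredential credential = new DefaultAzureCredentialBuilder().build();
        return DataFactoryManager.configure()
            .withLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BASIC))
            .withDefaultPollInterval(Duration.ofSeconds(30))
            .authenticate(credential, profile);
    }
}
```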
*/ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/CredentialOperationsClient.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/CredentialOperationsClient.java index 77ebaacadd0fd..8c8deb634bd55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/CredentialOperationsClient.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/CredentialOperationsClient.java @@ -9,7 +9,7 @@ import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; /** * An instance of this class provides access to all the operations defined in CredentialOperationsClient. @@ -26,7 +26,7 @@ public interface CredentialOperationsClient { * @return a list of credential resources as paginated response with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) - PagedIterable listByFactory(String resourceGroupName, String factoryName); + PagedIterable listByFactory(String resourceGroupName, String factoryName); /** * List credentials. @@ -40,8 +40,7 @@ public interface CredentialOperationsClient { * @return a list of credential resources as paginated response with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) - PagedIterable listByFactory(String resourceGroupName, String factoryName, - Context context); + PagedIterable listByFactory(String resourceGroupName, String factoryName, Context context); /** * Creates or updates a credential. @@ -59,9 +58,8 @@ PagedIterable listByFactory(String resou * @return credential resource type along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) - Response createOrUpdateWithResponse(String resourceGroupName, - String factoryName, String credentialName, ManagedIdentityCredentialResourceInner credential, String ifMatch, - Context context); + Response createOrUpdateWithResponse(String resourceGroupName, String factoryName, + String credentialName, CredentialResourceInner credential, String ifMatch, Context context); /** * Creates or updates a credential. @@ -76,8 +74,8 @@ Response createOrUpdateWithResponse(Stri * @return credential resource type. */ @ServiceMethod(returns = ReturnType.SINGLE) - ManagedIdentityCredentialResourceInner createOrUpdate(String resourceGroupName, String factoryName, - String credentialName, ManagedIdentityCredentialResourceInner credential); + CredentialResourceInner createOrUpdate(String resourceGroupName, String factoryName, String credentialName, + CredentialResourceInner credential); /** * Gets a credential. @@ -94,7 +92,7 @@ ManagedIdentityCredentialResourceInner createOrUpdate(String resourceGroupName, * @return a credential along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) - Response getWithResponse(String resourceGroupName, String factoryName, + Response getWithResponse(String resourceGroupName, String factoryName, String credentialName, String ifNoneMatch, Context context); /** @@ -109,7 +107,7 @@ Response getWithResponse(String resource * @return a credential. 
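`CredentialOperationsClient` now traffics in `CredentialResourceInner` throughout. A minimal sketch of the updated signatures is below; obtaining the client through `manager.serviceClient().getCredentialOperations()` is an assumption about the usual Fluent Lite layout, and the resource names are placeholders.

```java
// Illustrative sketch (not part of this patch): exercising the renamed
// CredentialResourceInner signatures on CredentialOperationsClient. The
// serviceClient().getCredentialOperations() accessor is assumed; names are placeholders.
import com.azure.core.util.Context;
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import com.azure.resourcemanager.datafactory.fluent.CredentialOperationsClient;
import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner;

public final class CredentialClientSketch {
    public static void listAndGet(DataFactoryManager manager) {
        CredentialOperationsClient client = manager.serviceClient().getCredentialOperations();
        for (CredentialResourceInner credential
            : client.listByFactory("exampleResourceGroup", "exampleFactoryName", Context.NONE)) {
            System.out.println(credential.name());
        }
        CredentialResourceInner single
            = client.get("exampleResourceGroup", "exampleFactoryName", "exampleCredential");
        System.out.println(single.id());
    }
}
```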
*/ @ServiceMethod(returns = ReturnType.SINGLE) - ManagedIdentityCredentialResourceInner get(String resourceGroupName, String factoryName, String credentialName); + CredentialResourceInner get(String resourceGroupName, String factoryName, String credentialName); /** * Deletes a credential. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/PrivateEndpointConnectionOperationsClient.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/PrivateEndpointConnectionOperationsClient.java index e2021ec007550..5a73a6d874037 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/PrivateEndpointConnectionOperationsClient.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/PrivateEndpointConnectionOperationsClient.java @@ -12,8 +12,7 @@ import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequestResource; /** - * An instance of this class provides access to all the operations defined in - * PrivateEndpointConnectionOperationsClient. + * An instance of this class provides access to all the operations defined in PrivateEndpointConnectionOperationsClient. */ public interface PrivateEndpointConnectionOperationsClient { /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java index 6751db296e5c4..9141559dd3f1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java @@ -54,8 +54,8 @@ public ActivityRunsQueryResponseInner withValue(List value) { } /** - * Get the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Get the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @return the continuationToken value. */ @@ -64,8 +64,8 @@ public String continuationToken() { } /** - * Set the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Set the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @param continuationToken the continuationToken value to set. * @return the ActivityRunsQueryResponseInner object itself. 
@@ -82,8 +82,9 @@ public ActivityRunsQueryResponseInner withContinuationToken(String continuationT */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model ActivityRunsQueryResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model ActivityRunsQueryResponseInner")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java index 1f3c3c46aac77..d75ec9fccd065 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class AmazonMwsLinkedServiceTypeProperties { private Object endpoint; /* - * The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, - * separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + * The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */ @JsonProperty(value = "marketplaceID", required = true) private Object marketplaceId; @@ -58,8 +57,7 @@ public final class AmazonMwsLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -71,8 +69,7 @@ public final class AmazonMwsLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -250,8 +247,8 @@ public AmazonMwsLinkedServiceTypeProperties withUseHostVerification(Object useHo } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -260,8 +257,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. 
The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the AmazonMwsLinkedServiceTypeProperties object itself. @@ -272,8 +269,8 @@ public AmazonMwsLinkedServiceTypeProperties withUsePeerVerification(Object usePe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -282,8 +279,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonMwsLinkedServiceTypeProperties object itself. @@ -300,23 +297,27 @@ public AmazonMwsLinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model AmazonMwsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model AmazonMwsLinkedServiceTypeProperties")); } if (marketplaceId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property marketplaceId in model AmazonMwsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property marketplaceId in model AmazonMwsLinkedServiceTypeProperties")); } if (sellerId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property sellerId in model AmazonMwsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property sellerId in model AmazonMwsLinkedServiceTypeProperties")); } if (mwsAuthToken() != null) { mwsAuthToken().validate(); } if (accessKeyId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property accessKeyId in model AmazonMwsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property accessKeyId in model AmazonMwsLinkedServiceTypeProperties")); } if (secretKey() != null) { secretKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java index b1a73c79903c4..5f5038cbba22e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class AmazonRdsForLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -82,8 +81,8 @@ public AmazonRdsForLinkedServiceTypeProperties withPassword(SecretBase password) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -92,8 +91,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRdsForLinkedServiceTypeProperties object itself. @@ -110,8 +109,9 @@ public AmazonRdsForLinkedServiceTypeProperties withEncryptedCredential(String en */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AmazonRdsForLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AmazonRdsForLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java index 34988c0ec670d..ca47af1864baa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java @@ -34,8 +34,7 @@ public final class AmazonRdsForSqlServerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -117,8 +116,8 @@ public AmazonRdsForSqlServerLinkedServiceTypeProperties withPassword(SecretBase } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -127,8 +126,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRdsForSqlServerLinkedServiceTypeProperties object itself. @@ -166,8 +165,9 @@ public SqlAlwaysEncryptedProperties alwaysEncryptedSettings() { */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AmazonRdsForSqlServerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AmazonRdsForSqlServerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java index 367dc576ac571..3abe994432c27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java @@ -39,15 +39,13 @@ public final class AmazonRedshiftLinkedServiceTypeProperties { private Object database; /* - * The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is - * 5439. Type: integer (or Expression with resultType integer). + * The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "port") private Object port; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -145,8 +143,8 @@ public AmazonRedshiftLinkedServiceTypeProperties withDatabase(Object database) { } /** - * Get the port property: The TCP port number that the Amazon Redshift server uses to listen for client - * connections. The default value is 5439. Type: integer (or Expression with resultType integer). + * Get the port property: The TCP port number that the Amazon Redshift server uses to listen for client connections. + * The default value is 5439. Type: integer (or Expression with resultType integer). * * @return the port value. */ @@ -155,8 +153,8 @@ public Object port() { } /** - * Set the port property: The TCP port number that the Amazon Redshift server uses to listen for client - * connections. The default value is 5439. Type: integer (or Expression with resultType integer). + * Set the port property: The TCP port number that the Amazon Redshift server uses to listen for client connections. + * The default value is 5439. Type: integer (or Expression with resultType integer). * * @param port the port value to set. * @return the AmazonRedshiftLinkedServiceTypeProperties object itself. @@ -167,8 +165,8 @@ public AmazonRedshiftLinkedServiceTypeProperties withPort(Object port) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -177,8 +175,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRedshiftLinkedServiceTypeProperties object itself. 
@@ -195,15 +193,17 @@ public AmazonRedshiftLinkedServiceTypeProperties withEncryptedCredential(String */ public void validate() { if (server() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property server in model AmazonRedshiftLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property server in model AmazonRedshiftLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model AmazonRedshiftLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model AmazonRedshiftLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java index 27065ee1a796d..0aa0ddb7f8ac7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class AmazonS3CompatibleLinkedServiceTypeProperties { /* - * The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string - * (or Expression with resultType string). + * The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accessKeyId") private Object accessKeyId; @@ -27,23 +26,19 @@ public final class AmazonS3CompatibleLinkedServiceTypeProperties { private SecretBase secretAccessKey; /* - * This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional - * property; change it only if you want to try a different service endpoint or want to switch between https and - * http. Type: string (or Expression with resultType string). + * This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* - * If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean - * (or Expression with resultType boolean). + * If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "forcePathStyle") private Object forcePathStyle; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -99,9 +94,9 @@ public AmazonS3CompatibleLinkedServiceTypeProperties withSecretAccessKey(SecretB } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Get the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -110,9 +105,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the AmazonS3CompatibleLinkedServiceTypeProperties object itself. @@ -145,8 +140,8 @@ public AmazonS3CompatibleLinkedServiceTypeProperties withForcePathStyle(Object f } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -155,8 +150,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonS3CompatibleLinkedServiceTypeProperties object itself. 
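The recurring change across these generated models is the switch in every validate() method from LOGGER.logExceptionAsError(...) to the fluent LOGGER.atError().log(...) builder. The sketch below uses a hypothetical ExampleTypeProperties class (not part of this patch) to show the two patterns side by side; both go through azure-core's ClientLogger, log the IllegalArgumentException at ERROR level, and hand it back so it can be thrown in the same statement.

    import com.azure.core.util.logging.ClientLogger;

    public final class ExampleTypeProperties {
        private static final ClientLogger LOGGER = new ClientLogger(ExampleTypeProperties.class);

        private Object endpoint;

        public Object endpoint() {
            return this.endpoint;
        }

        public void validate() {
            if (endpoint() == null) {
                // previous generated pattern:
                // throw LOGGER.logExceptionAsError(new IllegalArgumentException(
                //     "Missing required property endpoint in model ExampleTypeProperties"));

                // pattern produced by this regeneration:
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property endpoint in model ExampleTypeProperties"));
            }
        }
    }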
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java index 97285b76d33f7..43bfe345ff1b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java @@ -244,8 +244,9 @@ public AmazonS3DatasetTypeProperties withCompression(DatasetCompression compress */ public void validate() { if (bucketName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property bucketName in model AmazonS3DatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property bucketName in model AmazonS3DatasetTypeProperties")); } if (format() != null) { format().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java index cdd4e761f6239..bef6e21325166 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java @@ -14,15 +14,13 @@ @Fluent public final class AmazonS3LinkedServiceTypeProperties { /* - * The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string - * (or Expression with resultType string). + * The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType") private Object authenticationType; /* - * The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or - * Expression with resultType string). + * The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accessKeyId") private Object accessKeyId; @@ -34,9 +32,7 @@ public final class AmazonS3LinkedServiceTypeProperties { private SecretBase secretAccessKey; /* - * This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only - * if you want to try a different service endpoint or want to switch between https and http. Type: string (or - * Expression with resultType string). + * This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "serviceUrl") private Object serviceUrl; @@ -48,8 +44,7 @@ public final class AmazonS3LinkedServiceTypeProperties { private SecretBase sessionToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -128,8 +123,8 @@ public AmazonS3LinkedServiceTypeProperties withSecretAccessKey(SecretBase secret /** * Get the serviceUrl property: This value specifies the endpoint to access with the S3 Connector. This is an - * optional property; change it only if you want to try a different service endpoint or want to switch between - * https and http. Type: string (or Expression with resultType string). + * optional property; change it only if you want to try a different service endpoint or want to switch between https + * and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -139,8 +134,8 @@ public Object serviceUrl() { /** * Set the serviceUrl property: This value specifies the endpoint to access with the S3 Connector. This is an - * optional property; change it only if you want to try a different service endpoint or want to switch between - * https and http. Type: string (or Expression with resultType string). + * optional property; change it only if you want to try a different service endpoint or want to switch between https + * and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the AmazonS3LinkedServiceTypeProperties object itself. @@ -171,8 +166,8 @@ public AmazonS3LinkedServiceTypeProperties withSessionToken(SecretBase sessionTo } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -181,8 +176,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonS3LinkedServiceTypeProperties object itself. 
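For callers, the contract of validate() is unchanged by the logging switch: unset required properties still produce an IllegalArgumentException, only the call that logs it differs. A minimal usage sketch for AmazonRedshiftLinkedServiceTypeProperties follows; withDatabase(...) and withPort(...) appear in the hunks above, while the no-argument constructor and withServer(...) are assumed to follow the same generated pattern, and all values are placeholders.

    AmazonRedshiftLinkedServiceTypeProperties redshift = new AmazonRedshiftLinkedServiceTypeProperties()
        .withServer("redshift.example.com")   // required: validate() throws if this is left null
        .withDatabase("dev")                  // required: validate() throws if this is left null
        .withPort(5439);                      // optional: the property docs give 5439 as the default
    redshift.validate();                      // passes once server and database are set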
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java index c18235b782671..bd82a9826ec19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java @@ -107,18 +107,21 @@ public AppFiguresLinkedServiceTypeProperties withClientKey(SecretBase clientKey) */ public void validate() { if (username() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property username in model AppFiguresLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model AppFiguresLinkedServiceTypeProperties")); } if (password() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property password in model AppFiguresLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property password in model AppFiguresLinkedServiceTypeProperties")); } else { password().validate(); } if (clientKey() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientKey in model AppFiguresLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientKey in model AppFiguresLinkedServiceTypeProperties")); } else { clientKey().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java index 5491e55e41bae..6337d04ef5ad5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java @@ -19,8 +19,7 @@ public final class AppendVariableActivityTypeProperties { private String variableName; /* - * Value to be appended. Type: could be a static value matching type of the variable item or Expression with - * resultType matching type of the variable item + * Value to be appended. Type: could be a static value matching type of the variable item or Expression with resultType matching type of the variable item */ @JsonProperty(value = "value") private Object value; @@ -52,8 +51,8 @@ public AppendVariableActivityTypeProperties withVariableName(String variableName } /** - * Get the value property: Value to be appended. Type: could be a static value matching type of the variable item - * or Expression with resultType matching type of the variable item. + * Get the value property: Value to be appended. 
Type: could be a static value matching type of the variable item or + * Expression with resultType matching type of the variable item. * * @return the value value. */ @@ -62,8 +61,8 @@ public Object value() { } /** - * Set the value property: Value to be appended. Type: could be a static value matching type of the variable item - * or Expression with resultType matching type of the variable item. + * Set the value property: Value to be appended. Type: could be a static value matching type of the variable item or + * Expression with resultType matching type of the variable item. * * @param value the value value to set. * @return the AppendVariableActivityTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java index 4709203c3f960..c599d5f5daff8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class AsanaLinkedServiceTypeProperties { private SecretBase apiToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -54,8 +53,8 @@ public AsanaLinkedServiceTypeProperties withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -64,8 +63,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AsanaLinkedServiceTypeProperties object itself. 
@@ -82,8 +81,9 @@ public AsanaLinkedServiceTypeProperties withEncryptedCredential(String encrypted */ public void validate() { if (apiToken() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property apiToken in model AsanaLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property apiToken in model AsanaLinkedServiceTypeProperties")); } else { apiToken().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java index 5f658f88600af..46ff88791532a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java @@ -59,8 +59,8 @@ public AvroDatasetTypeProperties withLocation(DatasetLocation location) { } /** - * Get the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with - * resultType string). + * Get the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with resultType + * string). * * @return the avroCompressionCodec value. */ @@ -69,8 +69,8 @@ public Object avroCompressionCodec() { } /** - * Set the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with - * resultType string). + * Set the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with resultType + * string). * * @param avroCompressionCodec the avroCompressionCodec value to set. * @return the AvroDatasetTypeProperties object itself. 
@@ -107,8 +107,9 @@ public AvroDatasetTypeProperties withAvroCompressionLevel(Integer avroCompressio */ public void validate() { if (location() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property location in model AvroDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property location in model AvroDatasetTypeProperties")); } else { location().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java index 1a8b86a54f7d4..c000e405ae957 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java @@ -52,8 +52,9 @@ public AzPowerShellSetupTypeProperties withVersion(String version) { */ public void validate() { if (version() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property version in model AzPowerShellSetupTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property version in model AzPowerShellSetupTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java index c31379cc72fd7..6bf7df8a8690f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java @@ -47,8 +47,7 @@ public final class AzureBatchLinkedServiceTypeProperties { private LinkedServiceReference linkedServiceName; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -166,8 +165,8 @@ public AzureBatchLinkedServiceTypeProperties withLinkedServiceName(LinkedService } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -176,8 +175,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBatchLinkedServiceTypeProperties object itself. @@ -214,23 +213,27 @@ public AzureBatchLinkedServiceTypeProperties withCredential(CredentialReference */ public void validate() { if (accountName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property accountName in model AzureBatchLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property accountName in model AzureBatchLinkedServiceTypeProperties")); } if (accessKey() != null) { accessKey().validate(); } if (batchUri() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property batchUri in model AzureBatchLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property batchUri in model AzureBatchLinkedServiceTypeProperties")); } if (poolName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property poolName in model AzureBatchLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property poolName in model AzureBatchLinkedServiceTypeProperties")); } if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model AzureBatchLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model AzureBatchLinkedServiceTypeProperties")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java index 3aad097aa460f..f2bc706f1e211 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java @@ -147,8 +147,8 @@ public AzureBlobDatasetTypeProperties withModifiedDatetimeStart(Object modifiedD } /** - * Get the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression - * with resultType string). + * Get the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression with + * resultType string). * * @return the modifiedDatetimeEnd value. */ @@ -157,8 +157,8 @@ public Object modifiedDatetimeEnd() { } /** - * Set the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression - * with resultType string). + * Set the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression with + * resultType string). * * @param modifiedDatetimeEnd the modifiedDatetimeEnd value to set. 
* @return the AzureBlobDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java index 0ead154a92d54..0cdfa1f99078b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class AzureBlobFSLinkedServiceTypeProperties { private Object accountKey; /* - * The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string - * (or Expression with resultType string). + * The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -40,23 +39,19 @@ public final class AzureBlobFSLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -68,17 +63,13 @@ public final class AzureBlobFSLinkedServiceTypeProperties { private CredentialReference credential; /* - * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for - * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* - * The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is - * 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be - * AzureKeyVaultSecretReference. + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; @@ -168,8 +159,8 @@ public AzureBlobFSLinkedServiceTypeProperties withServicePrincipalId(Object serv } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Storage Gen2 account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Storage Gen2 account. * * @return the servicePrincipalKey value. */ @@ -178,8 +169,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Storage Gen2 account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Storage Gen2 account. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureBlobFSLinkedServiceTypeProperties object itself. @@ -212,9 +203,9 @@ public AzureBlobFSLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -223,9 +214,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureBlobFSLinkedServiceTypeProperties object itself. @@ -236,8 +227,8 @@ public AzureBlobFSLinkedServiceTypeProperties withAzureCloudType(Object azureClo } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -246,8 +237,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBlobFSLinkedServiceTypeProperties object itself. @@ -278,9 +269,9 @@ public AzureBlobFSLinkedServiceTypeProperties withCredential(CredentialReference } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -289,9 +280,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the AzureBlobFSLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java index 77e023ee88324..eb61a4895f877 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java @@ -17,8 +17,7 @@ @Fluent public final class AzureBlobStorageLinkedServiceTypeProperties { /* - * The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, - * SecureString or AzureKeyVaultSecretReference. + * The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/ @JsonProperty(value = "connectionString") private Object connectionString; @@ -30,8 +29,7 @@ public final class AzureBlobStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference accountKey; /* - * SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint - * property. Type: string, SecureString or AzureKeyVaultSecretReference. + * SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "sasUri") private Object sasUri; @@ -43,15 +41,13 @@ public final class AzureBlobStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference sasToken; /* - * Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri - * property. + * Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. */ @JsonProperty(value = "serviceEndpoint") private Object serviceEndpoint; /* - * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or - * Expression with resultType string). + * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -63,30 +59,25 @@ public final class AzureBlobStorageLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general - * purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). + * Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accountKind") private Object accountKind; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -104,8 +95,7 @@ public final class AzureBlobStorageLinkedServiceTypeProperties { private AzureStorageAuthenticationType authenticationType; /* - * Container uri of the Azure Blob Storage resource only support for anonymous access. Type: string (or Expression - * with resultType string). + * Container uri of the Azure Blob Storage resource only support for anonymous access. Type: string (or Expression with resultType string). */ @JsonProperty(value = "containerUri") private Object containerUri; @@ -289,9 +279,9 @@ public AzureBlobStorageLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -300,9 +290,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureBlobStorageLinkedServiceTypeProperties object itself. @@ -337,8 +327,8 @@ public AzureBlobStorageLinkedServiceTypeProperties withAccountKind(Object accoun } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -347,8 +337,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBlobStorageLinkedServiceTypeProperties object itself. 
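The AzureBlobStorageLinkedServiceTypeProperties hunks above reflow the descriptions of the three mutually exclusive ways to address the storage account (connectionString, sasUri, serviceEndpoint) together with the service-principal settings. A configuration sketch follows; it assumes a withConnectionString(...) setter exists, following the same generated withX pattern as the withAccountKind(...) and withTenant(...) setters shown in this patch, and the values are placeholders only.

    AzureBlobStorageLinkedServiceTypeProperties blob = new AzureBlobStorageLinkedServiceTypeProperties()
        // connectionString, sasUri and serviceEndpoint are documented as mutually exclusive,
        // so only one of them is set here
        .withConnectionString("DefaultEndpointsProtocol=https;AccountName=<account>;...")
        .withAccountKind("StorageV2");        // allowed values per the docs: Storage, StorageV2, BlobStorage, BlockBlobStorage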
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java index 24f23f66d1abc..4602d99c8d230 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java @@ -14,15 +14,13 @@ @Fluent public final class AzureDataExplorerCommandActivityTypeProperties { /* - * A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with - * resultType string). + * A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). */ @JsonProperty(value = "command", required = true) private Object command; /* - * Control command timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) + * Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) */ @JsonProperty(value = "commandTimeout") private Object commandTimeout; @@ -84,8 +82,9 @@ public AzureDataExplorerCommandActivityTypeProperties withCommandTimeout(Object */ public void validate() { if (command() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property command in model AzureDataExplorerCommandActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property command in model AzureDataExplorerCommandActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java index a91df31c6ee2b..3f8670a2ee9cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java @@ -16,15 +16,13 @@ @Fluent public final class AzureDataExplorerLinkedServiceTypeProperties { /* - * The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format - * https://..kusto.windows.net. Type: string (or Expression with resultType string) + * The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://..kusto.windows.net. Type: string (or Expression with resultType string) */ @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* - * The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression - * with resultType string). + * The ID of the service principal used to authenticate against Azure Data Explorer. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -42,8 +40,7 @@ public final class AzureDataExplorerLinkedServiceTypeProperties { private Object database; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; @@ -61,9 +58,9 @@ public AzureDataExplorerLinkedServiceTypeProperties() { } /** - * Get the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the - * format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with - * resultType string). + * Get the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format + * https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType + * string). * * @return the endpoint value. */ @@ -72,9 +69,9 @@ public Object endpoint() { } /** - * Set the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the - * format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with - * resultType string). + * Set the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format + * https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType + * string). * * @param endpoint the endpoint value to set. * @return the AzureDataExplorerLinkedServiceTypeProperties object itself. 
@@ -195,15 +192,17 @@ public AzureDataExplorerLinkedServiceTypeProperties withCredential(CredentialRef */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model AzureDataExplorerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model AzureDataExplorerLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model AzureDataExplorerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model AzureDataExplorerLinkedServiceTypeProperties")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java index 01bf6c235a476..756319f42194e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class AzureDataLakeAnalyticsLinkedServiceTypeProperties { private Object accountName; /* - * The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or - * Expression with resultType string). + * The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -34,22 +33,19 @@ public final class AzureDataLakeAnalyticsLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant", required = true) private Object tenant; /* - * Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or - * Expression with resultType string). + * Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */ @JsonProperty(value = "subscriptionId") private Object subscriptionId; /* - * Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or - * Expression with resultType string). + * Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "resourceGroupName") private Object resourceGroupName; @@ -61,8 +57,7 @@ public final class AzureDataLakeAnalyticsLinkedServiceTypeProperties { private Object dataLakeAnalyticsUri; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -118,8 +113,8 @@ public AzureDataLakeAnalyticsLinkedServiceTypeProperties withServicePrincipalId( } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Analytics account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Analytics account. * * @return the servicePrincipalKey value. */ @@ -128,8 +123,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Analytics account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Analytics account. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureDataLakeAnalyticsLinkedServiceTypeProperties object itself. @@ -228,8 +223,8 @@ public AzureDataLakeAnalyticsLinkedServiceTypeProperties withDataLakeAnalyticsUr } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +233,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDataLakeAnalyticsLinkedServiceTypeProperties object itself. 
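// A minimal, self-contained sketch of the required-property check that the validate() hunks in this
// patch switch to: ClientLogger.logExceptionAsError(...) is replaced by the LoggingEventBuilder form
// LOGGER.atError().log(exception), which logs at ERROR level and hands the exception back to the
// throw statement, mirroring the usage visible in the surrounding hunks. Assumes
// com.azure.core.util.logging.ClientLogger from azure-core; the class and property names below are
// hypothetical stand-ins.
import com.azure.core.util.logging.ClientLogger;

final class RequiredPropertyCheckSketch {
    private static final ClientLogger LOGGER = new ClientLogger(RequiredPropertyCheckSketch.class);

    private Object accountName; // stand-in for a required model property

    public void validate() {
        if (accountName == null) {
            // new pattern used by the regenerated models
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property accountName in model RequiredPropertyCheckSketch"));
        }
    }
}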
@@ -256,15 +251,17 @@ public AzureDataLakeAnalyticsLinkedServiceTypeProperties withEncryptedCredential */ public void validate() { if (accountName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property accountName in model AzureDataLakeAnalyticsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property accountName in model AzureDataLakeAnalyticsLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); } if (tenant() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tenant in model AzureDataLakeAnalyticsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tenant in model AzureDataLakeAnalyticsLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java index 2e42dbe6255a1..4d277cb6ffebc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java @@ -22,8 +22,7 @@ public final class AzureDataLakeStoreLinkedServiceTypeProperties { private Object dataLakeStoreUri; /* - * The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or - * Expression with resultType string). + * The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -35,16 +34,13 @@ public final class AzureDataLakeStoreLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; @@ -56,22 +52,19 @@ public final class AzureDataLakeStoreLinkedServiceTypeProperties { private Object accountName; /* - * Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression - * with resultType string). + * Data Lake Store account subscription ID (if different from Data Factory account). 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "subscriptionId") private Object subscriptionId; /* - * Data Lake Store account resource group name (if different from Data Factory account). Type: string (or - * Expression with resultType string). + * Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */ @JsonProperty(value = "resourceGroupName") private Object resourceGroupName; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -133,8 +126,8 @@ public AzureDataLakeStoreLinkedServiceTypeProperties withServicePrincipalId(Obje } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Store account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Store account. * * @return the servicePrincipalKey value. */ @@ -143,8 +136,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Store account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Store account. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureDataLakeStoreLinkedServiceTypeProperties object itself. @@ -177,9 +170,9 @@ public AzureDataLakeStoreLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -188,9 +181,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureDataLakeStoreLinkedServiceTypeProperties object itself. @@ -265,8 +258,8 @@ public AzureDataLakeStoreLinkedServiceTypeProperties withResourceGroupName(Objec } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -275,8 +268,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDataLakeStoreLinkedServiceTypeProperties object itself. @@ -313,8 +306,9 @@ public AzureDataLakeStoreLinkedServiceTypeProperties withCredential(CredentialRe */ public void validate() { if (dataLakeStoreUri() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dataLakeStoreUri in model AzureDataLakeStoreLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dataLakeStoreUri in model AzureDataLakeStoreLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java index 55d60560d3e8e..c32e464f00195 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java @@ -51,8 +51,7 @@ public AzureDatabricksDeltaLakeDatasetTypeProperties withTable(Object table) { } /** - * Get the database property: The database name of delta table. Type: string (or Expression with resultType - * string). + * Get the database property: The database name of delta table. Type: string (or Expression with resultType string). * * @return the database value. */ @@ -61,8 +60,7 @@ public Object database() { } /** - * Set the database property: The database name of delta table. Type: string (or Expression with resultType - * string). + * Set the database property: The database name of delta table. Type: string (or Expression with resultType string). * * @param database the database value to set. * @return the AzureDatabricksDeltaLakeDatasetTypeProperties object itself. 
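// A minimal sketch of the Delta Lake dataset properties documented above. withTable appears in the
// hunk header above; withDatabase is assumed from the same generated with* convention. Because both
// properties are typed Object ("string (or Expression with resultType string)"), a caller can pass a
// literal string as below or, by common Data Factory convention (an assumption here), an expression
// object of the form {"value": "@...", "type": "Expression"} that is resolved at run time.
AzureDatabricksDeltaLakeDatasetTypeProperties deltaDataset
    = new AzureDatabricksDeltaLakeDatasetTypeProperties()
        .withDatabase("sales")    // database name of the delta table (placeholder)
        .withTable("orders");     // delta table name (placeholder)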
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java index 25d849d35f2d3..b38340394c270 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java @@ -16,29 +16,25 @@ @Fluent public final class AzureDatabricksDetltaLakeLinkedServiceTypeProperties { /* - * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with - * resultType string). + * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ @JsonProperty(value = "domain", required = true) private Object domain; /* - * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. - * Type: string, SecureString or AzureKeyVaultSecretReference. + * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "accessToken") private SecretBase accessToken; /* - * The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or - * Expression with resultType string). + * The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clusterId") private Object clusterId; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -108,8 +104,8 @@ public AzureDatabricksDetltaLakeLinkedServiceTypeProperties withAccessToken(Secr } /** - * Get the clusterId property: The id of an existing interactive cluster that will be used for all runs of this - * job. Type: string (or Expression with resultType string). + * Get the clusterId property: The id of an existing interactive cluster that will be used for all runs of this job. + * Type: string (or Expression with resultType string). * * @return the clusterId value. */ @@ -118,8 +114,8 @@ public Object clusterId() { } /** - * Set the clusterId property: The id of an existing interactive cluster that will be used for all runs of this - * job. Type: string (or Expression with resultType string). + * Set the clusterId property: The id of an existing interactive cluster that will be used for all runs of this job. + * Type: string (or Expression with resultType string). * * @param clusterId the clusterId value to set. * @return the AzureDatabricksDetltaLakeLinkedServiceTypeProperties object itself. 
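// A minimal sketch for the linked service properties above: domain is the workspace URL and
// accessToken is a SecretBase; the field comment above allows string, SecureString or
// AzureKeyVaultSecretReference, and SecureString is assumed here to be the generated SecretBase
// subtype in com.azure.resourcemanager.datafactory.models. withAccessToken appears in a hunk header
// above; withDomain and withClusterId are assumed from the generated with* convention. All values
// are placeholders.
AzureDatabricksDetltaLakeLinkedServiceTypeProperties deltaLakeService
    = new AzureDatabricksDetltaLakeLinkedServiceTypeProperties()
        .withDomain("https://westeurope.azuredatabricks.net")
        .withAccessToken(new SecureString().withValue("<databricks-access-token>"))
        .withClusterId("0000-000000-abcdefgh");
deltaLakeService.validate(); // domain is the required property checked in the hunk that follows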
@@ -130,8 +126,8 @@ public AzureDatabricksDetltaLakeLinkedServiceTypeProperties withClusterId(Object } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -140,8 +136,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDatabricksDetltaLakeLinkedServiceTypeProperties object itself. @@ -200,8 +196,9 @@ public AzureDatabricksDetltaLakeLinkedServiceTypeProperties withWorkspaceResourc */ public void validate() { if (domain() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property domain in model AzureDatabricksDetltaLakeLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property domain in model AzureDatabricksDetltaLakeLinkedServiceTypeProperties")); } if (accessToken() != null) { accessToken().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java index ef34a4356f510..e78525da0ee49 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java @@ -18,22 +18,19 @@ @Fluent public final class AzureDatabricksLinkedServiceTypeProperties { /* - * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with - * resultType string). + * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ @JsonProperty(value = "domain", required = true) private Object domain; /* - * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. - * Type: string (or Expression with resultType string). + * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accessToken") private SecretBase accessToken; /* - * Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression - * with resultType string). + * Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "authentication") private Object authentication; @@ -45,41 +42,31 @@ public final class AzureDatabricksLinkedServiceTypeProperties { private Object workspaceResourceId; /* - * The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or - * Expression with resultType string). + * The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). */ @JsonProperty(value = "existingClusterId") private Object existingClusterId; /* - * The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression - * with resultType string). + * The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). */ @JsonProperty(value = "instancePoolId") private Object instancePoolId; /* - * If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance - * pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or - * Expression with resultType string). + * If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "newClusterVersion") private Object newClusterVersion; /* - * If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job - * cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 - * or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and - * can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: - * string (or Expression with resultType string). + * If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "newClusterNumOfWorker") private Object newClusterNumOfWorker; /* - * The node type of the new job cluster. This property is required if newClusterVersion is specified and - * instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or - * Expression with resultType string). + * The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). */ @JsonProperty(value = "newClusterNodeType") private Object newClusterNodeType; @@ -106,44 +93,37 @@ public final class AzureDatabricksLinkedServiceTypeProperties { private Map newClusterCustomTags; /* - * Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType - * string). 
+ * Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "newClusterLogDestination") private Object newClusterLogDestination; /* - * The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: - * string (or Expression with resultType string). + * The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */ @JsonProperty(value = "newClusterDriverNodeType") private Object newClusterDriverNodeType; /* - * User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType - * array of strings). + * User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */ @JsonProperty(value = "newClusterInitScripts") private Object newClusterInitScripts; /* - * Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk - * behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType - * boolean). + * Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "newClusterEnableElasticDisk") private Object newClusterEnableElasticDisk; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string - * (or Expression with resultType string). + * The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). */ @JsonProperty(value = "policyId") private Object policyId; @@ -207,8 +187,8 @@ public AzureDatabricksLinkedServiceTypeProperties withAccessToken(SecretBase acc } /** - * Get the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST - * API. Type: string (or Expression with resultType string). + * Get the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST API. + * Type: string (or Expression with resultType string). * * @return the authentication value. */ @@ -217,8 +197,8 @@ public Object authentication() { } /** - * Set the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST - * API. Type: string (or Expression with resultType string). + * Set the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST API. + * Type: string (or Expression with resultType string). * * @param authentication the authentication value to set. * @return the AzureDatabricksLinkedServiceTypeProperties object itself. @@ -348,8 +328,8 @@ public AzureDatabricksLinkedServiceTypeProperties withNewClusterNumOfWorker(Obje /** * Get the newClusterNodeType property: The node type of the new job cluster. 
This property is required if - * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this - * property is ignored. Type: string (or Expression with resultType string). + * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property + * is ignored. Type: string (or Expression with resultType string). * * @return the newClusterNodeType value. */ @@ -359,8 +339,8 @@ public Object newClusterNodeType() { /** * Set the newClusterNodeType property: The node type of the new job cluster. This property is required if - * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this - * property is ignored. Type: string (or Expression with resultType string). + * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property + * is ignored. Type: string (or Expression with resultType string). * * @param newClusterNodeType the newClusterNodeType value to set. * @return the AzureDatabricksLinkedServiceTypeProperties object itself. @@ -459,8 +439,8 @@ public AzureDatabricksLinkedServiceTypeProperties withNewClusterLogDestination(O } /** - * Get the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is - * ignored in instance pool configurations. Type: string (or Expression with resultType string). + * Get the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is ignored + * in instance pool configurations. Type: string (or Expression with resultType string). * * @return the newClusterDriverNodeType value. */ @@ -469,8 +449,8 @@ public Object newClusterDriverNodeType() { } /** - * Set the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is - * ignored in instance pool configurations. Type: string (or Expression with resultType string). + * Set the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is ignored + * in instance pool configurations. Type: string (or Expression with resultType string). * * @param newClusterDriverNodeType the newClusterDriverNodeType value to set. * @return the AzureDatabricksLinkedServiceTypeProperties object itself. @@ -528,8 +508,8 @@ public Object newClusterEnableElasticDisk() { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -538,8 +518,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDatabricksLinkedServiceTypeProperties object itself. 
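// A worked example of the newClusterNumOfWorker format described above: a string-formatted Int32
// such as "2" pins a fixed worker count, while "1:10" requests autoscale from 1 (min) to 10 (max).
// withNewClusterNumOfWorker appears in a hunk header above; withDomain, withNewClusterVersion and
// withNewClusterNodeType are assumed from the generated with* convention, and the version and node
// type values are placeholders.
AzureDatabricksLinkedServiceTypeProperties newJobCluster
    = new AzureDatabricksLinkedServiceTypeProperties()
        .withDomain("https://westeurope.azuredatabricks.net")
        .withNewClusterVersion("13.3.x-scala2.12")   // Spark version for the new job cluster
        .withNewClusterNumOfWorker("1:10")           // autoscale 1..10 workers; "2" would be a fixed size
        .withNewClusterNodeType("Standard_D3_v2");   // required when instancePoolId is not specified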
@@ -598,8 +578,9 @@ public AzureDatabricksLinkedServiceTypeProperties withCredential(CredentialRefer */ public void validate() { if (domain() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property domain in model AzureDatabricksLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property domain in model AzureDatabricksLinkedServiceTypeProperties")); } if (accessToken() != null) { accessToken().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java index 51b8b9f9ad900..289a90faaa955 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java @@ -33,8 +33,7 @@ public final class AzureFileStorageLinkedServiceTypeProperties { private SecretBase password; /* - * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or - * AzureKeyVaultSecretReference. + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "connectionString") private Object connectionString; @@ -46,8 +45,7 @@ public final class AzureFileStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference accountKey; /* - * SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, - * SecureString or AzureKeyVaultSecretReference. + * SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "sasUri") private Object sasUri; @@ -59,8 +57,7 @@ public final class AzureFileStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference sasToken; /* - * The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with - * resultType string). + * The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileShare") private Object fileShare; @@ -72,8 +69,7 @@ public final class AzureFileStorageLinkedServiceTypeProperties { private Object snapshot; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -273,8 +269,8 @@ public AzureFileStorageLinkedServiceTypeProperties withSnapshot(Object snapshot) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -283,8 +279,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureFileStorageLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java index 7ac9b88068cd2..97fde77138009 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java @@ -23,24 +23,20 @@ public final class AzureFunctionActivityTypeProperties { private AzureFunctionActivityMethod method; /* - * Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType - * string) + * Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) */ @JsonProperty(value = "functionName", required = true) private Object functionName; /* - * Represents the headers that will be sent to the request. For example, to set the language and type on a request: - * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with - * resultType string). + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). */ @JsonProperty(value = "headers") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) - private Map headers; + private Map headers; /* - * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET - * method Type: string (or Expression with resultType string). + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */ @JsonProperty(value = "body") private Object body; @@ -100,7 +96,7 @@ public AzureFunctionActivityTypeProperties withFunctionName(Object functionName) * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.headers; } @@ -112,7 +108,7 @@ public Map headers() { * @param headers the headers value to set. * @return the AzureFunctionActivityTypeProperties object itself. 
*/ - public AzureFunctionActivityTypeProperties withHeaders(Map headers) { + public AzureFunctionActivityTypeProperties withHeaders(Map headers) { this.headers = headers; return this; } @@ -146,12 +142,14 @@ public AzureFunctionActivityTypeProperties withBody(Object body) { */ public void validate() { if (method() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property method in model AzureFunctionActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property method in model AzureFunctionActivityTypeProperties")); } if (functionName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property functionName in model AzureFunctionActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property functionName in model AzureFunctionActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java index 7b62e38c9d23d..d68031b820f47 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java @@ -16,8 +16,7 @@ @Fluent public final class AzureFunctionLinkedServiceTypeProperties { /* - * The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. Type: - string (or Expression with resultType string). + * The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. Type: string (or Expression with resultType string). */ @JsonProperty(value = "functionAppUrl", required = true) private Object functionAppUrl; @@ -29,8 +28,7 @@ public final class AzureFunctionLinkedServiceTypeProperties { private SecretBase functionKey; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -48,8 +46,7 @@ public final class AzureFunctionLinkedServiceTypeProperties { private Object resourceId; /* - * Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression - with resultType string). + * Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authentication") private Object authentication; @@ -103,8 +100,8 @@ public AzureFunctionLinkedServiceTypeProperties withFunctionKey(SecretBase funct } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication.
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -113,8 +110,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureFunctionLinkedServiceTypeProperties object itself. @@ -195,8 +192,9 @@ public AzureFunctionLinkedServiceTypeProperties withAuthentication(Object authen */ public void validate() { if (functionAppUrl() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property functionAppUrl in model AzureFunctionLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property functionAppUrl in model AzureFunctionLinkedServiceTypeProperties")); } if (functionKey() != null) { functionKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java index d1acaea26763b..233f4d6c2f9d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class AzureKeyVaultLinkedServiceTypeProperties { /* - * The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with - * resultType string). + * The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "baseUrl", required = true) private Object baseUrl; @@ -82,8 +81,9 @@ public AzureKeyVaultLinkedServiceTypeProperties withCredential(CredentialReferen */ public void validate() { if (baseUrl() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property baseUrl in model AzureKeyVaultLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property baseUrl in model AzureKeyVaultLinkedServiceTypeProperties")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java index cf6ab0ca6413d..afa9f5ff02844 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java @@ -16,27 +16,21 @@ @Fluent public final class AzureMLBatchExecutionActivityTypeProperties { /* - * Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web - * service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters - * property of the Azure ML batch execution request. + * Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. */ @JsonProperty(value = "globalParameters") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map globalParameters; /* - * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects - * specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the - * Azure ML batch execution request. + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. */ @JsonProperty(value = "webServiceOutputs") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map webServiceOutputs; /* - * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects - * specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the - * Azure ML batch execution request. + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. 
*/ @JsonProperty(value = "webServiceInputs") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) @@ -49,9 +43,9 @@ public AzureMLBatchExecutionActivityTypeProperties() { } /** - * Get the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service - * endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. - * Values will be passed in the GlobalParameters property of the Azure ML batch execution request. + * Get the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. + * Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be + * passed in the GlobalParameters property of the Azure ML batch execution request. * * @return the globalParameters value. */ @@ -60,9 +54,9 @@ public Map globalParameters() { } /** - * Set the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service - * endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. - * Values will be passed in the GlobalParameters property of the Azure ML batch execution request. + * Set the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. + * Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be + * passed in the GlobalParameters property of the Azure ML batch execution request. * * @param globalParameters the globalParameters value to set. * @return the AzureMLBatchExecutionActivityTypeProperties object itself. @@ -73,9 +67,9 @@ public AzureMLBatchExecutionActivityTypeProperties withGlobalParameters(Map webServiceOutputs() { } /** - * Set the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service - * Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed - * in the WebServiceOutputs property of the Azure ML batch execution request. + * Set the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs + * to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the + * WebServiceOutputs property of the Azure ML batch execution request. * * @param webServiceOutputs the webServiceOutputs value to set. * @return the AzureMLBatchExecutionActivityTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java index 1c0d2df8e61b4..962683b577e80 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java @@ -31,39 +31,31 @@ public final class AzureMLExecutePipelineActivityTypeProperties { private Object version; /* - * Run history experiment name of the pipeline run. 
This information will be passed in the ExperimentName property - * of the published pipeline execution request. Type: string (or Expression with resultType string). + * Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). */ @JsonProperty(value = "experimentName") private Object experimentName; /* - * Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline - * parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the - * published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). + * Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). */ @JsonProperty(value = "mlPipelineParameters") private Object mlPipelineParameters; /* - * Dictionary used for changing data path assignments without retraining. Values will be passed in the - * dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with - * resultType object). + * Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with resultType object). */ @JsonProperty(value = "dataPathAssignments") private Object dataPathAssignments; /* - * The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the - * published pipeline execution request. Type: string (or Expression with resultType string). + * The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). */ @JsonProperty(value = "mlParentRunId") private Object mlParentRunId; /* - * Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed - * in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression - * with resultType boolean). + * Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "continueOnStepFailure") private Object continueOnStepFailure; @@ -75,8 +67,8 @@ public AzureMLExecutePipelineActivityTypeProperties() { } /** - * Get the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with - * resultType string). + * Get the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with resultType + * string). * * @return the mlPipelineId value. */ @@ -85,8 +77,8 @@ public Object mlPipelineId() { } /** - * Set the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with - * resultType string). + * Set the mlPipelineId property: ID of the published Azure ML pipeline. 
Type: string (or Expression with resultType + * string). * * @param mlPipelineId the mlPipelineId value to set. * @return the AzureMLExecutePipelineActivityTypeProperties object itself. @@ -141,9 +133,9 @@ public AzureMLExecutePipelineActivityTypeProperties withVersion(Object version) } /** - * Get the experimentName property: Run history experiment name of the pipeline run. This information will be - * passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression - * with resultType string). + * Get the experimentName property: Run history experiment name of the pipeline run. This information will be passed + * in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with + * resultType string). * * @return the experimentName value. */ @@ -152,9 +144,9 @@ public Object experimentName() { } /** - * Set the experimentName property: Run history experiment name of the pipeline run. This information will be - * passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression - * with resultType string). + * Set the experimentName property: Run history experiment name of the pipeline run. This information will be passed + * in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with + * resultType string). * * @param experimentName the experimentName value to set. * @return the AzureMLExecutePipelineActivityTypeProperties object itself. @@ -216,8 +208,8 @@ public AzureMLExecutePipelineActivityTypeProperties withDataPathAssignments(Obje /** * Get the mlParentRunId property: The parent Azure ML Service pipeline run id. This information will be passed in - * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with - * resultType string). + * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType + * string). * * @return the mlParentRunId value. */ @@ -227,8 +219,8 @@ public Object mlParentRunId() { /** * Set the mlParentRunId property: The parent Azure ML Service pipeline run id. This information will be passed in - * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with - * resultType string). + * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType + * string). * * @param mlParentRunId the mlParentRunId value to set. * @return the AzureMLExecutePipelineActivityTypeProperties object itself. @@ -239,9 +231,9 @@ public AzureMLExecutePipelineActivityTypeProperties withMlParentRunId(Object mlP } /** - * Get the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a - * step fails. This information will be passed in the continueOnStepFailure property of the published pipeline - * execution request. Type: boolean (or Expression with resultType boolean). + * Get the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a step + * fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution + * request. Type: boolean (or Expression with resultType boolean). * * @return the continueOnStepFailure value. 
*/ @@ -250,9 +242,9 @@ public Object continueOnStepFailure() { } /** - * Set the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a - * step fails. This information will be passed in the continueOnStepFailure property of the published pipeline - * execution request. Type: boolean (or Expression with resultType boolean). + * Set the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a step + * fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution + * request. Type: boolean (or Expression with resultType boolean). * * @param continueOnStepFailure the continueOnStepFailure value to set. * @return the AzureMLExecutePipelineActivityTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java index d9681de265b00..8fb5be0ab6ac2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class AzureMLLinkedServiceTypeProperties { /* - * The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with - * resultType string). + * The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */ @JsonProperty(value = "mlEndpoint", required = true) private Object mlEndpoint; @@ -28,43 +27,37 @@ public final class AzureMLLinkedServiceTypeProperties { private SecretBase apiKey; /* - * The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with - * resultType string). + * The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */ @JsonProperty(value = "updateResourceEndpoint") private Object updateResourceEndpoint; /* - * The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML - * Studio web service. Type: string (or Expression with resultType string). + * The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* - * The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure - * ML Studio web service. + * The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. */ @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "tenant") private Object tenant; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with - * resultType string). + * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authentication") private Object authentication; @@ -162,8 +155,8 @@ public AzureMLLinkedServiceTypeProperties withServicePrincipalId(Object serviceP } /** - * Get the servicePrincipalKey property: The key of the service principal used to authenticate against the - * ARM-based updateResourceEndpoint of an Azure ML Studio web service. + * Get the servicePrincipalKey property: The key of the service principal used to authenticate against the ARM-based + * updateResourceEndpoint of an Azure ML Studio web service. * * @return the servicePrincipalKey value. */ @@ -172,8 +165,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The key of the service principal used to authenticate against the - * ARM-based updateResourceEndpoint of an Azure ML Studio web service. + * Set the servicePrincipalKey property: The key of the service principal used to authenticate against the ARM-based + * updateResourceEndpoint of an Azure ML Studio web service. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureMLLinkedServiceTypeProperties object itself. @@ -206,8 +199,8 @@ public AzureMLLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -216,8 +209,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMLLinkedServiceTypeProperties object itself. 
@@ -256,12 +249,14 @@ public AzureMLLinkedServiceTypeProperties withAuthentication(Object authenticati */ public void validate() { if (mlEndpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property mlEndpoint in model AzureMLLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property mlEndpoint in model AzureMLLinkedServiceTypeProperties")); } if (apiKey() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property apiKey in model AzureMLLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property apiKey in model AzureMLLinkedServiceTypeProperties")); } else { apiKey().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java index 5c0a38fb588f3..c02bd3a5e1a12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java @@ -33,36 +33,31 @@ public final class AzureMLServiceLinkedServiceTypeProperties { private Object mlWorkspaceName; /* - * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with - * resultType string). + * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authentication") private Object authentication; /* - * The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service - * pipeline. Type: string (or Expression with resultType string). + * The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* - * The key of the service principal used to authenticate against the endpoint of a published Azure ML Service - * pipeline. + * The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. */ @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -162,8 +157,8 @@ public AzureMLServiceLinkedServiceTypeProperties withAuthentication(Object authe } /** - * Get the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint - * of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + * Get the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint of + * a published Azure ML Service pipeline. Type: string (or Expression with resultType string). * * @return the servicePrincipalId value. */ @@ -172,8 +167,8 @@ public Object servicePrincipalId() { } /** - * Set the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint - * of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + * Set the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint of + * a published Azure ML Service pipeline. Type: string (or Expression with resultType string). * * @param servicePrincipalId the servicePrincipalId value to set. * @return the AzureMLServiceLinkedServiceTypeProperties object itself. @@ -228,8 +223,8 @@ public AzureMLServiceLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +233,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMLServiceLinkedServiceTypeProperties object itself. 
@@ -256,16 +251,19 @@ public AzureMLServiceLinkedServiceTypeProperties withEncryptedCredential(String */ public void validate() { if (subscriptionId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property subscriptionId in model AzureMLServiceLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property subscriptionId in model AzureMLServiceLinkedServiceTypeProperties")); } if (resourceGroupName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property resourceGroupName in model AzureMLServiceLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property resourceGroupName in model AzureMLServiceLinkedServiceTypeProperties")); } if (mlWorkspaceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property mlWorkspaceName in model AzureMLServiceLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property mlWorkspaceName in model AzureMLServiceLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java index 24c502340580b..c1302aaba1a88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class AzureMLUpdateResourceActivityTypeProperties { /* - * Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with - * resultType string). + * Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). */ @JsonProperty(value = "trainedModelName", required = true) private Object trainedModelName; @@ -28,8 +27,7 @@ public final class AzureMLUpdateResourceActivityTypeProperties { private LinkedServiceReference trainedModelLinkedServiceName; /* - * The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the - * update operation. Type: string (or Expression with resultType string). + * The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "trainedModelFilePath", required = true) private Object trainedModelFilePath; @@ -116,18 +114,21 @@ public AzureMLUpdateResourceActivityTypeProperties withTrainedModelFilePath(Obje */ public void validate() { if (trainedModelName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property trainedModelName in model AzureMLUpdateResourceActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property trainedModelName in model AzureMLUpdateResourceActivityTypeProperties")); } if (trainedModelLinkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property trainedModelLinkedServiceName in model AzureMLUpdateResourceActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property trainedModelLinkedServiceName in model AzureMLUpdateResourceActivityTypeProperties")); } else { trainedModelLinkedServiceName().validate(); } if (trainedModelFilePath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property trainedModelFilePath in model AzureMLUpdateResourceActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property trainedModelFilePath in model AzureMLUpdateResourceActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java index 4b30085cd8eb7..ac170f579b292 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class AzureMariaDBLinkedServiceTypeProperties { private AzureKeyVaultSecretReference pwd; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public AzureMariaDBLinkedServiceTypeProperties withPwd(AzureKeyVaultSecretRefere } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMariaDBLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java index 6e76cde187a52..10e4695fbd0b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class AzureMySqlLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -82,8 +81,8 @@ public AzureMySqlLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretRef } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -92,8 +91,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMySqlLinkedServiceTypeProperties object itself. 
@@ -110,8 +109,9 @@ public AzureMySqlLinkedServiceTypeProperties withEncryptedCredential(String encr */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AzureMySqlLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AzureMySqlLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java index ac6abf63cf48b..d4a6a2368f4c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class AzurePostgreSqlLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public AzurePostgreSqlLinkedServiceTypeProperties withPassword(AzureKeyVaultSecr } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzurePostgreSqlLinkedServiceTypeProperties object itself. 
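Aside from the javadoc re-wrapping, the recurring functional change in these hunks is the validation logging call: `throw LOGGER.logExceptionAsError(new IllegalArgumentException(...))` becomes `throw LOGGER.atError().log(new IllegalArgumentException(...))`. The standalone sketch below is illustrative only and not part of the generated sources; the class and property names are placeholders, and it assumes the azure-core ClientLogger these models already use.

    import com.azure.core.util.logging.ClientLogger;

    // Minimal stand-in for a generated *TypeProperties model, showing only the validate() pattern.
    public final class RequiredPropertySketch {
        private static final ClientLogger LOGGER = new ClientLogger(RequiredPropertySketch.class);

        // Placeholder for a required property such as connectionString or mlEndpoint.
        private Object connectionString;

        public RequiredPropertySketch withConnectionString(Object connectionString) {
            this.connectionString = connectionString;
            return this;
        }

        public void validate() {
            if (connectionString == null) {
                // Builder-style call introduced by this patch: it logs at ERROR and returns the
                // exception so it can be thrown in the same statement, matching the old
                // logExceptionAsError behavior.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property connectionString in model RequiredPropertySketch"));
            }
        }
    }
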
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java index 972ec84c71f1a..8fde167599b4d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java @@ -13,8 +13,7 @@ @Fluent public final class AzurePostgreSqlTableDatasetTypeProperties { /* - * The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or - * Expression with resultType string). + * The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableName") private Object tableName; @@ -38,8 +37,8 @@ public AzurePostgreSqlTableDatasetTypeProperties() { } /** - * Get the tableName property: The table name of the Azure PostgreSQL database which includes both schema and - * table. Type: string (or Expression with resultType string). + * Get the tableName property: The table name of the Azure PostgreSQL database which includes both schema and table. + * Type: string (or Expression with resultType string). * * @return the tableName value. */ @@ -48,8 +47,8 @@ public Object tableName() { } /** - * Set the tableName property: The table name of the Azure PostgreSQL database which includes both schema and - * table. Type: string (or Expression with resultType string). + * Set the tableName property: The table name of the Azure PostgreSQL database which includes both schema and table. + * Type: string (or Expression with resultType string). * * @param tableName the tableName value to set. * @return the AzurePostgreSqlTableDatasetTypeProperties object itself. 
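Many of the properties touched here are documented as "Type: string (or Expression with resultType string)" and are therefore declared as Object. The hypothetical snippet below (not part of the patch) illustrates what that means for callers of AzurePostgreSqlTableDatasetTypeProperties: the fluent setter accepts either a literal string or an expression object; the Map shape used for the expression is an assumption based on the Data Factory Expression form with "value" and "type": "Expression" fields.

    import java.util.Map;

    import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlTableDatasetTypeProperties;

    public class TableNameUsageSketch {
        public static void main(String[] args) {
            // Literal table name, resolved as-is.
            AzurePostgreSqlTableDatasetTypeProperties literal =
                new AzurePostgreSqlTableDatasetTypeProperties().withTableName("public.orders");

            // Runtime expression (assumed ADF Expression object shape: value + type "Expression").
            AzurePostgreSqlTableDatasetTypeProperties parameterized =
                new AzurePostgreSqlTableDatasetTypeProperties()
                    .withTableName(Map.of("value", "@dataset().tableName", "type", "Expression"));

            System.out.println(literal.tableName());
            System.out.println(parameterized.tableName());
        }
    }
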
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java index c62eaafdb04a3..68dd8c122ea00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java @@ -54,8 +54,9 @@ public AzureSearchIndexDatasetTypeProperties withIndexName(Object indexName) { */ public void validate() { if (indexName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property indexName in model AzureSearchIndexDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property indexName in model AzureSearchIndexDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java index 8006ad348d442..e08819f675b9a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class AzureSearchLinkedServiceTypeProperties { private SecretBase key; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -80,8 +79,8 @@ public AzureSearchLinkedServiceTypeProperties withKey(SecretBase key) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -90,8 +89,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSearchLinkedServiceTypeProperties object itself. 
@@ -108,8 +107,9 @@ public AzureSearchLinkedServiceTypeProperties withEncryptedCredential(String enc */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model AzureSearchLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model AzureSearchLinkedServiceTypeProperties")); } if (key() != null) { key().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java index a79bea3b2c7ac..d787c832c3c05 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java @@ -17,8 +17,7 @@ @Fluent public final class AzureSqlDWLinkedServiceTypeProperties { /* - * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or - * AzureKeyVaultSecretReference. + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; @@ -30,8 +29,7 @@ public final class AzureSqlDWLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or - * Expression with resultType string). + * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -43,23 +41,19 @@ public final class AzureSqlDWLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -185,9 +179,9 @@ public AzureSqlDWLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -196,9 +190,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlDWLinkedServiceTypeProperties object itself. @@ -209,8 +203,8 @@ public AzureSqlDWLinkedServiceTypeProperties withAzureCloudType(Object azureClou } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -219,8 +213,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlDWLinkedServiceTypeProperties object itself. 
@@ -257,8 +251,9 @@ public AzureSqlDWLinkedServiceTypeProperties withCredential(CredentialReference */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AzureSqlDWLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AzureSqlDWLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java index d0509d0ff808c..c6322e696f4a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java @@ -30,8 +30,7 @@ public final class AzureSqlDatabaseLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression - * with resultType string). + * The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -43,23 +42,19 @@ public final class AzureSqlDatabaseLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -191,9 +186,9 @@ public AzureSqlDatabaseLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. 
Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -202,9 +197,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlDatabaseLinkedServiceTypeProperties object itself. @@ -215,8 +210,8 @@ public AzureSqlDatabaseLinkedServiceTypeProperties withAzureCloudType(Object azu } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -225,8 +220,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlDatabaseLinkedServiceTypeProperties object itself. 
@@ -284,8 +279,9 @@ public AzureSqlDatabaseLinkedServiceTypeProperties withCredential(CredentialRefe */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AzureSqlDatabaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AzureSqlDatabaseLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java index d54bfa9f7b717..dca9bee932a9f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java @@ -30,8 +30,7 @@ public final class AzureSqlMILinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or - * Expression with resultType string). + * The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -43,23 +42,19 @@ public final class AzureSqlMILinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -191,9 +186,9 @@ public AzureSqlMILinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). 
+ * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -202,9 +197,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlMILinkedServiceTypeProperties object itself. @@ -215,8 +210,8 @@ public AzureSqlMILinkedServiceTypeProperties withAzureCloudType(Object azureClou } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -225,8 +220,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlMILinkedServiceTypeProperties object itself. 
@@ -284,8 +279,9 @@ public AzureSqlMILinkedServiceTypeProperties withCredential(CredentialReference */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model AzureSqlMILinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model AzureSqlMILinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java index 52456b95168ed..193dded6820f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class AzureStorageLinkedServiceTypeProperties { /* - * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or - * AzureKeyVaultSecretReference. + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "connectionString") private Object connectionString; @@ -27,8 +26,7 @@ public final class AzureStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference accountKey; /* - * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, - * SecureString or AzureKeyVaultSecretReference. + * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "sasUri") private Object sasUri; @@ -40,8 +38,7 @@ public final class AzureStorageLinkedServiceTypeProperties { private AzureKeyVaultSecretReference sasToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -137,8 +134,8 @@ public AzureStorageLinkedServiceTypeProperties withSasToken(AzureKeyVaultSecretR } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -147,8 +144,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureStorageLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java index f77f8b4b8c7c2..89cd3f01c9005 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java @@ -14,24 +14,19 @@ @Fluent public final class AzureSynapseArtifactsLinkedServiceTypeProperties { /* - * https://.dev.azuresynapse.net, Azure Synapse Analytics workspace URL. Type: string (or Expression - * with resultType string). + * https://.dev.azuresynapse.net, Azure Synapse Analytics workspace URL. Type: string (or Expression with resultType string). */ @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* - * Required to specify MSI, if using system assigned managed identity as authentication method. Type: string (or - * Expression with resultType string). + * Required to specify MSI, if using system assigned managed identity as authentication method. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authentication") private Object authentication; /* - * The resource ID of the Synapse workspace. The format should be: - * /subscriptions/{subscriptionID}/resourceGroups/{resourceGroup}/providers/Microsoft.Synapse/workspaces/{ - * workspaceName}. - * Type: string (or Expression with resultType string). + * The resource ID of the Synapse workspace. The format should be: /subscriptions/{subscriptionID}/resourceGroups/{resourceGroup}/providers/Microsoft.Synapse/workspaces/{workspaceName}. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "workspaceResourceId") private Object workspaceResourceId; @@ -117,8 +112,9 @@ public AzureSynapseArtifactsLinkedServiceTypeProperties withWorkspaceResourceId( */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model AzureSynapseArtifactsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model AzureSynapseArtifactsLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java index 7a43513475f45..0a8c11f4be1ae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java @@ -54,8 +54,9 @@ public AzureTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { if (tableName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tableName in model AzureTableDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tableName in model AzureTableDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java index 02d4dccaed0d6..93216164bc521 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java @@ -80,8 +80,9 @@ public BinaryDatasetTypeProperties withCompression(DatasetCompression compressio */ public void validate() { if (location() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property location in model BinaryDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property location in model BinaryDatasetTypeProperties")); } else { location().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java index fc96526696215..2fcbc195e0ed2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java @@ -16,17 +16,13 @@ @Fluent public final 
class BlobEventsTriggerTypeProperties { /* - * The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' - * will only fire the trigger for blobs in the december folder under the records container. At least one of these - * must be provided: blobPathBeginsWith, blobPathEndsWith. + * The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */ @JsonProperty(value = "blobPathBeginsWith") private String blobPathBeginsWith; /* - * The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will - * only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: - * blobPathBeginsWith, blobPathEndsWith. + * The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */ @JsonProperty(value = "blobPathEndsWith") private String blobPathEndsWith; @@ -57,8 +53,8 @@ public BlobEventsTriggerTypeProperties() { /** * Get the blobPathBeginsWith property: The blob path must begin with the pattern provided for trigger to fire. For - * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the - * records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records + * container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @return the blobPathBeginsWith value. */ @@ -68,8 +64,8 @@ public String blobPathBeginsWith() { /** * Set the blobPathBeginsWith property: The blob path must begin with the pattern provided for trigger to fire. For - * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the - * records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records + * container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @param blobPathBeginsWith the blobPathBeginsWith value to set. * @return the BlobEventsTriggerTypeProperties object itself. @@ -81,8 +77,8 @@ public BlobEventsTriggerTypeProperties withBlobPathBeginsWith(String blobPathBeg /** * Get the blobPathEndsWith property: The blob path must end with the pattern provided for trigger to fire. For - * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least - * one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one + * of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @return the blobPathEndsWith value. */ @@ -92,8 +88,8 @@ public String blobPathEndsWith() { /** * Set the blobPathEndsWith property: The blob path must end with the pattern provided for trigger to fire. 
For - * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least - * one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one + * of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @param blobPathEndsWith the blobPathEndsWith value to set. * @return the BlobEventsTriggerTypeProperties object itself. @@ -170,12 +166,14 @@ public BlobEventsTriggerTypeProperties withScope(String scope) { */ public void validate() { if (events() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property events in model BlobEventsTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property events in model BlobEventsTriggerTypeProperties")); } if (scope() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property scope in model BlobEventsTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property scope in model BlobEventsTriggerTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java index b7b88f5ce4c6b..7d20ea0e4341d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java @@ -105,12 +105,14 @@ public BlobTriggerTypeProperties withLinkedService(LinkedServiceReference linked */ public void validate() { if (folderPath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property folderPath in model BlobTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property folderPath in model BlobTriggerTypeProperties")); } if (linkedService() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedService in model BlobTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedService in model BlobTriggerTypeProperties")); } else { linkedService().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java index 4eeae3107299c..3c0d7d3049a86 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java @@ -45,8 +45,7 @@ public final class CassandraLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used 
for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -160,8 +159,8 @@ public CassandraLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -170,8 +169,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CassandraLinkedServiceTypeProperties object itself. @@ -188,8 +187,9 @@ public CassandraLinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model CassandraLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model CassandraLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java index fb5ba6ae7936f..05b3de503311e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java @@ -31,8 +31,8 @@ public CassandraTableDatasetTypeProperties() { } /** - * Get the tableName property: The table name of the Cassandra database. Type: string (or Expression with - * resultType string). + * Get the tableName property: The table name of the Cassandra database. Type: string (or Expression with resultType + * string). * * @return the tableName value. */ @@ -41,8 +41,8 @@ public Object tableName() { } /** - * Set the tableName property: The table name of the Cassandra database. Type: string (or Expression with - * resultType string). + * Set the tableName property: The table name of the Cassandra database. Type: string (or Expression with resultType + * string). * * @param tableName the tableName value to set. * @return the CassandraTableDatasetTypeProperties object itself. 
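The hunks above all make the same two mechanical changes: Javadoc comments are re-wrapped to a wider line limit, and required-property checks move from LOGGER.logExceptionAsError(...) to the fluent LOGGER.atError().log(...) call, which logs the exception and hands it back so it can be thrown directly. Below is a minimal sketch of the resulting validate() pattern; the model name and the host property are illustrative only, and it assumes an azure-core version whose LoggingEventBuilder.log(...) returns the passed exception, as the generated code above relies on.

import com.azure.core.util.logging.ClientLogger;

public final class SampleLinkedServiceTypeProperties {
    private static final ClientLogger LOGGER = new ClientLogger(SampleLinkedServiceTypeProperties.class);

    // Illustrative required property; the real models mark these with @JsonProperty(required = true).
    private Object host;

    public Object host() {
        return this.host;
    }

    public SampleLinkedServiceTypeProperties withHost(Object host) {
        this.host = host;
        return this;
    }

    public void validate() {
        if (host() == null) {
            // Pattern adopted in this patch: atError().log(...) logs the exception and returns it for throwing.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property host in model SampleLinkedServiceTypeProperties"));
        }
    }
}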
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java index 51fa268c2c7f3..6d951463be731 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java @@ -80,14 +80,16 @@ public ChainingTriggerTypeProperties withRunDimension(String runDimension) { */ public void validate() { if (dependsOn() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dependsOn in model ChainingTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dependsOn in model ChainingTriggerTypeProperties")); } else { dependsOn().forEach(e -> e.validate()); } if (runDimension() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property runDimension in model ChainingTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property runDimension in model ChainingTriggerTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java index 46da029d6a4a7..edc86279eb792 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java @@ -14,8 +14,8 @@ import java.util.List; /** - * A Azure Data Factory object which automatically detects data changes at the source and then sends the updated data - * to the destination. + * A Azure Data Factory object which automatically detects data changes at the source and then sends the updated data to + * the destination. 
*/ @Fluent public final class ChangeDataCapture { @@ -217,20 +217,22 @@ public void validate() { folder().validate(); } if (sourceConnectionsInfo() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property sourceConnectionsInfo in model ChangeDataCapture")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property sourceConnectionsInfo in model ChangeDataCapture")); } else { sourceConnectionsInfo().forEach(e -> e.validate()); } if (targetConnectionsInfo() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property targetConnectionsInfo in model ChangeDataCapture")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property targetConnectionsInfo in model ChangeDataCapture")); } else { targetConnectionsInfo().forEach(e -> e.validate()); } if (policy() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property policy in model ChangeDataCapture")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property policy in model ChangeDataCapture")); } else { policy().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java index e10b2ca942e5d..587867b6cd17e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java @@ -304,8 +304,9 @@ public ChangeDataCaptureResourceInner withStatus(String status) { */ public void validate() { if (innerProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerProperties in model ChangeDataCaptureResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerProperties in model ChangeDataCaptureResourceInner")); } else { innerProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java index 2f4e7838d76e9..9995266145c9a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java @@ -105,16 +105,19 @@ public CmdkeySetupTypeProperties withPassword(SecretBase password) { */ public void validate() { if (targetName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property targetName in model CmdkeySetupTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property targetName in model CmdkeySetupTypeProperties")); } if (username() == null) { - throw LOGGER.logExceptionAsError( - new 
IllegalArgumentException("Missing required property username in model CmdkeySetupTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model CmdkeySetupTypeProperties")); } if (password() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property password in model CmdkeySetupTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property password in model CmdkeySetupTypeProperties")); } else { password().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java index a33e70b25e3d6..b03b39d080855 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java @@ -25,8 +25,7 @@ public CommonDataServiceForAppsEntityDatasetTypeProperties() { } /** - * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @return the entityName value. */ @@ -35,8 +34,7 @@ public Object entityName() { } /** - * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @param entityName the entityName value to set. * @return the CommonDataServiceForAppsEntityDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java index 9fe39f8e71d44..06e8c761350da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java @@ -15,53 +15,43 @@ @Fluent public final class CommonDataServiceForAppsLinkedServiceTypeProperties { /* - * The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps - * Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or - * Expression with resultType string). + * The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "deploymentType", required = true) private Object deploymentType; /* - * The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and - * not allowed for online. Type: string (or Expression with resultType string). + * The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ @JsonProperty(value = "hostName") private Object hostname; /* - * The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not - * allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + * The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "port") private Object port; /* - * The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not - * allowed for on-prem. Type: string (or Expression with resultType string). + * The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */ @JsonProperty(value = "serviceUri") private Object serviceUri; /* - * The organization name of the Common Data Service for Apps instance. The property is required for on-prem and - * required for online when there are more than one Common Data Service for Apps instances associated with the - * user. Type: string (or Expression with resultType string). + * The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). */ @JsonProperty(value = "organizationName") private Object organizationName; /* - * The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, - * 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online - * scenario. Type: string (or Expression with resultType string). + * The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType", required = true) private Object authenticationType; /* - * User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType - * string). + * User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). */ @JsonProperty(value = "username") private Object username; @@ -73,31 +63,25 @@ public final class CommonDataServiceForAppsLinkedServiceTypeProperties { private SecretBase password; /* - * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: - * string (or Expression with resultType string). + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* - * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for - * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* - * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is - * 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be - * AzureKeyVaultSecretReference. + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -313,9 +297,9 @@ public CommonDataServiceForAppsLinkedServiceTypeProperties withServicePrincipalI } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -324,9 +308,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the CommonDataServiceForAppsLinkedServiceTypeProperties object itself. @@ -365,8 +349,8 @@ public SecretBase servicePrincipalCredential() { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -375,8 +359,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CommonDataServiceForAppsLinkedServiceTypeProperties object itself. @@ -393,12 +377,14 @@ public CommonDataServiceForAppsLinkedServiceTypeProperties withEncryptedCredenti */ public void validate() { if (deploymentType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property deploymentType in model CommonDataServiceForAppsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property deploymentType in model CommonDataServiceForAppsLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model CommonDataServiceForAppsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model CommonDataServiceForAppsLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java index aeebc90fc0cec..813c0faba488d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class ConcurLinkedServiceTypeProperties { /* - * Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. - * Type: object. + * Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; @@ -46,8 +45,7 @@ public final class ConcurLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -59,8 +57,7 @@ public final class ConcurLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -198,8 +195,8 @@ public ConcurLinkedServiceTypeProperties withUseHostVerification(Object useHostV } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -208,8 +205,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ConcurLinkedServiceTypeProperties object itself. @@ -220,8 +217,8 @@ public ConcurLinkedServiceTypeProperties withUsePeerVerification(Object usePeerV } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -230,8 +227,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ConcurLinkedServiceTypeProperties object itself. 
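These linked-service property models are plain fluent builders: the with* setters in the surrounding hunks return the model itself, so calls chain. A short usage sketch for ConcurLinkedServiceTypeProperties follows, limited to setters visible in this patch and assuming the public no-argument constructor that these generated models expose elsewhere in the patch; the credential value is a placeholder.

import com.azure.resourcemanager.datafactory.fluent.models.ConcurLinkedServiceTypeProperties;

public class ConcurLinkedServicePropertiesSample {
    public static void main(String[] args) {
        // useHostVerification/usePeerVerification control SSL verification and default to true;
        // encryptedCredential carries a credential already encrypted by the integration runtime.
        ConcurLinkedServiceTypeProperties properties = new ConcurLinkedServiceTypeProperties()
            .withUseHostVerification(true)
            .withUsePeerVerification(true)
            .withEncryptedCredential("<encrypted-credential>");

        System.out.println(properties.encryptedCredential());
    }
}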
@@ -248,12 +245,14 @@ public ConcurLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (clientId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientId in model ConcurLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientId in model ConcurLinkedServiceTypeProperties")); } if (username() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property username in model ConcurLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model ConcurLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java index 079ca73baa8eb..462a9b2341901 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java @@ -40,8 +40,7 @@ public final class CopyActivityTypeProperties { private Object translator; /* - * Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with - * resultType boolean). + * Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "enableStaging") private Object enableStaging; @@ -53,15 +52,13 @@ public final class CopyActivityTypeProperties { private StagingSettings stagingSettings; /* - * Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: - * integer (or Expression with resultType integer), minimum: 0. + * Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "parallelCopies") private Object parallelCopies; /* - * Maximum number of data integration units that can be used to perform this data movement. Type: integer (or - * Expression with resultType integer), minimum: 0. + * Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. 
*/ @JsonProperty(value = "dataIntegrationUnits") private Object dataIntegrationUnits; @@ -442,14 +439,16 @@ public CopyActivityTypeProperties withSkipErrorFile(SkipErrorFile skipErrorFile) */ public void validate() { if (source() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property source in model CopyActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property source in model CopyActivityTypeProperties")); } else { source().validate(); } if (sink() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property sink in model CopyActivityTypeProperties")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property sink in model CopyActivityTypeProperties")); } else { sink().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java index 05c44b79f879a..77f6f90e8c8ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java @@ -40,39 +40,31 @@ public final class CosmosDbLinkedServiceTypeProperties { private SecretBase accountKey; /* - * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: - * string (or Expression with resultType string). + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* - * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for - * key/secret, 'ServicePrincipalCert' for certificate. Type: string. + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string. */ @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* - * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is - * 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be - * AzureKeyVaultSecretReference. + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). 
+ * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; @@ -84,8 +76,7 @@ public final class CosmosDbLinkedServiceTypeProperties { private CosmosDbConnectionMode connectionMode; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -211,9 +202,8 @@ public CosmosDbLinkedServiceTypeProperties withServicePrincipalId(Object service } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string. + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string. * * @return the servicePrincipalCredentialType value. */ @@ -222,9 +212,8 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string. + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string. * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the CosmosDbLinkedServiceTypeProperties object itself. @@ -284,9 +273,9 @@ public CosmosDbLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -295,9 +284,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. 
Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the CosmosDbLinkedServiceTypeProperties object itself. @@ -328,8 +317,8 @@ public CosmosDbLinkedServiceTypeProperties withConnectionMode(CosmosDbConnection } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -338,8 +327,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CosmosDbLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java index f245f3e6c052c..d20230c163176 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java @@ -54,8 +54,9 @@ public CosmosDbMongoDbApiCollectionDatasetTypeProperties withCollection(Object c */ public void validate() { if (collection() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collection in model CosmosDbMongoDbApiCollectionDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collection in model CosmosDbMongoDbApiCollectionDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java index f85c9b131d1ee..6be531c7657f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java @@ -14,22 +14,19 @@ @Fluent public final class CosmosDbMongoDbApiLinkedServiceTypeProperties { /* - * Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean - * (or Expression with resultType boolean). + * Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "isServerVersionAbove32") private Object isServerVersionAbove32; /* - * The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: - * string, SecureString or AzureKeyVaultSecretReference. + * The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* - * The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with - * resultType string). + * The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). */ @JsonProperty(value = "database", required = true) private Object database; @@ -113,12 +110,14 @@ public CosmosDbMongoDbApiLinkedServiceTypeProperties withDatabase(Object databas */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model CosmosDbMongoDbApiLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model CosmosDbMongoDbApiLinkedServiceTypeProperties")); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model CosmosDbMongoDbApiLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model CosmosDbMongoDbApiLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java index 79e9fbea76d96..afa3515db0fce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java @@ -54,8 +54,9 @@ public CosmosDbSqlApiCollectionDatasetTypeProperties withCollectionName(Object c */ public void validate() { if (collectionName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collectionName in model CosmosDbSqlApiCollectionDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collectionName in model CosmosDbSqlApiCollectionDatasetTypeProperties")); } } 
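The same required-property rule applies to the dataset models: CosmosDbSqlApiCollectionDatasetTypeProperties rejects a missing collectionName through the new atError().log(...) call. A brief sketch, again assuming the generated public no-argument constructor; the collection name value is illustrative.

import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbSqlApiCollectionDatasetTypeProperties;

public class CosmosDbSqlApiCollectionDatasetSample {
    public static void main(String[] args) {
        // collectionName accepts a string or an expression with resultType string.
        CosmosDbSqlApiCollectionDatasetTypeProperties dataset =
            new CosmosDbSqlApiCollectionDatasetTypeProperties().withCollectionName("orders");

        // Passes because collectionName is set; if it were left null this would throw the
        // IllegalArgumentException logged via ClientLogger, as in the hunk above.
        dataset.validate();
    }
}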
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java index 40a20f06aba49..789c819c001f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class CouchbaseLinkedServiceTypeProperties { private AzureKeyVaultSecretReference credString; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public CouchbaseLinkedServiceTypeProperties withCredString(AzureKeyVaultSecretRe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CouchbaseLinkedServiceTypeProperties object itself. 
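CouchbaseLinkedServiceTypeProperties stores its credential string as an AzureKeyVaultSecretReference (credString). The following is a hedged sketch of wiring that up; the withStore, withSecretName and withReferenceName setters, and the key vault linked-service and secret names, are assumptions for illustration and are not shown in this patch.

import com.azure.resourcemanager.datafactory.fluent.models.CouchbaseLinkedServiceTypeProperties;
import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference;
import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;

public class CouchbaseLinkedServicePropertiesSample {
    public static void main(String[] args) {
        // Assumed fluent setters on AzureKeyVaultSecretReference / LinkedServiceReference;
        // the linked-service and secret names below are placeholders.
        AzureKeyVaultSecretReference credString = new AzureKeyVaultSecretReference()
            .withStore(new LinkedServiceReference().withReferenceName("AzureKeyVault1"))
            .withSecretName("couchbase-connection-string");

        CouchbaseLinkedServiceTypeProperties properties = new CouchbaseLinkedServiceTypeProperties()
            .withCredString(credString)
            .withEncryptedCredential("<encrypted-credential>");

        System.out.println(properties.encryptedCredential());
    }
}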
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java index 7ed0f420b919f..25dd6df109acf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java @@ -52,8 +52,8 @@ public CreateRunResponseInner withRunId(String runId) { */ public void validate() { if (runId() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property runId in model CreateRunResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property runId in model CreateRunResponseInner")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityCredentialResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java similarity index 67% rename from sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityCredentialResourceInner.java rename to sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java index de9a9d43771f1..ea24d7597b3e4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityCredentialResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java @@ -7,19 +7,19 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; +import com.azure.resourcemanager.datafactory.models.Credential; import com.fasterxml.jackson.annotation.JsonProperty; /** * Credential resource type. */ @Fluent -public final class ManagedIdentityCredentialResourceInner extends SubResource { +public class CredentialResourceInner extends SubResource { /* - * Managed Identity Credential properties. + * Properties of credentials. */ @JsonProperty(value = "properties", required = true) - private ManagedIdentityCredential properties; + private Credential properties; /* * The resource name. @@ -40,27 +40,27 @@ public final class ManagedIdentityCredentialResourceInner extends SubResource { private String etag; /** - * Creates an instance of ManagedIdentityCredentialResourceInner class. + * Creates an instance of CredentialResourceInner class. */ - public ManagedIdentityCredentialResourceInner() { + public CredentialResourceInner() { } /** - * Get the properties property: Managed Identity Credential properties. + * Get the properties property: Properties of credentials. * * @return the properties value. */ - public ManagedIdentityCredential properties() { + public Credential properties() { return this.properties; } /** - * Set the properties property: Managed Identity Credential properties. 
+ * Set the properties property: Properties of credentials. * * @param properties the properties value to set. - * @return the ManagedIdentityCredentialResourceInner object itself. + * @return the CredentialResourceInner object itself. */ - public ManagedIdentityCredentialResourceInner withProperties(ManagedIdentityCredential properties) { + public CredentialResourceInner withProperties(Credential properties) { this.properties = properties; return this; } @@ -96,7 +96,7 @@ public String etag() { * {@inheritDoc} */ @Override - public ManagedIdentityCredentialResourceInner withId(String id) { + public CredentialResourceInner withId(String id) { super.withId(id); return this; } @@ -108,12 +108,13 @@ public ManagedIdentityCredentialResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model ManagedIdentityCredentialResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model CredentialResourceInner")); } else { properties().validate(); } } - private static final ClientLogger LOGGER = new ClientLogger(ManagedIdentityCredentialResourceInner.class); + private static final ClientLogger LOGGER = new ClientLogger(CredentialResourceInner.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java index a15b101a753d1..3ed29c6e6d6e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java @@ -42,23 +42,20 @@ public final class CustomActivityTypeProperties { private CustomActivityReferenceObject referenceObjects; /* - * User defined property bag. There is no restriction on the keys or values that can be used. The user specified - * custom activity has the full responsibility to consume and interpret the content defined. + * User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. */ @JsonProperty(value = "extendedProperties") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map extendedProperties; /* - * The retention time for the files submitted for custom activity. Type: double (or Expression with resultType - * double). + * The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). */ @JsonProperty(value = "retentionTimeInDays") private Object retentionTimeInDays; /* - * Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType - * double). + * Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). 
*/ @JsonProperty(value = "autoUserSpecification") private Object autoUserSpecification; @@ -224,8 +221,9 @@ public CustomActivityTypeProperties withAutoUserSpecification(Object autoUserSpe */ public void validate() { if (command() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property command in model CustomActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property command in model CustomActivityTypeProperties")); } if (resourceLinkedService() != null) { resourceLinkedService().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java index 123655b3d3fdb..0bd07611ac342 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java @@ -15,15 +15,13 @@ @Fluent public final class CustomEventsTriggerTypeProperties { /* - * The event subject must begin with the pattern provided for trigger to fire. At least one of these must be - * provided: subjectBeginsWith, subjectEndsWith. + * The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */ @JsonProperty(value = "subjectBeginsWith") private String subjectBeginsWith; /* - * The event subject must end with the pattern provided for trigger to fire. At least one of these must be - * provided: subjectBeginsWith, subjectEndsWith. + * The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. 
*/ @JsonProperty(value = "subjectEndsWith") private String subjectEndsWith; @@ -137,12 +135,14 @@ public CustomEventsTriggerTypeProperties withScope(String scope) { */ public void validate() { if (events() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property events in model CustomEventsTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property events in model CustomEventsTriggerTypeProperties")); } if (scope() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property scope in model CustomEventsTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property scope in model CustomEventsTriggerTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java index a211cd876aac5..ceeadc91ac6a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java @@ -108,8 +108,9 @@ public DataFlowResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property properties in model DataFlowResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model DataFlowResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java index afef6b97669ea..55945f7ffbff4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java @@ -17,8 +17,7 @@ @Fluent public final class DataLakeAnalyticsUsqlActivityTypeProperties { /* - * Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType - * string). + * Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). */ @JsonProperty(value = "scriptPath", required = true) private Object scriptPath; @@ -30,15 +29,13 @@ public final class DataLakeAnalyticsUsqlActivityTypeProperties { private LinkedServiceReference scriptLinkedService; /* - * The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression - * with resultType integer), minimum: 1. + * The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. 
*/ @JsonProperty(value = "degreeOfParallelism") private Object degreeOfParallelism; /* - * Determines which jobs out of all that are queued should be selected to run first. The lower the number, the - * higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. + * Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. */ @JsonProperty(value = "priority") private Object priority; @@ -57,8 +54,7 @@ public final class DataLakeAnalyticsUsqlActivityTypeProperties { private Object runtimeVersion; /* - * Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or - * Expression with resultType string). + * Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). */ @JsonProperty(value = "compilationMode") private Object compilationMode; @@ -229,12 +225,14 @@ public DataLakeAnalyticsUsqlActivityTypeProperties withCompilationMode(Object co */ public void validate() { if (scriptPath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property scriptPath in model DataLakeAnalyticsUsqlActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property scriptPath in model DataLakeAnalyticsUsqlActivityTypeProperties")); } if (scriptLinkedService() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property scriptLinkedService in model DataLakeAnalyticsUsqlActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property scriptLinkedService in model DataLakeAnalyticsUsqlActivityTypeProperties")); } else { scriptLinkedService().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java index 6e49cd2a0b1fe..18f0434733dd2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java @@ -17,15 +17,13 @@ @Fluent public final class DatabricksNotebookActivityTypeProperties { /* - * The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. - * Type: string (or Expression with resultType string). + * The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). */ @JsonProperty(value = "notebookPath", required = true) private Object notebookPath; /* - * Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the - * default value from the notebook will be used. + * Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. 
*/ @JsonProperty(value = "baseParameters") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) @@ -44,8 +42,8 @@ public DatabricksNotebookActivityTypeProperties() { } /** - * Get the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This - * path must begin with a slash. Type: string (or Expression with resultType string). + * Get the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This path + * must begin with a slash. Type: string (or Expression with resultType string). * * @return the notebookPath value. */ @@ -54,8 +52,8 @@ public Object notebookPath() { } /** - * Set the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This - * path must begin with a slash. Type: string (or Expression with resultType string). + * Set the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This path + * must begin with a slash. Type: string (or Expression with resultType string). * * @param notebookPath the notebookPath value to set. * @return the DatabricksNotebookActivityTypeProperties object itself. @@ -114,8 +112,9 @@ public DatabricksNotebookActivityTypeProperties withLibraries(List exposureControlResponses() { */ public void validate() { if (exposureControlResponses() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property exposureControlResponses in model ExposureControlBatchResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property exposureControlResponses in model ExposureControlBatchResponseInner")); } else { exposureControlResponses().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java index dd7be62a4a4a1..d10fd3b1d76ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java @@ -14,15 +14,13 @@ @Fluent public final class FailActivityTypeProperties { /* - * The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non - * empty/blank string at runtime. Type: string (or Expression with resultType string). + * The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). */ @JsonProperty(value = "message", required = true) private Object message; /* - * The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated - * to a non empty/blank string at runtime. Type: string (or Expression with resultType string). + * The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "errorCode", required = true) private Object errorCode; @@ -86,12 +84,14 @@ public FailActivityTypeProperties withErrorCode(Object errorCode) { */ public void validate() { if (message() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property message in model FailActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property message in model FailActivityTypeProperties")); } if (errorCode() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property errorCode in model FailActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property errorCode in model FailActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java index df31ff9abf30c..5e683c9f8b62d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java @@ -33,8 +33,7 @@ public final class FileServerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -106,8 +105,8 @@ public FileServerLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -116,8 +115,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the FileServerLinkedServiceTypeProperties object itself. 
@@ -134,8 +133,9 @@ public FileServerLinkedServiceTypeProperties withEncryptedCredential(String encr */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model FileServerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model FileServerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java index 800ef91e6a84c..36f0bc2309c6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java @@ -45,8 +45,7 @@ public final class FileShareDatasetTypeProperties { private DatasetStorageFormat format; /* - * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string - * (or Expression with resultType string). + * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileFilter") private Object fileFilter; @@ -64,8 +63,8 @@ public FileShareDatasetTypeProperties() { } /** - * Get the folderPath property: The path of the on-premises file system. Type: string (or Expression with - * resultType string). + * Get the folderPath property: The path of the on-premises file system. Type: string (or Expression with resultType + * string). * * @return the folderPath value. */ @@ -74,8 +73,8 @@ public Object folderPath() { } /** - * Set the folderPath property: The path of the on-premises file system. Type: string (or Expression with - * resultType string). + * Set the folderPath property: The path of the on-premises file system. Type: string (or Expression with resultType + * string). * * @param folderPath the folderPath value to set. * @return the FileShareDatasetTypeProperties object itself. 
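Note on the recurring change above: every model's validate() now throws the exception returned by the fluent LOGGER.atError().log(...) builder instead of LOGGER.logExceptionAsError(...). A minimal before/after sketch, assuming a com.azure.core.util.logging.ClientLogger field named LOGGER as these generated classes declare, and relying on log(...) returning the passed exception exactly as the hunks above use it (ValidationSketch is a hypothetical stand-in class, not part of the SDK):

    import com.azure.core.util.logging.ClientLogger;

    public final class ValidationSketch {
        private static final ClientLogger LOGGER = new ClientLogger(ValidationSketch.class);

        private Object command; // stand-in for a required property

        public void validate() {
            if (command == null) {
                // Old pattern (removed in this patch): log as error and throw in one call.
                // throw LOGGER.logExceptionAsError(
                //     new IllegalArgumentException("Missing required property command"));

                // New pattern (added in this patch): fluent builder with the same
                // log-and-throw semantics; log(...) hands back the exception to throw.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException("Missing required property command"));
            }
        }
    }
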
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java index 4a94bb286e4dc..687079add0b14 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java @@ -79,14 +79,16 @@ public FilterActivityTypeProperties withCondition(Expression condition) { */ public void validate() { if (items() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property items in model FilterActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property items in model FilterActivityTypeProperties")); } else { items().validate(); } if (condition() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property condition in model FilterActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property condition in model FilterActivityTypeProperties")); } else { condition().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java index e2f9dddea3257..0311ed0a8c65a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java @@ -135,14 +135,16 @@ public ForEachActivityTypeProperties withActivities(List activities) { */ public void validate() { if (items() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property items in model ForEachActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property items in model ForEachActivityTypeProperties")); } else { items().validate(); } if (activities() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property activities in model ForEachActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property activities in model ForEachActivityTypeProperties")); } else { activities().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java index 978e7f308cd70..d356423bb70d2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java @@ -22,8 +22,7 @@ public final class FtpServerLinkedServiceTypeProperties { private Object host; /* - * The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: - * integer (or Expression with resultType integer), minimum: 0. + * The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "port") private Object port; @@ -47,22 +46,19 @@ public final class FtpServerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression - * with resultType boolean). + * If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "enableSsl") private Object enableSsl; /* - * If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: - * boolean (or Expression with resultType boolean). + * If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; @@ -136,8 +132,7 @@ public FtpServerLinkedServiceTypeProperties withAuthenticationType(FtpAuthentica } /** - * Get the username property: Username to logon the FTP server. Type: string (or Expression with resultType - * string). + * Get the username property: Username to logon the FTP server. Type: string (or Expression with resultType string). * * @return the username value. */ @@ -146,8 +141,7 @@ public Object username() { } /** - * Set the username property: Username to logon the FTP server. Type: string (or Expression with resultType - * string). + * Set the username property: Username to logon the FTP server. Type: string (or Expression with resultType string). * * @param username the username value to set. * @return the FtpServerLinkedServiceTypeProperties object itself. @@ -178,8 +172,8 @@ public FtpServerLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -188,8 +182,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the FtpServerLinkedServiceTypeProperties object itself. @@ -200,8 +194,8 @@ public FtpServerLinkedServiceTypeProperties withEncryptedCredential(String encry } /** - * Get the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. - * Type: boolean (or Expression with resultType boolean). + * Get the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: + * boolean (or Expression with resultType boolean). * * @return the enableSsl value. */ @@ -210,8 +204,8 @@ public Object enableSsl() { } /** - * Set the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. - * Type: boolean (or Expression with resultType boolean). + * Set the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: + * boolean (or Expression with resultType boolean). * * @param enableSsl the enableSsl value to set. * @return the FtpServerLinkedServiceTypeProperties object itself. @@ -222,8 +216,8 @@ public FtpServerLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when - * connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + * Get the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when connect + * over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). * * @return the enableServerCertificateValidation value. */ @@ -232,8 +226,8 @@ public Object enableServerCertificateValidation() { } /** - * Set the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when - * connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + * Set the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when connect + * over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). * * @param enableServerCertificateValidation the enableServerCertificateValidation value to set. * @return the FtpServerLinkedServiceTypeProperties object itself. 
@@ -251,8 +245,9 @@ public Object enableServerCertificateValidation() { */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model FtpServerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model FtpServerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java index ae4612bf38cc9..0ee85a73e38a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java @@ -134,8 +134,9 @@ public GetMetadataActivityTypeProperties withFormatSettings(FormatReadSettings f */ public void validate() { if (dataset() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dataset in model GetMetadataActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dataset in model GetMetadataActivityTypeProperties")); } else { dataset().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java index bf11d26468d03..8bffe96175ce3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java @@ -111,8 +111,9 @@ public GlobalParameterResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model GlobalParameterResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model GlobalParameterResourceInner")); } else { properties().values().forEach(e -> { if (e != null) { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java index 6b177bb67190e..4cc02c8a8032f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java @@ -15,15 +15,13 @@ @Fluent public final 
class GoogleAdWordsLinkedServiceTypeProperties { /* - * (Deprecated) Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the - * linked service. Type: object. + * (Deprecated) Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* - * The Client customer ID of the AdWords account that you want to fetch report data for. Type: string (or - * Expression with resultType string). + * The Client customer ID of the AdWords account that you want to fetch report data for. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientCustomerID") private Object clientCustomerId; @@ -35,8 +33,7 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { private SecretBase developerToken; /* - * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on - * self-hosted IR. + * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */ @JsonProperty(value = "authenticationType") private GoogleAdWordsAuthenticationType authenticationType; @@ -48,8 +45,7 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { private SecretBase refreshToken; /* - * The client id of the google application used to acquire the refresh token. Type: string (or Expression with - * resultType string). + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; @@ -61,67 +57,55 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { private SecretBase clientSecret; /* - * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - * Type: string (or Expression with resultType string). + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. Type: string (or Expression with resultType string). */ @JsonProperty(value = "email") private Object email; /* - * (Deprecated) The full path to the .p12 key file that is used to authenticate the service account email address - * and can only be used on self-hosted IR. Type: string (or Expression with resultType string). + * (Deprecated) The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). */ @JsonProperty(value = "keyFilePath") private Object keyFilePath; /* - * (Deprecated) The full path of the .pem file containing trusted CA certificates for verifying the server when - * connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the - * cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). + * (Deprecated) The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * (Deprecated) Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. - * The default value is false. Type: boolean (or Expression with resultType boolean). + * (Deprecated) Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * The private key that is used to authenticate the service account email address and can only be used on - * self-hosted IR. + * The private key that is used to authenticate the service account email address and can only be used on self-hosted IR. */ @JsonProperty(value = "privateKey") private SecretBase privateKey; /* - * The customer ID of the Google Ads Manager account through which you want to fetch report data of specific - * Customer. Type: string (or Expression with resultType string). + * The customer ID of the Google Ads Manager account through which you want to fetch report data of specific Customer. Type: string (or Expression with resultType string). */ @JsonProperty(value = "loginCustomerID") private Object loginCustomerId; /* - * The Google Ads API major version such as v14. The supported major versions could be found on - * https://developers.google.com/google-ads/api/docs/release-notes. Type: string (or Expression with resultType - * string). + * The Google Ads API major version such as v14. The supported major versions could be found on https://developers.google.com/google-ads/api/docs/release-notes. Type: string (or Expression with resultType string). */ @JsonProperty(value = "googleAdsApiVersion") private Object googleAdsApiVersion; /* - * Specifies whether to use the legacy data type mappings, which maps float, int32 and int64 from Google to string. - * Do not set this to true unless you want to keep backward compatibility with legacy driver's data type mappings. - * Type: boolean (or Expression with resultType boolean). + * Specifies whether to use the legacy data type mappings, which maps float, int32 and int64 from Google to string. Do not set this to true unless you want to keep backward compatibility with legacy driver's data type mappings. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "supportLegacyDataTypes") private Object supportLegacyDataTypes; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -309,8 +293,8 @@ public GoogleAdWordsLinkedServiceTypeProperties withEmail(Object email) { /** * Get the keyFilePath property: (Deprecated) The full path to the .p12 key file that is used to authenticate the - * service account email address and can only be used on self-hosted IR. Type: string (or Expression with - * resultType string). + * service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType + * string). * * @return the keyFilePath value. 
*/ @@ -320,8 +304,8 @@ public Object keyFilePath() { /** * Set the keyFilePath property: (Deprecated) The full path to the .p12 key file that is used to authenticate the - * service account email address and can only be used on self-hosted IR. Type: string (or Expression with - * resultType string). + * service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType + * string). * * @param keyFilePath the keyFilePath value to set. * @return the GoogleAdWordsLinkedServiceTypeProperties object itself. @@ -474,8 +458,8 @@ public GoogleAdWordsLinkedServiceTypeProperties withSupportLegacyDataTypes(Objec } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -484,8 +468,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleAdWordsLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java index 68935cff7ae68..d3db4723de70e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java @@ -22,23 +22,19 @@ public final class GoogleBigQueryLinkedServiceTypeProperties { private Object project; /* - * A comma-separated list of public BigQuery projects to access. Type: string (or Expression with resultType - * string). + * A comma-separated list of public BigQuery projects to access. Type: string (or Expression with resultType string). */ @JsonProperty(value = "additionalProjects") private Object additionalProjects; /* - * Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables - * that combine BigQuery data with data from Google Drive. The default value is false. Type: string (or Expression - * with resultType string). + * Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestGoogleDriveScope") private Object requestGoogleDriveScope; /* - * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on - * self-hosted IR. 
+ * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */ @JsonProperty(value = "authenticationType", required = true) private GoogleBigQueryAuthenticationType authenticationType; @@ -50,8 +46,7 @@ public final class GoogleBigQueryLinkedServiceTypeProperties { private SecretBase refreshToken; /* - * The client id of the google application used to acquire the refresh token. Type: string (or Expression with - * resultType string). + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; @@ -63,37 +58,31 @@ public final class GoogleBigQueryLinkedServiceTypeProperties { private SecretBase clientSecret; /* - * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - * Type: string (or Expression with resultType string). + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. Type: string (or Expression with resultType string). */ @JsonProperty(value = "email") private Object email; /* - * The full path to the .p12 key file that is used to authenticate the service account email address and can only - * be used on self-hosted IR. Type: string (or Expression with resultType string). + * The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). */ @JsonProperty(value = "keyFilePath") private Object keyFilePath; /* - * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. Type: string (or Expression with resultType string). + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). */ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default - * value is false.Type: boolean (or Expression with resultType boolean). + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false.Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -149,9 +138,9 @@ public GoogleBigQueryLinkedServiceTypeProperties withAdditionalProjects(Object a } /** - * Get the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive - * access enables support for federated tables that combine BigQuery data with data from Google Drive. 
The default - * value is false. Type: string (or Expression with resultType string). + * Get the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive access + * enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is + * false. Type: string (or Expression with resultType string). * * @return the requestGoogleDriveScope value. */ @@ -160,9 +149,9 @@ public Object requestGoogleDriveScope() { } /** - * Set the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive - * access enables support for federated tables that combine BigQuery data with data from Google Drive. The default - * value is false. Type: string (or Expression with resultType string). + * Set the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive access + * enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is + * false. Type: string (or Expression with resultType string). * * @param requestGoogleDriveScope the requestGoogleDriveScope value to set. * @return the GoogleBigQueryLinkedServiceTypeProperties object itself. @@ -282,9 +271,8 @@ public GoogleBigQueryLinkedServiceTypeProperties withEmail(Object email) { } /** - * Get the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service - * account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType - * string). + * Get the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service account + * email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). * * @return the keyFilePath value. */ @@ -293,9 +281,8 @@ public Object keyFilePath() { } /** - * Set the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service - * account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType - * string). + * Set the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service account + * email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). * * @param keyFilePath the keyFilePath value to set. * @return the GoogleBigQueryLinkedServiceTypeProperties object itself. @@ -306,10 +293,9 @@ public GoogleBigQueryLinkedServiceTypeProperties withKeyFilePath(Object keyFileP } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType - * string). + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). * * @return the trustedCertPath value. 
*/ @@ -318,10 +304,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType - * string). + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). * * @param trustedCertPath the trustedCertPath value to set. * @return the GoogleBigQueryLinkedServiceTypeProperties object itself. @@ -354,8 +339,8 @@ public GoogleBigQueryLinkedServiceTypeProperties withUseSystemTrustStore(Object } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -364,8 +349,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleBigQueryLinkedServiceTypeProperties object itself. 
@@ -382,12 +367,14 @@ public GoogleBigQueryLinkedServiceTypeProperties withEncryptedCredential(String */ public void validate() { if (project() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property project in model GoogleBigQueryLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property project in model GoogleBigQueryLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model GoogleBigQueryLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model GoogleBigQueryLinkedServiceTypeProperties")); } if (refreshToken() != null) { refreshToken().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java index a06e475520281..236dbe4a51615 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java @@ -28,8 +28,7 @@ public final class GoogleBigQueryV2LinkedServiceTypeProperties { private GoogleBigQueryV2AuthenticationType authenticationType; /* - * The client id of the google application used to acquire the refresh token. Type: string (or Expression with - * resultType string). + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; @@ -47,15 +46,13 @@ public final class GoogleBigQueryV2LinkedServiceTypeProperties { private SecretBase refreshToken; /* - * The content of the .json key file that is used to authenticate the service account. Type: string (or Expression - * with resultType string). + * The content of the .json key file that is used to authenticate the service account. Type: string (or Expression with resultType string). */ @JsonProperty(value = "keyFileContent") private SecretBase keyFileContent; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -196,8 +193,8 @@ public GoogleBigQueryV2LinkedServiceTypeProperties withKeyFileContent(SecretBase } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -206,8 +203,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleBigQueryV2LinkedServiceTypeProperties object itself. @@ -224,12 +221,14 @@ public GoogleBigQueryV2LinkedServiceTypeProperties withEncryptedCredential(Strin */ public void validate() { if (projectId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property projectId in model GoogleBigQueryV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property projectId in model GoogleBigQueryV2LinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model GoogleBigQueryV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model GoogleBigQueryV2LinkedServiceTypeProperties")); } if (clientSecret() != null) { clientSecret().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java index 6be2cc934923f..863b800348edf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class GoogleCloudStorageLinkedServiceTypeProperties { /* - * The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string - * (or Expression with resultType string). + * The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accessKeyId") private Object accessKeyId; @@ -27,16 +26,13 @@ public final class GoogleCloudStorageLinkedServiceTypeProperties { private SecretBase secretAccessKey; /* - * This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional - * property; change it only if you want to try a different service endpoint or want to switch between https and - * http. Type: string (or Expression with resultType string). + * This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* - * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -92,9 +88,9 @@ public GoogleCloudStorageLinkedServiceTypeProperties withSecretAccessKey(SecretB } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Get the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -103,9 +99,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the GoogleCloudStorageLinkedServiceTypeProperties object itself. @@ -116,8 +112,8 @@ public GoogleCloudStorageLinkedServiceTypeProperties withServiceUrl(Object servi } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -126,8 +122,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleCloudStorageLinkedServiceTypeProperties object itself. 
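For callers, the observable behavior of validate() is unchanged: a missing required property still surfaces as an IllegalArgumentException, now routed through the atError() builder before it is thrown. A small usage sketch against FailActivityTypeProperties from the hunks above, assuming the public no-argument constructor these generated models expose (as shown for FileShareDatasetTypeProperties and DatabricksNotebookActivityTypeProperties earlier in this patch):

    import com.azure.resourcemanager.datafactory.fluent.models.FailActivityTypeProperties;

    public class ValidateSketch {
        public static void main(String[] args) {
            FailActivityTypeProperties props = new FailActivityTypeProperties();
            try {
                props.validate(); // 'message' is required and still null
            } catch (IllegalArgumentException e) {
                // The same text is also logged at error level before reaching the caller.
                System.out.println(e.getMessage());
                // "Missing required property message in model FailActivityTypeProperties"
            }
        }
    }
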
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java index 76b04d92e7ed2..1ac9a9ff7bdf3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class GoogleSheetsLinkedServiceTypeProperties { private SecretBase apiToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -54,8 +53,8 @@ public GoogleSheetsLinkedServiceTypeProperties withApiToken(SecretBase apiToken) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -64,8 +63,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleSheetsLinkedServiceTypeProperties object itself. 
@@ -82,8 +81,9 @@ public GoogleSheetsLinkedServiceTypeProperties withEncryptedCredential(String en */ public void validate() { if (apiToken() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property apiToken in model GoogleSheetsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property apiToken in model GoogleSheetsLinkedServiceTypeProperties")); } else { apiToken().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java index a64dd6af6ea2c..5f1ca292c8ae7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class GreenplumLinkedServiceTypeProperties { private AzureKeyVaultSecretReference pwd; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public GreenplumLinkedServiceTypeProperties withPwd(AzureKeyVaultSecretReference } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GreenplumLinkedServiceTypeProperties object itself. 
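Besides reflowing Javadoc, this patch consistently replaces LOGGER.logExceptionAsError(...) with LOGGER.atError().log(...) in the generated validate() methods, as in the GoogleSheetsLinkedServiceTypeProperties hunk above. A minimal stand-alone sketch of that pattern, assuming azure-core's ClientLogger whose LoggingEventBuilder.log(Throwable) returns the throwable so it can be rethrown:

    import com.azure.core.util.logging.ClientLogger;

    public final class ValidationSketch {
        private static final ClientLogger LOGGER = new ClientLogger(ValidationSketch.class);

        private final Object apiToken;

        public ValidationSketch(Object apiToken) {
            this.apiToken = apiToken;
        }

        // Mirrors the generated validate() methods: build the error through atError(),
        // log it, and throw the same IllegalArgumentException instance that log(...) returns.
        public void validate() {
            if (apiToken == null) {
                throw LOGGER.atError()
                    .log(new IllegalArgumentException("Missing required property apiToken"));
            }
        }
    }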
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java index 3869f200f9829..f825663ef3975 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java @@ -58,16 +58,13 @@ public final class HBaseLinkedServiceTypeProperties { private Object enableSsl; /* - * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when - * connecting over SSL. The default value is false. + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -79,8 +76,7 @@ public final class HBaseLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -238,9 +234,9 @@ public HBaseLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -249,9 +245,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. 
This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the HBaseLinkedServiceTypeProperties object itself. @@ -306,8 +302,8 @@ public HBaseLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object all } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -316,8 +312,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HBaseLinkedServiceTypeProperties object itself. @@ -334,12 +330,14 @@ public HBaseLinkedServiceTypeProperties withEncryptedCredential(String encrypted */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model HBaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model HBaseLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model HBaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model HBaseLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java index 8d70b5260c221..34d1dc368b61b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java @@ -62,8 +62,7 @@ public final class HDInsightHiveActivityTypeProperties { private Map variables; /* - * Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security - * Package) + * Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) */ @JsonProperty(value = "queryTimeout") private Integer queryTimeout; @@ -216,8 +215,8 @@ public HDInsightHiveActivityTypeProperties withVariables(Map var } /** - * Get the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with - * ESP (Enterprise Security Package). 
+ * Get the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP + * (Enterprise Security Package). * * @return the queryTimeout value. */ @@ -226,8 +225,8 @@ public Integer queryTimeout() { } /** - * Set the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with - * ESP (Enterprise Security Package). + * Set the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP + * (Enterprise Security Package). * * @param queryTimeout the queryTimeout value to set. * @return the HDInsightHiveActivityTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java index f9b8b51f5d0ae..54287e12b58c0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java @@ -46,8 +46,7 @@ public final class HDInsightLinkedServiceTypeProperties { private LinkedServiceReference hcatalogLinkedServiceName; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -59,8 +58,7 @@ public final class HDInsightLinkedServiceTypeProperties { private Object isEspEnabled; /* - * Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with - * resultType string). + * Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileSystem") private Object fileSystem; @@ -175,8 +173,8 @@ public LinkedServiceReference hcatalogLinkedServiceName() { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -185,8 +183,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HDInsightLinkedServiceTypeProperties object itself. 
@@ -247,8 +245,9 @@ public HDInsightLinkedServiceTypeProperties withFileSystem(Object fileSystem) { */ public void validate() { if (clusterUri() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clusterUri in model HDInsightLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clusterUri in model HDInsightLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java index a5f12e7247003..bbdb732e75a6d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java @@ -244,12 +244,14 @@ public void validate() { storageLinkedServices().forEach(e -> e.validate()); } if (className() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property className in model HDInsightMapReduceActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property className in model HDInsightMapReduceActivityTypeProperties")); } if (jarFilePath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property jarFilePath in model HDInsightMapReduceActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property jarFilePath in model HDInsightMapReduceActivityTypeProperties")); } if (jarLinkedService() != null) { jarLinkedService().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java index e62141d6d751f..0dc1cb0c7691b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java @@ -25,9 +25,7 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private Object clusterSize; /* - * The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster - * stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum - * value is 5 mins. Type: string (or Expression with resultType string). + * The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "timeToLive", required = true) private Object timeToLive; @@ -75,8 +73,7 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private Object clusterResourceGroup; /* - * The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType - * string). + * The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clusterNamePrefix") private Object clusterNamePrefix; @@ -94,8 +91,7 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private SecretBase clusterPassword; /* - * The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType - * string). + * The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). */ @JsonProperty(value = "clusterSshUserName") private Object clusterSshUsername; @@ -107,15 +103,13 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private SecretBase clusterSshPassword; /* - * Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can - * register them on your behalf. + * Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. */ @JsonProperty(value = "additionalLinkedServiceNames") private List additionalLinkedServiceNames; /* - * The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is - * created by using the Azure SQL database as the metastore. + * The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. */ @JsonProperty(value = "hcatalogLinkedServiceName") private LinkedServiceReference hcatalogLinkedServiceName; @@ -181,8 +175,7 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private Object yarnConfiguration; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -206,23 +199,19 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { private Object zookeeperNodeSize; /* - * Custom script actions to run on HDI ondemand cluster once it's up. Please refer to - * https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure% - * 2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + * Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */ @JsonProperty(value = "scriptActions") private List scriptActions; /* - * The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or - * Expression with resultType string). + * The ARM resource ID for the vNet to which the cluster should be joined after creation. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "virtualNetworkId") private Object virtualNetworkId; /* - * The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is - * required. Type: string (or Expression with resultType string). + * The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). */ @JsonProperty(value = "subnetName") private Object subnetName; @@ -286,8 +275,7 @@ public HDInsightOnDemandLinkedServiceTypeProperties withTimeToLive(Object timeTo } /** - * Get the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType - * string). + * Get the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType string). * * @return the version value. */ @@ -296,8 +284,7 @@ public Object version() { } /** - * Set the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType - * string). + * Set the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType string). * * @param version the version value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -331,8 +318,8 @@ public LinkedServiceReference linkedServiceName() { } /** - * Get the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or - * Expression with resultType string). + * Get the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or Expression + * with resultType string). * * @return the hostSubscriptionId value. */ @@ -341,8 +328,8 @@ public Object hostSubscriptionId() { } /** - * Set the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or - * Expression with resultType string). + * Set the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or Expression + * with resultType string). * * @param hostSubscriptionId the hostSubscriptionId value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -461,8 +448,8 @@ public HDInsightOnDemandLinkedServiceTypeProperties withClusterNamePrefix(Object } /** - * Get the clusterUsername property: The username to access the cluster. Type: string (or Expression with - * resultType string). + * Get the clusterUsername property: The username to access the cluster. Type: string (or Expression with resultType + * string). * * @return the clusterUsername value. */ @@ -471,8 +458,8 @@ public Object clusterUsername() { } /** - * Set the clusterUsername property: The username to access the cluster. Type: string (or Expression with - * resultType string). + * Set the clusterUsername property: The username to access the cluster. Type: string (or Expression with resultType + * string). * * @param clusterUsername the clusterUsername value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -677,8 +664,8 @@ public HDInsightOnDemandLinkedServiceTypeProperties withHBaseConfiguration(Objec } /** - * Get the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - * HDInsight cluster. + * Get the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight + * cluster. * * @return the hdfsConfiguration value. 
*/ @@ -687,8 +674,8 @@ public Object hdfsConfiguration() { } /** - * Set the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - * HDInsight cluster. + * Set the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight + * cluster. * * @param hdfsConfiguration the hdfsConfiguration value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -699,8 +686,8 @@ public HDInsightOnDemandLinkedServiceTypeProperties withHdfsConfiguration(Object } /** - * Get the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the - * HDInsight cluster. + * Get the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight + * cluster. * * @return the hiveConfiguration value. */ @@ -709,8 +696,8 @@ public Object hiveConfiguration() { } /** - * Set the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the - * HDInsight cluster. + * Set the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight + * cluster. * * @param hiveConfiguration the hiveConfiguration value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -787,8 +774,8 @@ public HDInsightOnDemandLinkedServiceTypeProperties withStormConfiguration(Objec } /** - * Get the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the - * HDInsight cluster. + * Get the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight + * cluster. * * @return the yarnConfiguration value. */ @@ -797,8 +784,8 @@ public Object yarnConfiguration() { } /** - * Set the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the - * HDInsight cluster. + * Set the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight + * cluster. * * @param yarnConfiguration the yarnConfiguration value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. @@ -809,8 +796,8 @@ public HDInsightOnDemandLinkedServiceTypeProperties withYarnConfiguration(Object } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -819,8 +806,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HDInsightOnDemandLinkedServiceTypeProperties object itself. 
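Most of the HDInsightOnDemand properties above are typed as Object and documented as "Type: string (or Expression with resultType string)", meaning they accept either a literal value or a Data Factory expression. A minimal sketch of both forms, using the withClusterNamePrefix setter shown above and the withHostSubscriptionId setter implied by its @param documentation; the expression string itself is hypothetical:

    import java.util.HashMap;
    import java.util.Map;

    import com.azure.resourcemanager.datafactory.fluent.models.HDInsightOnDemandLinkedServiceTypeProperties;

    public class ExpressionValueSketch {
        public static void main(String[] args) {
            // Data Factory expressions are carried as {"value": ..., "type": "Expression"} objects.
            Map<String, Object> hostSubscriptionExpr = new HashMap<>();
            hostSubscriptionExpr.put("value", "@pipeline().globalParameters.hostSubscriptionId");
            hostSubscriptionExpr.put("type", "Expression");

            HDInsightOnDemandLinkedServiceTypeProperties properties =
                new HDInsightOnDemandLinkedServiceTypeProperties()
                    .withClusterNamePrefix("adfdemo")              // plain string literal
                    .withHostSubscriptionId(hostSubscriptionExpr); // expression resolved at runtime

            System.out.println(properties.clusterNamePrefix());
        }
    }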
@@ -985,37 +972,44 @@ public HDInsightOnDemandLinkedServiceTypeProperties withCredential(CredentialRef */ public void validate() { if (clusterSize() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clusterSize in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clusterSize in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (timeToLive() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property timeToLive in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property timeToLive in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (version() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property version in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property version in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model HDInsightOnDemandLinkedServiceTypeProperties")); } else { linkedServiceName().validate(); } if (hostSubscriptionId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property hostSubscriptionId in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property hostSubscriptionId in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); } if (tenant() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tenant in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tenant in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (clusterResourceGroup() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clusterResourceGroup in model HDInsightOnDemandLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clusterResourceGroup in model HDInsightOnDemandLinkedServiceTypeProperties")); } if (clusterPassword() != null) { clusterPassword().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java index 1dda57cd6683b..7847f97ba6f8b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java @@ -19,15 +19,13 
@@ @Fluent public final class HDInsightSparkActivityTypeProperties { /* - * The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType - * string). + * The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). */ @JsonProperty(value = "rootPath", required = true) private Object rootPath; /* - * The relative path to the root folder of the code/package to be executed. Type: string (or Expression with - * resultType string). + * The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). */ @JsonProperty(value = "entryFilePath", required = true) private Object entryFilePath; @@ -160,8 +158,8 @@ public HDInsightSparkActivityTypeProperties withGetDebugInfo(HDInsightActivityDe } /** - * Get the sparkJobLinkedService property: The storage linked service for uploading the entry file and - * dependencies, and for receiving logs. + * Get the sparkJobLinkedService property: The storage linked service for uploading the entry file and dependencies, + * and for receiving logs. * * @return the sparkJobLinkedService value. */ @@ -170,8 +168,8 @@ public LinkedServiceReference sparkJobLinkedService() { } /** - * Set the sparkJobLinkedService property: The storage linked service for uploading the entry file and - * dependencies, and for receiving logs. + * Set the sparkJobLinkedService property: The storage linked service for uploading the entry file and dependencies, + * and for receiving logs. * * @param sparkJobLinkedService the sparkJobLinkedService value to set. * @return the HDInsightSparkActivityTypeProperties object itself. @@ -251,12 +249,14 @@ public HDInsightSparkActivityTypeProperties withSparkConfig(Map */ public void validate() { if (rootPath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property rootPath in model HDInsightSparkActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property rootPath in model HDInsightSparkActivityTypeProperties")); } if (entryFilePath() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property entryFilePath in model HDInsightSparkActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property entryFilePath in model HDInsightSparkActivityTypeProperties")); } if (sparkJobLinkedService() != null) { sparkJobLinkedService().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java index 859d8e118d536..4ae5b90bd27fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java @@ -348,24 +348,29 @@ public void validate() { storageLinkedServices().forEach(e -> e.validate()); } if (mapper() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property mapper in model 
HDInsightStreamingActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property mapper in model HDInsightStreamingActivityTypeProperties")); } if (reducer() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property reducer in model HDInsightStreamingActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property reducer in model HDInsightStreamingActivityTypeProperties")); } if (input() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property input in model HDInsightStreamingActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property input in model HDInsightStreamingActivityTypeProperties")); } if (output() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property output in model HDInsightStreamingActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property output in model HDInsightStreamingActivityTypeProperties")); } if (filePaths() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property filePaths in model HDInsightStreamingActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property filePaths in model HDInsightStreamingActivityTypeProperties")); } if (fileLinkedService() != null) { fileLinkedService().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java index 91a6954f336cc..a725029b9c40b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java @@ -15,22 +15,19 @@ @Fluent public final class HdfsLinkedServiceTypeProperties { /* - * The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - * resultType string). + * The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; /* - * Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or - * Expression with resultType string). + * Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType") private Object authenticationType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -98,8 +95,8 @@ public HdfsLinkedServiceTypeProperties withAuthenticationType(Object authenticat } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -108,8 +105,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HdfsLinkedServiceTypeProperties object itself. @@ -168,8 +165,9 @@ public HdfsLinkedServiceTypeProperties withPassword(SecretBase password) { */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property url in model HdfsLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model HdfsLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java index 844e0dd81241b..fbc46f35d2365 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java @@ -18,8 +18,7 @@ @Fluent public final class HiveLinkedServiceTypeProperties { /* - * IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode - * is enable). + * IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). */ @JsonProperty(value = "host", required = true) private Object host; @@ -91,23 +90,19 @@ public final class HiveLinkedServiceTypeProperties { private Object enableSsl; /* - * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. 
The default - * value is false. + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when - * connecting over SSL. The default value is false. + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -119,8 +114,7 @@ public final class HiveLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -132,8 +126,8 @@ public HiveLinkedServiceTypeProperties() { } /** - * Get the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only - * when serviceDiscoveryMode is enable). + * Get the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only when + * serviceDiscoveryMode is enable). * * @return the host value. */ @@ -142,8 +136,8 @@ public Object host() { } /** - * Set the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only - * when serviceDiscoveryMode is enable). + * Set the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only when + * serviceDiscoveryMode is enable). * * @param host the host value to set. * @return the HiveLinkedServiceTypeProperties object itself. @@ -275,8 +269,8 @@ public HiveLinkedServiceTypeProperties withZooKeeperNameSpace(Object zooKeeperNa } /** - * Get the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into - * an equivalent form in HiveQL. + * Get the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into an + * equivalent form in HiveQL. * * @return the useNativeQuery value. */ @@ -285,8 +279,8 @@ public Object useNativeQuery() { } /** - * Set the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into - * an equivalent form in HiveQL. + * Set the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into an + * equivalent form in HiveQL. * * @param useNativeQuery the useNativeQuery value to set. * @return the HiveLinkedServiceTypeProperties object itself. @@ -379,9 +373,9 @@ public HiveLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. 
This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -390,9 +384,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the HiveLinkedServiceTypeProperties object itself. @@ -469,8 +463,8 @@ public HiveLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object allo } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -479,8 +473,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HiveLinkedServiceTypeProperties object itself. 
@@ -497,12 +491,14 @@ public HiveLinkedServiceTypeProperties withEncryptedCredential(String encryptedC */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model HiveLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model HiveLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model HiveLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model HiveLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java index 436e7cf4b99aa..9fc2295ca617c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class HttpDatasetTypeProperties { /* - * The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression - * with resultType string). + * The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). */ @JsonProperty(value = "relativeUrl") private Object relativeUrl; @@ -82,8 +81,8 @@ public HttpDatasetTypeProperties withRelativeUrl(Object relativeUrl) { } /** - * Get the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with - * resultType string). + * Get the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with resultType + * string). * * @return the requestMethod value. */ @@ -92,8 +91,8 @@ public Object requestMethod() { } /** - * Set the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with - * resultType string). + * Set the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with resultType + * string). * * @param requestMethod the requestMethod value to set. * @return the HttpDatasetTypeProperties object itself. @@ -104,8 +103,7 @@ public HttpDatasetTypeProperties withRequestMethod(Object requestMethod) { } /** - * Get the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType - * string). + * Get the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType string). * * @return the requestBody value. */ @@ -114,8 +112,7 @@ public Object requestBody() { } /** - * Set the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType - * string). + * Set the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the HttpDatasetTypeProperties object itself. 
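The HttpDatasetTypeProperties hunks above document relativeUrl, requestMethod and requestBody. A minimal sketch chaining the setters named in those hunks; the URL, method and body values are illustrative only:

    import com.azure.resourcemanager.datafactory.fluent.models.HttpDatasetTypeProperties;

    public class HttpDatasetSketch {
        public static void main(String[] args) {
            HttpDatasetTypeProperties properties = new HttpDatasetTypeProperties()
                .withRelativeUrl("api/v1/files/report.csv") // resolved against the HttpLinkedService base URL
                .withRequestMethod("POST")                  // HTTP method as a plain string
                .withRequestBody("{\"id\": 42}");           // request body, also a plain string here

            System.out.println(properties.requestMethod());
        }
    }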
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java index f6e200ba5ba6c..63acd4d566a57 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java @@ -16,8 +16,7 @@ @Fluent public final class HttpLinkedServiceTypeProperties { /* - * The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or Expression with resultType - * string). + * The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; @@ -41,38 +40,31 @@ public final class HttpLinkedServiceTypeProperties { private SecretBase password; /* - * The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value - * should be string type). + * The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value should be string type). */ @JsonProperty(value = "authHeaders") private Object authHeaders; /* - * Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with - * ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: - * string (or Expression with resultType string). + * Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "embeddedCertData") private Object embeddedCertData; /* - * Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises - * copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be - * specified. Type: string (or Expression with resultType string). + * Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "certThumbprint") private Object certThumbprint; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with - * resultType boolean). + * If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). 
*/ @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; @@ -193,8 +185,8 @@ public HttpLinkedServiceTypeProperties withAuthHeaders(Object authHeaders) { /** * Get the embeddedCertData property: Base64 encoded certificate data for ClientCertificate authentication. For - * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password - * should be specified. Type: string (or Expression with resultType string). + * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should + * be specified. Type: string (or Expression with resultType string). * * @return the embeddedCertData value. */ @@ -204,8 +196,8 @@ public Object embeddedCertData() { /** * Set the embeddedCertData property: Base64 encoded certificate data for ClientCertificate authentication. For - * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password - * should be specified. Type: string (or Expression with resultType string). + * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should + * be specified. Type: string (or Expression with resultType string). * * @param embeddedCertData the embeddedCertData value to set. * @return the HttpLinkedServiceTypeProperties object itself. @@ -240,8 +232,8 @@ public HttpLinkedServiceTypeProperties withCertThumbprint(Object certThumbprint) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -250,8 +242,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HttpLinkedServiceTypeProperties object itself. 
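The HttpLinkedServiceTypeProperties hunks above note that ClientCertificate authentication for an on-premises copy needs either certThumbprint or embeddedCertData plus password. A rough sketch of the two variants; the withUrl and withPassword setters are assumed from the generated fluent pattern (only their fields appear in these hunks), and all certificate values are placeholders:

    import com.azure.resourcemanager.datafactory.fluent.models.HttpLinkedServiceTypeProperties;
    import com.azure.resourcemanager.datafactory.models.SecureString;

    public class HttpClientCertificateSketch {
        public static void main(String[] args) {
            // Variant 1: certificate already installed on the self-hosted IR, referenced by thumbprint.
            HttpLinkedServiceTypeProperties byThumbprint = new HttpLinkedServiceTypeProperties()
                .withUrl("https://www.microsoft.com")
                .withCertThumbprint("<certificate-thumbprint>");

            // Variant 2: Base64-encoded certificate data embedded in the linked service, plus its password.
            HttpLinkedServiceTypeProperties byEmbeddedCert = new HttpLinkedServiceTypeProperties()
                .withUrl("https://www.microsoft.com")
                .withEmbeddedCertData("<base64-encoded-pfx>")
                .withPassword(new SecureString().withValue("<certificate-password>"));

            System.out.println(byThumbprint.certThumbprint());
            System.out.println(byEmbeddedCert.embeddedCertData());
        }
    }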
@@ -291,8 +283,9 @@ public Object enableServerCertificateValidation() { */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property url in model HttpLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model HttpLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java index 6e3379a730a9d..a36a0d4939459 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java @@ -45,8 +45,7 @@ public final class HubspotLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -58,8 +57,7 @@ public final class HubspotLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -195,8 +193,8 @@ public HubspotLinkedServiceTypeProperties withUseHostVerification(Object useHost } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -205,8 +203,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the HubspotLinkedServiceTypeProperties object itself. @@ -217,8 +215,8 @@ public HubspotLinkedServiceTypeProperties withUsePeerVerification(Object usePeer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -227,8 +225,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HubspotLinkedServiceTypeProperties object itself. @@ -245,8 +243,9 @@ public HubspotLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (clientId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientId in model HubspotLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientId in model HubspotLinkedServiceTypeProperties")); } if (clientSecret() != null) { clientSecret().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java index 953858e76e7da..031723cf56dbd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java @@ -17,22 +17,19 @@ @Fluent public final class IfConditionActivityTypeProperties { /* - * An expression that would evaluate to Boolean. This is used to determine the block of activities - * (ifTrueActivities or ifFalseActivities) that will be executed. + * An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. */ @JsonProperty(value = "expression", required = true) private Expression expression; /* - * List of activities to execute if expression is evaluated to true. This is an optional property and if not - * provided, the activity will exit without any action. + * List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. */ @JsonProperty(value = "ifTrueActivities") private List ifTrueActivities; /* - * List of activities to execute if expression is evaluated to false. This is an optional property and if not - * provided, the activity will exit without any action. + * List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. */ @JsonProperty(value = "ifFalseActivities") private List ifFalseActivities; @@ -44,8 +41,8 @@ public IfConditionActivityTypeProperties() { } /** - * Get the expression property: An expression that would evaluate to Boolean. This is used to determine the block - * of activities (ifTrueActivities or ifFalseActivities) that will be executed. 
+ * Get the expression property: An expression that would evaluate to Boolean. This is used to determine the block of + * activities (ifTrueActivities or ifFalseActivities) that will be executed. * * @return the expression value. */ @@ -54,8 +51,8 @@ public Expression expression() { } /** - * Set the expression property: An expression that would evaluate to Boolean. This is used to determine the block - * of activities (ifTrueActivities or ifFalseActivities) that will be executed. + * Set the expression property: An expression that would evaluate to Boolean. This is used to determine the block of + * activities (ifTrueActivities or ifFalseActivities) that will be executed. * * @param expression the expression value to set. * @return the IfConditionActivityTypeProperties object itself. @@ -88,8 +85,8 @@ public IfConditionActivityTypeProperties withIfTrueActivities(List ifT } /** - * Get the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is - * an optional property and if not provided, the activity will exit without any action. + * Get the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is an + * optional property and if not provided, the activity will exit without any action. * * @return the ifFalseActivities value. */ @@ -98,8 +95,8 @@ public List ifFalseActivities() { } /** - * Set the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is - * an optional property and if not provided, the activity will exit without any action. + * Set the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is an + * optional property and if not provided, the activity will exit without any action. * * @param ifFalseActivities the ifFalseActivities value to set. * @return the IfConditionActivityTypeProperties object itself. @@ -116,8 +113,9 @@ public IfConditionActivityTypeProperties withIfFalseActivities(List if */ public void validate() { if (expression() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property expression in model IfConditionActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property expression in model IfConditionActivityTypeProperties")); } else { expression().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java index 7564fab3070f0..87fff08b53cfe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java @@ -52,23 +52,19 @@ public final class ImpalaLinkedServiceTypeProperties { private Object enableSsl; /* - * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. 
+ * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default - * value is false. + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when - * connecting over SSL. The default value is false. + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -80,8 +76,7 @@ public final class ImpalaLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -219,9 +214,9 @@ public ImpalaLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -230,9 +225,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the ImpalaLinkedServiceTypeProperties object itself. @@ -309,8 +304,8 @@ public ImpalaLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object al } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. 
Type: string. * * @return the encryptedCredential value. */ @@ -319,8 +314,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ImpalaLinkedServiceTypeProperties object itself. @@ -337,12 +332,14 @@ public ImpalaLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model ImpalaLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model ImpalaLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model ImpalaLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model ImpalaLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java index 5ebc0a41837f0..a04a9248506ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java @@ -15,15 +15,13 @@ @Fluent public final class InformixLinkedServiceTypeProperties { /* - * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* - * Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and - * Basic. Type: string (or Expression with resultType string). + * Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType") private Object authenticationType; @@ -47,8 +45,7 @@ public final class InformixLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -61,8 +58,8 @@ public InformixLinkedServiceTypeProperties() { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -72,8 +69,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the InformixLinkedServiceTypeProperties object itself. @@ -170,8 +167,8 @@ public InformixLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -180,8 +177,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the InformixLinkedServiceTypeProperties object itself. 
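Every validate() hunk in this patch swaps LOGGER.logExceptionAsError(...) for the builder form LOGGER.atError().log(...); in both shapes the IllegalArgumentException is logged at error level and handed back so it can be thrown in a single statement. A condensed sketch of the new shape on a hypothetical ExampleTypeProperties model, mirroring the generated code:

```java
import com.azure.core.util.logging.ClientLogger;

public final class ExampleTypeProperties {               // hypothetical model, for illustration only
    private static final ClientLogger LOGGER = new ClientLogger(ExampleTypeProperties.class);

    private Object connectionString;

    public Object connectionString() {
        return this.connectionString;
    }

    public void validate() {
        if (connectionString() == null) {
            // Pattern used throughout this patch: log at ERROR and throw the exception that log(...) returns.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property connectionString in model ExampleTypeProperties"));
        }
    }
}
```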
@@ -198,8 +195,9 @@ public InformixLinkedServiceTypeProperties withEncryptedCredential(String encryp */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model InformixLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model InformixLinkedServiceTypeProperties")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java index fab0554e3cf87..2b428d34dcf27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java @@ -108,8 +108,9 @@ public IntegrationRuntimeResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model IntegrationRuntimeResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model IntegrationRuntimeResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java index fad047287005f..1ac377c03764c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java @@ -68,8 +68,9 @@ public IntegrationRuntimeStatusResponseInner withProperties(IntegrationRuntimeSt */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model IntegrationRuntimeStatusResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model IntegrationRuntimeStatusResponseInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java index 9c457947028d0..15c8c498ae291 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class JiraLinkedServiceTypeProperties { private Object host; /* - * The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting - * through HTTPS, or 8080 if connecting through HTTP. + * The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. */ @JsonProperty(value = "port") private Object port; @@ -46,8 +45,7 @@ public final class JiraLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -59,8 +57,7 @@ public final class JiraLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -92,8 +89,8 @@ public JiraLinkedServiceTypeProperties withHost(Object host) { } /** - * Get the port property: The TCP port that the Jira server uses to listen for client connections. The default - * value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + * Get the port property: The TCP port that the Jira server uses to listen for client connections. The default value + * is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. * * @return the port value. */ @@ -102,8 +99,8 @@ public Object port() { } /** - * Set the port property: The TCP port that the Jira server uses to listen for client connections. The default - * value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + * Set the port property: The TCP port that the Jira server uses to listen for client connections. The default value + * is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. * * @param port the port value to set. * @return the JiraLinkedServiceTypeProperties object itself. @@ -198,8 +195,8 @@ public JiraLinkedServiceTypeProperties withUseHostVerification(Object useHostVer } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -208,8 +205,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. 
* * @param usePeerVerification the usePeerVerification value to set. * @return the JiraLinkedServiceTypeProperties object itself. @@ -220,8 +217,8 @@ public JiraLinkedServiceTypeProperties withUsePeerVerification(Object usePeerVer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -230,8 +227,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the JiraLinkedServiceTypeProperties object itself. @@ -248,12 +245,14 @@ public JiraLinkedServiceTypeProperties withEncryptedCredential(String encryptedC */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model JiraLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model JiraLinkedServiceTypeProperties")); } if (username() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property username in model JiraLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model JiraLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java index 7b68b1fa71bd9..6499cf3e11dd6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java @@ -22,10 +22,7 @@ public final class JsonDatasetTypeProperties { private DatasetLocation location; /* - * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes - * another Unicode encoding. Refer to the name column of the table in the following link to set supported values: - * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType - * string). + * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "encodingName") private Object encodingName; @@ -115,8 +112,9 @@ public JsonDatasetTypeProperties withCompression(DatasetCompression compression) */ public void validate() { if (location() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property location in model JsonDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property location in model JsonDatasetTypeProperties")); } else { location().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java index 094a7b9c31183..f599c6ef7fc6d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class LakeHouseLinkedServiceTypeProperties { private Object artifactId; /* - * The ID of the application used to authenticate against Microsoft Fabric LakeHouse. Type: string (or Expression - * with resultType string). + * The ID of the application used to authenticate against Microsoft Fabric LakeHouse. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -39,31 +38,25 @@ public final class LakeHouseLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for - * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* - * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is - * 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be - * AzureKeyVaultSecretReference. + * The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; @@ -185,8 +178,8 @@ public LakeHouseLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -195,8 +188,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the LakeHouseLinkedServiceTypeProperties object itself. @@ -207,9 +200,9 @@ public LakeHouseLinkedServiceTypeProperties withEncryptedCredential(String encry } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -218,9 +211,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the LakeHouseLinkedServiceTypeProperties object itself. 
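The LakeHouse comments above tie the accepted secret type to servicePrincipalCredentialType: a SecureString or AzureKeyVaultSecretReference for 'ServicePrincipalKey', and only an AzureKeyVaultSecretReference for 'ServicePrincipalCert'. A sketch of that pairing, assuming the withServicePrincipalId and withServicePrincipalCredential setters and the SecureString model, none of which are shown in this hunk; values are placeholders.

```java
import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseLinkedServiceTypeProperties;
import com.azure.resourcemanager.datafactory.models.SecureString;

public class LakeHouseAuthSketch {
    public static void main(String[] args) {
        LakeHouseLinkedServiceTypeProperties lakeHouse = new LakeHouseLinkedServiceTypeProperties()
            .withServicePrincipalId("<app-id>")                         // assumed setter
            .withTenant("<tenant-id>")
            .withServicePrincipalCredentialType("ServicePrincipalKey")  // or "ServicePrincipalCert" for a certificate
            .withServicePrincipalCredential(                            // assumed setter
                new SecureString().withValue("<secret>"));              // SecureString is only valid for ServicePrincipalKey
    }
}
```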
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java index 2a60697185382..819a979c1495d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java @@ -25,8 +25,8 @@ public LakeHouseTableDatasetTypeProperties() { } /** - * Get the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with - * resultType string). + * Get the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType + * string). * * @return the table value. */ @@ -35,8 +35,8 @@ public Object table() { } /** - * Set the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with - * resultType string). + * Set the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType + * string). * * @param table the table value to set. * @return the LakeHouseTableDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java index ae2b423053760..ce0f9658ed580 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java @@ -79,8 +79,9 @@ public LicensedComponentSetupTypeProperties withLicenseKey(SecretBase licenseKey */ public void validate() { if (componentName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property componentName in model LicensedComponentSetupTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property componentName in model LicensedComponentSetupTypeProperties")); } if (licenseKey() != null) { licenseKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java index c6f6006735aef..f62169a801d38 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java @@ -108,8 +108,9 @@ public LinkedServiceResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new 
IllegalArgumentException( - "Missing required property properties in model LinkedServiceResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model LinkedServiceResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java index 6d3843ecd04e0..e192203df0318 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java @@ -28,8 +28,7 @@ public final class LookupActivityTypeProperties { private DatasetReference dataset; /* - * Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType - * boolean). + * Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "firstRowOnly") private Object firstRowOnly; @@ -109,14 +108,16 @@ public LookupActivityTypeProperties withFirstRowOnly(Object firstRowOnly) { */ public void validate() { if (source() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property source in model LookupActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property source in model LookupActivityTypeProperties")); } else { source().validate(); } if (dataset() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dataset in model LookupActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dataset in model LookupActivityTypeProperties")); } else { dataset().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java index 183e42a401ff5..90be4e6e4bb6a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java @@ -33,8 +33,7 @@ public final class MagentoLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -46,8 +45,7 @@ public final class MagentoLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -143,8 +141,8 @@ public MagentoLinkedServiceTypeProperties withUseHostVerification(Object useHost } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -153,8 +151,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the MagentoLinkedServiceTypeProperties object itself. @@ -165,8 +163,8 @@ public MagentoLinkedServiceTypeProperties withUsePeerVerification(Object usePeer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -175,8 +173,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MagentoLinkedServiceTypeProperties object itself. 
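The Magento hunks above expose the SSL switches and the encrypted credential; the validate() hunk that follows keeps host as the only required property. A small sketch of what that check looks like from the caller's side; the property values are placeholders.

```java
import com.azure.resourcemanager.datafactory.fluent.models.MagentoLinkedServiceTypeProperties;

public class MagentoValidateSketch {
    public static void main(String[] args) {
        MagentoLinkedServiceTypeProperties magento = new MagentoLinkedServiceTypeProperties()
            .withUsePeerVerification(true)                        // Object-typed switch, defaults to true when unset
            .withEncryptedCredential("<encrypted-credential>");
        try {
            magento.validate();                                   // 'host' was never set
        } catch (IllegalArgumentException e) {
            // "Missing required property host in model MagentoLinkedServiceTypeProperties"
            System.err.println(e.getMessage());
        }
    }
}
```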
@@ -193,8 +191,9 @@ public MagentoLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model MagentoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model MagentoLinkedServiceTypeProperties")); } if (accessToken() != null) { accessToken().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java index 5be147e239b8c..a6be27991ce37 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java @@ -81,8 +81,8 @@ public ManagedIntegrationRuntimeTypeProperties withSsisProperties(IntegrationRun } /** - * Get the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime - * will join. + * Get the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime will + * join. * * @return the customerVirtualNetwork value. */ @@ -91,8 +91,8 @@ public IntegrationRuntimeCustomerVirtualNetwork customerVirtualNetwork() { } /** - * Set the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime - * will join. + * Set the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime will + * join. * * @param customerVirtualNetwork the customerVirtualNetwork value to set. * @return the ManagedIntegrationRuntimeTypeProperties object itself. 
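customerVirtualNetwork, whose comment is reflowed above, names the virtual network that the Azure-SSIS integration runtime will join. A sketch of setting it, assuming IntegrationRuntimeCustomerVirtualNetwork exposes a withSubnetId setter, which this hunk does not show; the resource ID is a placeholder.

```java
import com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeTypeProperties;
import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork;

public class ManagedIrVnetSketch {
    public static void main(String[] args) {
        ManagedIntegrationRuntimeTypeProperties managedIr = new ManagedIntegrationRuntimeTypeProperties()
            .withCustomerVirtualNetwork(new IntegrationRuntimeCustomerVirtualNetwork()
                .withSubnetId("/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network"
                    + "/virtualNetworks/<vnet>/subnets/<subnet>"));   // assumed setter, placeholder resource ID
    }
}
```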
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java index b721282450bb5..2049f083e2399 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java @@ -108,8 +108,9 @@ public ManagedPrivateEndpointResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model ManagedPrivateEndpointResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model ManagedPrivateEndpointResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java index 3a3809c1e6ba2..c212dc0392719 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java @@ -108,8 +108,9 @@ public ManagedVirtualNetworkResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model ManagedVirtualNetworkResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model ManagedVirtualNetworkResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java index c240da1e65c9d..6f9f8ccd9c0c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class MariaDBLinkedServiceTypeProperties { /* - * The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can - * support connection string and property bag, V2 can only support connection string. + * The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. 
*/ @JsonProperty(value = "driverVersion") private Object driverVersion; @@ -57,8 +56,7 @@ public final class MariaDBLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -214,8 +212,8 @@ public MariaDBLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretRefere } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -224,8 +222,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MariaDBLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java index 8014237159c4f..c8e8419d3dd76 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java @@ -39,8 +39,7 @@ public final class MarketoLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -52,8 +51,7 @@ public final class MarketoLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -169,8 +167,8 @@ public MarketoLinkedServiceTypeProperties withUseHostVerification(Object useHost } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -179,8 +177,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the MarketoLinkedServiceTypeProperties object itself. @@ -191,8 +189,8 @@ public MarketoLinkedServiceTypeProperties withUsePeerVerification(Object usePeer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -201,8 +199,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MarketoLinkedServiceTypeProperties object itself. 
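useHostVerification and usePeerVerification above are surfaced as Object rather than Boolean, so a caller can supply either a literal value or a Data Factory expression. A sketch of both forms; the expression map shape and the parameter name are assumptions for illustration, not something this hunk defines.

```java
import com.azure.resourcemanager.datafactory.fluent.models.MarketoLinkedServiceTypeProperties;

import java.util.LinkedHashMap;
import java.util.Map;

public class MarketoSslSwitchSketch {
    public static void main(String[] args) {
        Map<String, Object> expression = new LinkedHashMap<>();
        expression.put("type", "Expression");                          // assumed ADF expression payload shape
        expression.put("value", "@pipeline().parameters.verifyPeer");  // assumed parameter name

        MarketoLinkedServiceTypeProperties marketo = new MarketoLinkedServiceTypeProperties()
            .withUseHostVerification(false)        // literal boolean
            .withUsePeerVerification(expression);  // expression evaluated at run time
    }
}
```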
@@ -219,12 +217,14 @@ public MarketoLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model MarketoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model MarketoLinkedServiceTypeProperties")); } if (clientId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientId in model MarketoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientId in model MarketoLinkedServiceTypeProperties")); } if (clientSecret() != null) { clientSecret().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java index 782fcdb605c79..416286445f1a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java @@ -15,15 +15,13 @@ @Fluent public final class MicrosoftAccessLinkedServiceTypeProperties { /* - * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* - * Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: - * Anonymous and Basic. Type: string (or Expression with resultType string). + * Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType") private Object authenticationType; @@ -47,8 +45,7 @@ public final class MicrosoftAccessLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -61,8 +58,8 @@ public MicrosoftAccessLinkedServiceTypeProperties() { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. 
Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -72,8 +69,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the MicrosoftAccessLinkedServiceTypeProperties object itself. @@ -170,8 +167,8 @@ public MicrosoftAccessLinkedServiceTypeProperties withPassword(SecretBase passwo } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -180,8 +177,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MicrosoftAccessLinkedServiceTypeProperties object itself. @@ -198,8 +195,9 @@ public MicrosoftAccessLinkedServiceTypeProperties withEncryptedCredential(String */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model MicrosoftAccessLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model MicrosoftAccessLinkedServiceTypeProperties")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java index e1a299d2135de..ebb1b7bd1acbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java @@ -25,8 +25,7 @@ public MicrosoftAccessTableDatasetTypeProperties() { } /** - * Get the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType - * string). + * Get the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType string). * * @return the tableName value. 
*/ @@ -35,8 +34,7 @@ public Object tableName() { } /** - * Set the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType - * string). + * Set the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType string). * * @param tableName the tableName value to set. * @return the MicrosoftAccessTableDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java index 67f3d48a678b8..03eb39c8e05c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java @@ -54,8 +54,9 @@ public MongoDbAtlasCollectionDatasetTypeProperties withCollection(Object collect */ public void validate() { if (collection() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collection in model MongoDbAtlasCollectionDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collection in model MongoDbAtlasCollectionDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java index 17894692b9cfe..0b827492376cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java @@ -14,22 +14,19 @@ @Fluent public final class MongoDbAtlasLinkedServiceTypeProperties { /* - * The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, - * SecureString or AzureKeyVaultSecretReference. + * The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* - * The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType - * string). + * The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */ @JsonProperty(value = "database", required = true) private Object database; /* - * The driver version that you want to choose. Allowed value are v1 and v2. Type: string (or Expression with - * resultType string). + * The driver version that you want to choose. Allowed value are v1 and v2. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "driverVersion") private Object driverVersion; @@ -113,12 +110,14 @@ public MongoDbAtlasLinkedServiceTypeProperties withDriverVersion(Object driverVe */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model MongoDbAtlasLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model MongoDbAtlasLinkedServiceTypeProperties")); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model MongoDbAtlasLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model MongoDbAtlasLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java index 36359dc0ae79c..d3378c415e903 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java @@ -54,8 +54,9 @@ public MongoDbCollectionDatasetTypeProperties withCollectionName(Object collecti */ public void validate() { if (collectionName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collectionName in model MongoDbCollectionDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collectionName in model MongoDbCollectionDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java index 919510b23fdbb..f4550a9426dd6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java @@ -52,29 +52,25 @@ public final class MongoDbLinkedServiceTypeProperties { private Object authSource; /* - * The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. - * Type: integer (or Expression with resultType integer), minimum: 0. + * The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "port") private Object port; /* - * Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: - * boolean (or Expression with resultType boolean). 
+ * Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "enableSsl") private Object enableSsl; /* - * Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean - * (or Expression with resultType boolean). + * Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -278,8 +274,8 @@ public MongoDbLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object a } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -288,8 +284,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MongoDbLinkedServiceTypeProperties object itself. 
@@ -306,12 +302,14 @@ public MongoDbLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (server() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property server in model MongoDbLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property server in model MongoDbLinkedServiceTypeProperties")); } if (databaseName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property databaseName in model MongoDbLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property databaseName in model MongoDbLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java index b23cb97eaee88..1077f38da50da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java @@ -54,8 +54,9 @@ public MongoDbV2CollectionDatasetTypeProperties withCollection(Object collection */ public void validate() { if (collection() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collection in model MongoDbV2CollectionDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collection in model MongoDbV2CollectionDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java index ea2bc4bf62420..7526974c07811 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class MongoDbV2LinkedServiceTypeProperties { /* - * The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, - * SecureString or AzureKeyVaultSecretReference. + * The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/ @JsonProperty(value = "connectionString", required = true) private Object connectionString; @@ -83,12 +82,14 @@ public MongoDbV2LinkedServiceTypeProperties withDatabase(Object database) { */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model MongoDbV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model MongoDbV2LinkedServiceTypeProperties")); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model MongoDbV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model MongoDbV2LinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java index eb10e59bbf098..b3f1ef911a080 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class MySqlLinkedServiceTypeProperties { /* - * The version of the MySQL driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support - * connection string and property bag, V2 can only support connection string. + * The version of the MySQL driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. */ @JsonProperty(value = "driverVersion") private Object driverVersion; @@ -69,8 +68,7 @@ public final class MySqlLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -270,8 +268,8 @@ public MySqlLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretReferenc } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -280,8 +278,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MySqlLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java index 07f222f9919b4..8e00fa7adbeaf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class NetezzaLinkedServiceTypeProperties { private AzureKeyVaultSecretReference pwd; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public NetezzaLinkedServiceTypeProperties withPwd(AzureKeyVaultSecretReference p } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the NetezzaLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java index 857ee384f5860..600f97602347e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java @@ -41,37 +41,31 @@ public final class ODataLinkedServiceTypeProperties { private SecretBase password; /* - * The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value - * should be string type). + * The additional HTTP headers in the request to RESTful API used for authorization. 
Type: key value pairs (value should be string type). */ @JsonProperty(value = "authHeaders") private Object authHeaders; /* - * Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or - * Expression with resultType string). + * Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression - * with resultType string). + * Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* - * Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with - * resultType string). + * Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). */ @JsonProperty(value = "aadResourceId") private Object aadResourceId; @@ -83,29 +77,25 @@ public final class ODataLinkedServiceTypeProperties { private ODataAadServicePrincipalCredentialType aadServicePrincipalCredentialType; /* - * Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with - * resultType string). + * Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* - * Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string - * (or Expression with resultType string). + * Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalEmbeddedCert") private SecretBase servicePrincipalEmbeddedCert; /* - * Specify the password of your certificate if your certificate has a password and you are using - * AadServicePrincipal authentication. Type: string (or Expression with resultType string). + * Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalEmbeddedCertPassword") private SecretBase servicePrincipalEmbeddedCertPassword; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -117,8 +107,7 @@ public ODataLinkedServiceTypeProperties() { } /** - * Get the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType - * string). + * Get the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType string). * * @return the url value. */ @@ -127,8 +116,7 @@ public Object url() { } /** - * Set the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType - * string). + * Set the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType string). * * @param url the url value to set. * @return the ODataLinkedServiceTypeProperties object itself. @@ -265,9 +253,9 @@ public ODataLinkedServiceTypeProperties withServicePrincipalId(Object servicePri } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -276,9 +264,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the ODataLinkedServiceTypeProperties object itself. @@ -311,8 +299,8 @@ public ODataLinkedServiceTypeProperties withAadResourceId(Object aadResourceId) } /** - * Get the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for - * service principal. + * Get the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for service + * principal. * * @return the aadServicePrincipalCredentialType value. */ @@ -321,8 +309,8 @@ public ODataAadServicePrincipalCredentialType aadServicePrincipalCredentialType( } /** - * Set the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for - * service principal. + * Set the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for service + * principal. * * @param aadServicePrincipalCredentialType the aadServicePrincipalCredentialType value to set. * @return the ODataLinkedServiceTypeProperties object itself. 
@@ -379,8 +367,8 @@ public ODataLinkedServiceTypeProperties withServicePrincipalEmbeddedCert(SecretB /** * Get the servicePrincipalEmbeddedCertPassword property: Specify the password of your certificate if your - * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression - * with resultType string). + * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with + * resultType string). * * @return the servicePrincipalEmbeddedCertPassword value. */ @@ -390,8 +378,8 @@ public SecretBase servicePrincipalEmbeddedCertPassword() { /** * Set the servicePrincipalEmbeddedCertPassword property: Specify the password of your certificate if your - * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression - * with resultType string). + * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with + * resultType string). * * @param servicePrincipalEmbeddedCertPassword the servicePrincipalEmbeddedCertPassword value to set. * @return the ODataLinkedServiceTypeProperties object itself. @@ -403,8 +391,8 @@ public SecretBase servicePrincipalEmbeddedCertPassword() { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -413,8 +401,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ODataLinkedServiceTypeProperties object itself. 
@@ -431,8 +419,9 @@ public ODataLinkedServiceTypeProperties withEncryptedCredential(String encrypted */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model ODataLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model ODataLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java index 1d04cc51449b4..7d2e19efd85ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java @@ -15,15 +15,13 @@ @Fluent public final class OdbcLinkedServiceTypeProperties { /* - * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* - * Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: - * string (or Expression with resultType string). + * Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ @JsonProperty(value = "authenticationType") private Object authenticationType; @@ -47,8 +45,7 @@ public final class OdbcLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -61,8 +58,8 @@ public OdbcLinkedServiceTypeProperties() { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -72,8 +69,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. 
Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the OdbcLinkedServiceTypeProperties object itself. @@ -170,8 +167,8 @@ public OdbcLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -180,8 +177,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OdbcLinkedServiceTypeProperties object itself. @@ -198,8 +195,9 @@ public OdbcLinkedServiceTypeProperties withEncryptedCredential(String encryptedC */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model OdbcLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model OdbcLinkedServiceTypeProperties")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java index cd660ed0119ec..0f4cf85e3df19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java @@ -20,8 +20,7 @@ public final class Office365DatasetTypeProperties { private Object tableName; /* - * A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or - * Expression with resultType string). + * A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "predicate") private Object predicate; @@ -83,8 +82,9 @@ public Office365DatasetTypeProperties withPredicate(Object predicate) { */ public void validate() { if (tableName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tableName in model Office365DatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tableName in model Office365DatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java index f57681b7216f7..0f0987b2b3248 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class Office365LinkedServiceTypeProperties { private Object office365TenantId; /* - * Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression - * with resultType string). + * Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalTenantId", required = true) private Object servicePrincipalTenantId; @@ -40,8 +39,7 @@ public final class Office365LinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -139,8 +137,8 @@ public Office365LinkedServiceTypeProperties withServicePrincipalKey(SecretBase s } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -149,8 +147,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the Office365LinkedServiceTypeProperties object itself. 
@@ -167,20 +165,24 @@ public Office365LinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (office365TenantId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property office365TenantId in model Office365LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property office365TenantId in model Office365LinkedServiceTypeProperties")); } if (servicePrincipalTenantId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property servicePrincipalTenantId in model Office365LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property servicePrincipalTenantId in model Office365LinkedServiceTypeProperties")); } if (servicePrincipalId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property servicePrincipalId in model Office365LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property servicePrincipalId in model Office365LinkedServiceTypeProperties")); } if (servicePrincipalKey() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property servicePrincipalKey in model Office365LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property servicePrincipalKey in model Office365LinkedServiceTypeProperties")); } else { servicePrincipalKey().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java index de9167fe745ad..6a31b3db86d89 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class OracleCloudStorageLinkedServiceTypeProperties { /* - * The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string - * (or Expression with resultType string). + * The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ @JsonProperty(value = "accessKeyId") private Object accessKeyId; @@ -27,16 +26,13 @@ public final class OracleCloudStorageLinkedServiceTypeProperties { private SecretBase secretAccessKey; /* - * This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional - * property; change it only if you want to try a different service endpoint or want to switch between https and - * http. Type: string (or Expression with resultType string). + * This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -92,9 +88,9 @@ public OracleCloudStorageLinkedServiceTypeProperties withSecretAccessKey(SecretB } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Get the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -103,9 +99,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the OracleCloudStorageLinkedServiceTypeProperties object itself. @@ -116,8 +112,8 @@ public OracleCloudStorageLinkedServiceTypeProperties withServiceUrl(Object servi } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -126,8 +122,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OracleCloudStorageLinkedServiceTypeProperties object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java index 21380b337b869..0951f13357f73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class OracleLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -82,8 +81,8 @@ public OracleLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretReferen } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -92,8 +91,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OracleLinkedServiceTypeProperties object itself. 
@@ -110,8 +109,9 @@ public OracleLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model OracleLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model OracleLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java index dfac50b4e310d..393d3f61412a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java @@ -33,29 +33,25 @@ public final class OracleServiceCloudLinkedServiceTypeProperties { private SecretBase password; /* - * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean - * (or Expression with resultType boolean). + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* - * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - * Type: boolean (or Expression with resultType boolean). + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -173,8 +169,8 @@ public OracleServiceCloudLinkedServiceTypeProperties withUseHostVerification(Obj } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
+     * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over
+     * SSL. The default value is true. Type: boolean (or Expression with resultType boolean).
      *
      * @return the usePeerVerification value.
      */
@@ -183,8 +179,8 @@ public Object usePeerVerification() {
     }
 
     /**
-     * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting
-     * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean).
+     * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over
+     * SSL. The default value is true. Type: boolean (or Expression with resultType boolean).
      *
      * @param usePeerVerification the usePeerVerification value to set.
      * @return the OracleServiceCloudLinkedServiceTypeProperties object itself.
@@ -195,8 +191,8 @@ public OracleServiceCloudLinkedServiceTypeProperties withUsePeerVerification(Obj
     }
 
     /**
-     * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are
-     * encrypted using the integration runtime credential manager. Type: string.
+     * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted
+     * using the integration runtime credential manager. Type: string.
      *
      * @return the encryptedCredential value.
      */
@@ -205,8 +201,8 @@ public String encryptedCredential() {
     }
 
     /**
-     * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are
-     * encrypted using the integration runtime credential manager. Type: string.
+     * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted
+     * using the integration runtime credential manager. Type: string.
      *
      * @param encryptedCredential the encryptedCredential value to set.
      * @return the OracleServiceCloudLinkedServiceTypeProperties object itself.
@@ -223,16 +219,19 @@ public OracleServiceCloudLinkedServiceTypeProperties withEncryptedCredential(Str
      */
     public void validate() {
         if (host() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property host in model OracleServiceCloudLinkedServiceTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property host in model OracleServiceCloudLinkedServiceTypeProperties"));
         }
         if (username() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property username in model OracleServiceCloudLinkedServiceTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property username in model OracleServiceCloudLinkedServiceTypeProperties"));
        }
        if (password() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property password in model OracleServiceCloudLinkedServiceTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property password in model OracleServiceCloudLinkedServiceTypeProperties"));
        } else {
            password().validate();
        }
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java
index 2376b3b27529e..ac65b24d0270c 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java
@@ -81,8 +81,9 @@ public OrcDatasetTypeProperties withOrcCompressionCodec(Object orcCompressionCod
      */
     public void validate() {
         if (location() == null) {
-            throw LOGGER.logExceptionAsError(
-                new IllegalArgumentException("Missing required property location in model OrcDatasetTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property location in model OrcDatasetTypeProperties"));
        } else {
            location().validate();
        }
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java
index b2f5306a551d3..c1c89660dc77d 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java
@@ -81,8 +81,9 @@ public ParquetDatasetTypeProperties withCompressionCodec(Object compressionCodec
      */
     public void validate() {
         if (location() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property location in model ParquetDatasetTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property location in model ParquetDatasetTypeProperties"));
        } else {
            location().validate();
        }
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java
index 8b5e549879859..c33d3ce81be99 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java
@@ -39,8 +39,7 @@ public final class PaypalLinkedServiceTypeProperties {
     private Object useEncryptedEndpoints;
 
     /*
-     * Specifies whether to require the host name in the server's certificate to match the host name of the server when
-     * connecting over SSL. The default value is true.
+     * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true.
      */
     @JsonProperty(value = "useHostVerification")
     private Object useHostVerification;
@@ -52,8 +51,7 @@ public final class PaypalLinkedServiceTypeProperties {
     private Object usePeerVerification;
 
     /*
-     * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime
-     * credential manager. Type: string.
+     * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
      */
     @JsonProperty(value = "encryptedCredential")
     private String encryptedCredential;
@@ -169,8 +167,8 @@ public PaypalLinkedServiceTypeProperties withUseHostVerification(Object useHostV
     }
 
     /**
-     * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting
-     * over SSL. The default value is true.
+     * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over
+     * SSL. The default value is true.
      *
      * @return the usePeerVerification value.
      */
@@ -179,8 +177,8 @@ public Object usePeerVerification() {
     }
 
     /**
-     * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting
-     * over SSL. The default value is true.
+     * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over
+     * SSL. The default value is true.
      *
     * @param usePeerVerification the usePeerVerification value to set.
     * @return the PaypalLinkedServiceTypeProperties object itself.
@@ -191,8 +189,8 @@ public PaypalLinkedServiceTypeProperties withUsePeerVerification(Object usePeerV
     }
 
     /**
-     * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are
-     * encrypted using the integration runtime credential manager. Type: string.
+     * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted
+     * using the integration runtime credential manager. Type: string.
      *
      * @return the encryptedCredential value.
      */
@@ -201,8 +199,8 @@ public String encryptedCredential() {
     }
 
     /**
-     * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are
-     * encrypted using the integration runtime credential manager. Type: string.
+     * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted
+     * using the integration runtime credential manager. Type: string.
      *
      * @param encryptedCredential the encryptedCredential value to set.
      * @return the PaypalLinkedServiceTypeProperties object itself.
@@ -219,12 +217,14 @@ public PaypalLinkedServiceTypeProperties withEncryptedCredential(String encrypte
      */
     public void validate() {
         if (host() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property host in model PaypalLinkedServiceTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property host in model PaypalLinkedServiceTypeProperties"));
        }
        if (clientId() == null) {
-            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
-                "Missing required property clientId in model PaypalLinkedServiceTypeProperties"));
+            throw LOGGER.atError()
+                .log(new IllegalArgumentException(
+                    "Missing required property clientId in model PaypalLinkedServiceTypeProperties"));
        }
        if (clientSecret() != null) {
            clientSecret().validate();
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java
index 371bfa9c727ed..5d4d1dfe70023 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java
@@ -28,8 +28,7 @@ public final class PhoenixLinkedServiceTypeProperties {
     private Object port;
 
     /*
-     * The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value
-     * is hbasephoenix if using WindowsAzureHDInsightService.
+     * The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService.
      */
     @JsonProperty(value = "httpPath")
     private Object httpPath;
@@ -59,23 +58,19 @@ public final class PhoenixLinkedServiceTypeProperties {
     private Object enableSsl;
 
     /*
-     * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over
-     * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file
-     * installed with the IR.
+     * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR.
      */
     @JsonProperty(value = "trustedCertPath")
     private Object trustedCertPath;
 
     /*
-     * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default
-     * value is false.
+     * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false.
      */
     @JsonProperty(value = "useSystemTrustStore")
     private Object useSystemTrustStore;
 
     /*
-     * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when
-     * connecting over SSL. The default value is false.
+ * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -87,8 +82,7 @@ public final class PhoenixLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -246,9 +240,9 @@ public PhoenixLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -257,9 +251,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the PhoenixLinkedServiceTypeProperties object itself. @@ -336,8 +330,8 @@ public PhoenixLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object a } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -346,8 +340,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PhoenixLinkedServiceTypeProperties object itself. 
@@ -364,12 +358,14 @@ public PhoenixLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model PhoenixLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model PhoenixLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model PhoenixLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model PhoenixLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java index 9bb24805b00e2..476ebbafa09bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java @@ -351,8 +351,9 @@ public PipelineResourceInner withPolicy(PipelinePolicy policy) { */ public void validate() { if (innerProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerProperties in model PipelineResourceInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerProperties in model PipelineResourceInner")); } else { innerProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java index 8e9824858dec8..e072cc481be04 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java @@ -53,8 +53,8 @@ public PipelineRunsQueryResponseInner withValue(List value) { } /** - * Get the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Get the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @return the continuationToken value. */ @@ -63,8 +63,8 @@ public String continuationToken() { } /** - * Set the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Set the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @param continuationToken the continuationToken value to set. 
* @return the PipelineRunsQueryResponseInner object itself. @@ -81,8 +81,9 @@ public PipelineRunsQueryResponseInner withContinuationToken(String continuationT */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model PipelineRunsQueryResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model PipelineRunsQueryResponseInner")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java index 3215f8412076e..f88d9046ef7b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class PostgreSqlLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -82,8 +81,8 @@ public PostgreSqlLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretRef } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -92,8 +91,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PostgreSqlLinkedServiceTypeProperties object itself. 
@@ -110,8 +109,9 @@ public PostgreSqlLinkedServiceTypeProperties withEncryptedCredential(String encr */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model PostgreSqlLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model PostgreSqlLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java index e9bb6c95133db..fab123ee9bfae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java @@ -39,8 +39,7 @@ public final class PostgreSqlV2LinkedServiceTypeProperties { private Object database; /* - * SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3: require, 4: verify-ca, 5: - * verify-full. Type: integer. + * SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. Type: integer. */ @JsonProperty(value = "sslMode", required = true) private Object sslMode; @@ -58,15 +57,13 @@ public final class PostgreSqlV2LinkedServiceTypeProperties { private Object pooling; /* - * The time to wait (in seconds) while trying to establish a connection before terminating the attempt and - * generating an error. Type: integer. + * The time to wait (in seconds) while trying to establish a connection before terminating the attempt and generating an error. Type: integer. */ @JsonProperty(value = "connectionTimeout") private Object connectionTimeout; /* - * The time to wait (in seconds) while trying to execute a command before terminating the attempt and generating an - * error. Set to zero for infinity. Type: integer. + * The time to wait (in seconds) while trying to execute a command before terminating the attempt and generating an error. Set to zero for infinity. Type: integer. */ @JsonProperty(value = "commandTimeout") private Object commandTimeout; @@ -96,8 +93,7 @@ public final class PostgreSqlV2LinkedServiceTypeProperties { private Object sslPassword; /* - * Determines the size of the internal buffer uses when reading. Increasing may improve performance if transferring - * large values from the database. Type: integer. + * Determines the size of the internal buffer uses when reading. Increasing may improve performance if transferring large values from the database. Type: integer. */ @JsonProperty(value = "readBufferSize") private Object readBufferSize; @@ -127,8 +123,7 @@ public final class PostgreSqlV2LinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -516,8 +511,8 @@ public PostgreSqlV2LinkedServiceTypeProperties withPassword(AzureKeyVaultSecretR } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -526,8 +521,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PostgreSqlV2LinkedServiceTypeProperties object itself. @@ -544,20 +539,24 @@ public PostgreSqlV2LinkedServiceTypeProperties withEncryptedCredential(String en */ public void validate() { if (server() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property server in model PostgreSqlV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property server in model PostgreSqlV2LinkedServiceTypeProperties")); } if (username() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property username in model PostgreSqlV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model PostgreSqlV2LinkedServiceTypeProperties")); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model PostgreSqlV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model PostgreSqlV2LinkedServiceTypeProperties")); } if (sslMode() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property sslMode in model PostgreSqlV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property sslMode in model PostgreSqlV2LinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java index 089e9254da6e8..1e5f884e27bbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java @@ -64,23 +64,19 @@ public final class PrestoLinkedServiceTypeProperties { private Object enableSsl; /* - * The full path of the .pem file containing trusted CA 
certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default - * value is false. + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when - * connecting over SSL. The default value is false. + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -92,15 +88,13 @@ public final class PrestoLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone - * Database. The default value is the system time zone. + * The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. */ @JsonProperty(value = "timeZoneID") private Object timeZoneId; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -276,9 +270,9 @@ public PrestoLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -287,9 +281,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. 
The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the PrestoLinkedServiceTypeProperties object itself. @@ -388,8 +382,8 @@ public PrestoLinkedServiceTypeProperties withTimeZoneId(Object timeZoneId) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -398,8 +392,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PrestoLinkedServiceTypeProperties object itself. @@ -416,20 +410,24 @@ public PrestoLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model PrestoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model PrestoLinkedServiceTypeProperties")); } if (serverVersion() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property serverVersion in model PrestoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property serverVersion in model PrestoLinkedServiceTypeProperties")); } if (catalog() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property catalog in model PrestoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property catalog in model PrestoLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model PrestoLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model PrestoLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java index 0bdf28b5010b9..8a00d9f0cf51c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java @@ -54,8 +54,9 @@ public PrivateLinkResourcesWrapperInner withValue(List valu 
*/ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model PrivateLinkResourcesWrapperInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model PrivateLinkResourcesWrapperInner")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java index 906aa935eabbd..e03398a47482f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class QuickBooksLinkedServiceTypeProperties { /* - * Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked - * service. Type: object. + * Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; @@ -63,8 +62,7 @@ public final class QuickBooksLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -240,8 +238,8 @@ public QuickBooksLinkedServiceTypeProperties withUseEncryptedEndpoints(Object us } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -250,8 +248,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the QuickBooksLinkedServiceTypeProperties object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java index b01bd3aa828a3..258157b1a7946 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class QuickbaseLinkedServiceTypeProperties { private SecretBase userToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -80,8 +79,8 @@ public QuickbaseLinkedServiceTypeProperties withUserToken(SecretBase userToken) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -90,8 +89,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the QuickbaseLinkedServiceTypeProperties object itself. 
@@ -108,12 +107,14 @@ public QuickbaseLinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model QuickbaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model QuickbaseLinkedServiceTypeProperties")); } if (userToken() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property userToken in model QuickbaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property userToken in model QuickbaseLinkedServiceTypeProperties")); } else { userToken().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java index b8fe1b549f10d..d9b7edbdfc132 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java @@ -137,16 +137,19 @@ public RerunTumblingWindowTriggerTypeProperties withRerunConcurrency(int rerunCo */ public void validate() { if (parentTrigger() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property parentTrigger in model RerunTumblingWindowTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property parentTrigger in model RerunTumblingWindowTriggerTypeProperties")); } if (requestedStartTime() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property requestedStartTime in model RerunTumblingWindowTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property requestedStartTime in model RerunTumblingWindowTriggerTypeProperties")); } if (requestedEndTime() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property requestedEndTime in model RerunTumblingWindowTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property requestedEndTime in model RerunTumblingWindowTriggerTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java index d7b77c10a7edd..e7dd48ae0512c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java @@ -27,36 +27,31 @@ public final class ResponsysLinkedServiceTypeProperties { private Object clientId; 
/* - * The client secret associated with the Responsys application. Type: string (or Expression with resultType - * string). + * The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* - * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean - * (or Expression with resultType boolean). + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* - * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - * Type: boolean (or Expression with resultType boolean). + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -178,8 +173,8 @@ public ResponsysLinkedServiceTypeProperties withUseHostVerification(Object useHo } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @return the usePeerVerification value. */ @@ -188,8 +183,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @param usePeerVerification the usePeerVerification value to set. * @return the ResponsysLinkedServiceTypeProperties object itself. @@ -200,8 +195,8 @@ public ResponsysLinkedServiceTypeProperties withUsePeerVerification(Object usePe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -210,8 +205,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ResponsysLinkedServiceTypeProperties object itself. @@ -228,12 +223,14 @@ public ResponsysLinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model ResponsysLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model ResponsysLinkedServiceTypeProperties")); } if (clientId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientId in model ResponsysLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientId in model ResponsysLinkedServiceTypeProperties")); } if (clientSecret() != null) { clientSecret().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java index a3026f6ad81bd..717c13aacc5ff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java @@ -15,22 +15,19 @@ @Fluent public final class RestResourceDatasetTypeProperties { /* - * The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType - * string). + * The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). */ @JsonProperty(value = "relativeUrl") private Object relativeUrl; /* - * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType - * string). + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestMethod") private Object requestMethod; /* - * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType - * string). + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "requestBody") private Object requestBody; @@ -100,8 +97,8 @@ public RestResourceDatasetTypeProperties withRequestMethod(Object requestMethod) } /** - * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @return the requestBody value. */ @@ -110,8 +107,8 @@ public Object requestBody() { } /** - * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the RestResourceDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java index 19297b8f03a82..930afb2da08ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java @@ -23,8 +23,7 @@ public final class RestServiceLinkedServiceTypeProperties { private Object url; /* - * Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: - * boolean (or Expression with resultType boolean). + * Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; @@ -48,15 +47,13 @@ public final class RestServiceLinkedServiceTypeProperties { private SecretBase password; /* - * The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression - * with resultType object). + * The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). */ @JsonProperty(value = "authHeaders") private Object authHeaders; /* - * The application's client ID used in AadServicePrincipal authentication type. Type: string (or Expression with - * resultType string). + * The application's client ID used in AadServicePrincipal authentication type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -68,16 +65,13 @@ public final class RestServiceLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which - * your application resides. Type: string (or Expression with resultType string). + * The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, - * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or - * Expression with resultType string). + * Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ @JsonProperty(value = "azureCloudType") private Object azureCloudType; @@ -89,8 +83,7 @@ public final class RestServiceLinkedServiceTypeProperties { private Object aadResourceId; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -114,22 +107,19 @@ public final class RestServiceLinkedServiceTypeProperties { private SecretBase clientSecret; /* - * The token endpoint of the authorization server to acquire access token. Type: string (or Expression with - * resultType string). + * The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tokenEndpoint") private Object tokenEndpoint; /* - * The target service or resource to which the access will be requested. Type: string (or Expression with - * resultType string). + * The target service or resource to which the access will be requested. Type: string (or Expression with resultType string). */ @JsonProperty(value = "resource") private Object resource; /* - * The scope of the access required. It describes what kind of access will be requested. Type: string (or - * Expression with resultType string). + * The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). */ @JsonProperty(value = "scope") private Object scope; @@ -269,8 +259,8 @@ public RestServiceLinkedServiceTypeProperties withAuthHeaders(Object authHeaders } /** - * Get the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication - * type. Type: string (or Expression with resultType string). + * Get the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication type. + * Type: string (or Expression with resultType string). * * @return the servicePrincipalId value. */ @@ -279,8 +269,8 @@ public Object servicePrincipalId() { } /** - * Set the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication - * type. Type: string (or Expression with resultType string). + * Set the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication type. + * Type: string (or Expression with resultType string). * * @param servicePrincipalId the servicePrincipalId value to set. * @return the RestServiceLinkedServiceTypeProperties object itself. 
@@ -333,9 +323,9 @@ public RestServiceLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -344,9 +334,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the RestServiceLinkedServiceTypeProperties object itself. @@ -357,8 +347,8 @@ public RestServiceLinkedServiceTypeProperties withAzureCloudType(Object azureClo } /** - * Get the aadResourceId property: The resource you are requesting authorization to use. Type: string (or - * Expression with resultType string). + * Get the aadResourceId property: The resource you are requesting authorization to use. Type: string (or Expression + * with resultType string). * * @return the aadResourceId value. */ @@ -367,8 +357,8 @@ public Object aadResourceId() { } /** - * Set the aadResourceId property: The resource you are requesting authorization to use. Type: string (or - * Expression with resultType string). + * Set the aadResourceId property: The resource you are requesting authorization to use. Type: string (or Expression + * with resultType string). * * @param aadResourceId the aadResourceId value to set. * @return the RestServiceLinkedServiceTypeProperties object itself. @@ -379,8 +369,8 @@ public RestServiceLinkedServiceTypeProperties withAadResourceId(Object aadResour } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -389,8 +379,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the RestServiceLinkedServiceTypeProperties object itself. 
@@ -485,8 +475,8 @@ public RestServiceLinkedServiceTypeProperties withTokenEndpoint(Object tokenEndp } /** - * Get the resource property: The target service or resource to which the access will be requested. Type: string - * (or Expression with resultType string). + * Get the resource property: The target service or resource to which the access will be requested. Type: string (or + * Expression with resultType string). * * @return the resource value. */ @@ -495,8 +485,8 @@ public Object resource() { } /** - * Set the resource property: The target service or resource to which the access will be requested. Type: string - * (or Expression with resultType string). + * Set the resource property: The target service or resource to which the access will be requested. Type: string (or + * Expression with resultType string). * * @param resource the resource value to set. * @return the RestServiceLinkedServiceTypeProperties object itself. @@ -535,12 +525,14 @@ public RestServiceLinkedServiceTypeProperties withScope(Object scope) { */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model RestServiceLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model RestServiceLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model RestServiceLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model RestServiceLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java index cb6fe73056a55..7cec1a2ed71f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java @@ -14,16 +14,13 @@ @Fluent public final class SalesforceLinkedServiceTypeProperties { /* - * The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify - * 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - * 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + * The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* - * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType - * string). + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "username") private Object username; @@ -47,8 +44,7 @@ public final class SalesforceLinkedServiceTypeProperties { private Object apiVersion; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -168,8 +164,8 @@ public SalesforceLinkedServiceTypeProperties withApiVersion(Object apiVersion) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -178,8 +174,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java index d140d9dec8211..98ac26a97af60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java @@ -14,50 +14,43 @@ @Fluent public final class SalesforceMarketingCloudLinkedServiceTypeProperties { /* - * Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in - * the linked service. Type: object. + * Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* - * The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with - * resultType string). + * The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; /* - * The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with - * resultType string). + * The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* - * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean - * (or Expression with resultType boolean). + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* - * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - * Type: boolean (or Expression with resultType boolean). + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -91,8 +84,8 @@ public SalesforceMarketingCloudLinkedServiceTypeProperties withConnectionPropert } /** - * Get the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: - * string (or Expression with resultType string). + * Get the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: string + * (or Expression with resultType string). * * @return the clientId value. */ @@ -101,8 +94,8 @@ public Object clientId() { } /** - * Set the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: - * string (or Expression with resultType string). + * Set the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: string + * (or Expression with resultType string). * * @param clientId the clientId value to set. * @return the SalesforceMarketingCloudLinkedServiceTypeProperties object itself. @@ -181,8 +174,8 @@ public SalesforceMarketingCloudLinkedServiceTypeProperties withUseHostVerificati } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @return the usePeerVerification value. */ @@ -191,8 +184,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. 
The default value is true. Type: boolean (or Expression with resultType boolean). + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @param usePeerVerification the usePeerVerification value to set. * @return the SalesforceMarketingCloudLinkedServiceTypeProperties object itself. @@ -203,8 +196,8 @@ public SalesforceMarketingCloudLinkedServiceTypeProperties withUsePeerVerificati } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -213,8 +206,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceMarketingCloudLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java index fcb1a95088048..5665175e9a5a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java @@ -14,16 +14,13 @@ @Fluent public final class SalesforceServiceCloudLinkedServiceTypeProperties { /* - * The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from - * sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - * 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + * The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* - * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType - * string). + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "username") private Object username; @@ -53,8 +50,7 @@ public final class SalesforceServiceCloudLinkedServiceTypeProperties { private Object extendedProperties; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -198,8 +194,8 @@ public SalesforceServiceCloudLinkedServiceTypeProperties withExtendedProperties( } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -208,8 +204,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceServiceCloudLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java index d6ae25e945b5d..c5a16d351d22b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java @@ -14,22 +14,19 @@ @Fluent public final class SalesforceServiceCloudV2LinkedServiceTypeProperties { /* - * The URL of Salesforce Service Cloud instance. For example, 'https://[domain].my.salesforce.com'. Type: string - * (or Expression with resultType string). + * The URL of Salesforce Service Cloud instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* - * The authentication type to be used to connect to the Salesforce. Currently, we only support - * OAuth2ClientCredentials, it is also the default value + * The authentication type to be used to connect to the Salesforce. Currently, we only support OAuth2ClientCredentials, it is also the default value */ @JsonProperty(value = "authenticationType") private Object authenticationType; /* - * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or - * Expression with resultType string). 
+ * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; @@ -41,15 +38,13 @@ public final class SalesforceServiceCloudV2LinkedServiceTypeProperties { private SecretBase clientSecret; /* - * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by - * Salesforce BULK API 2.0. Type: string (or Expression with resultType string). + * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). */ @JsonProperty(value = "apiVersion") private Object apiVersion; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -171,8 +166,8 @@ public SalesforceServiceCloudV2LinkedServiceTypeProperties withApiVersion(Object } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -181,8 +176,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceServiceCloudV2LinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java index 2a42888c8a9b0..c2e07084f0202 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java @@ -31,8 +31,8 @@ public SalesforceServiceCloudV2ObjectDatasetTypeProperties() { } /** - * Get the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression - * with resultType string). + * Get the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression with + * resultType string). * * @return the objectApiName value. 
*/ @@ -41,8 +41,8 @@ public Object objectApiName() { } /** - * Set the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression - * with resultType string). + * Set the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression with + * resultType string). * * @param objectApiName the objectApiName value to set. * @return the SalesforceServiceCloudV2ObjectDatasetTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java index 8f6445f2fbc61..7f7f00dce8278 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java @@ -14,22 +14,19 @@ @Fluent public final class SalesforceV2LinkedServiceTypeProperties { /* - * The URL of Salesforce instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression - * with resultType string). + * The URL of Salesforce instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* - * The authentication type to be used to connect to the Salesforce. Currently, we only support - * OAuth2ClientCredentials, it is also the default value + * The authentication type to be used to connect to the Salesforce. Currently, we only support OAuth2ClientCredentials, it is also the default value */ @JsonProperty(value = "authenticationType") private Object authenticationType; /* - * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or - * Expression with resultType string). + * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; @@ -41,15 +38,13 @@ public final class SalesforceV2LinkedServiceTypeProperties { private SecretBase clientSecret; /* - * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by - * Salesforce BULK API 2.0. Type: string (or Expression with resultType string). + * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). */ @JsonProperty(value = "apiVersion") private Object apiVersion; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -171,8 +166,8 @@ public SalesforceV2LinkedServiceTypeProperties withApiVersion(Object apiVersion) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -181,8 +176,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceV2LinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java index f7810fb4c2470..8a8e3d7d8c9d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java @@ -21,15 +21,13 @@ public final class SapBWLinkedServiceTypeProperties { private Object server; /* - * System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or - * Expression with resultType string). + * System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ @JsonProperty(value = "systemNumber", required = true) private Object systemNumber; /* - * Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: - * string (or Expression with resultType string). + * Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId", required = true) private Object clientId; @@ -47,8 +45,7 @@ public final class SapBWLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -80,8 +77,8 @@ public SapBWLinkedServiceTypeProperties withServer(Object server) { } /** - * Get the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented - * as a string.) Type: string (or Expression with resultType string). + * Get the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented as + * a string.) Type: string (or Expression with resultType string). * * @return the systemNumber value. 
*/ @@ -90,8 +87,8 @@ public Object systemNumber() { } /** - * Set the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented - * as a string.) Type: string (or Expression with resultType string). + * Set the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented as + * a string.) Type: string (or Expression with resultType string). * * @param systemNumber the systemNumber value to set. * @return the SapBWLinkedServiceTypeProperties object itself. @@ -166,8 +163,8 @@ public SapBWLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -176,8 +173,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapBWLinkedServiceTypeProperties object itself. @@ -194,16 +191,19 @@ public SapBWLinkedServiceTypeProperties withEncryptedCredential(String encrypted */ public void validate() { if (server() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property server in model SapBWLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property server in model SapBWLinkedServiceTypeProperties")); } if (systemNumber() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property systemNumber in model SapBWLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property systemNumber in model SapBWLinkedServiceTypeProperties")); } if (clientId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property clientId in model SapBWLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property clientId in model SapBWLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java index 1af35aa03158c..ef8a36ced6e12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java @@ -15,8 +15,7 @@ 
@Fluent public final class SapCloudForCustomerLinkedServiceTypeProperties { /* - * The URL of SAP Cloud for Customer OData API. For example, - * '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). + * The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; @@ -34,8 +33,7 @@ public final class SapCloudForCustomerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Either encryptedCredential or username/password must be provided. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -111,9 +109,9 @@ public SapCloudForCustomerLinkedServiceTypeProperties withPassword(SecretBase pa } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @return the encryptedCredential value. */ @@ -122,9 +120,9 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapCloudForCustomerLinkedServiceTypeProperties object itself. 
@@ -141,8 +139,9 @@ public SapCloudForCustomerLinkedServiceTypeProperties withEncryptedCredential(St */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model SapCloudForCustomerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model SapCloudForCustomerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java index dcbf5f602b9d8..51544a13e5784 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java @@ -54,8 +54,9 @@ public SapCloudForCustomerResourceDatasetTypeProperties withPath(Object path) { */ public void validate() { if (path() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property path in model SapCloudForCustomerResourceDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property path in model SapCloudForCustomerResourceDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java index d5b88a3d95f05..3957634e9793d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java @@ -15,8 +15,7 @@ @Fluent public final class SapEccLinkedServiceTypeProperties { /* - * The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: - * string (or Expression with resultType string). + * The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; @@ -34,8 +33,7 @@ public final class SapEccLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Either encryptedCredential or username/password must be provided. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -111,9 +109,9 @@ public SapEccLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @return the encryptedCredential value. */ @@ -122,9 +120,9 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapEccLinkedServiceTypeProperties object itself. @@ -141,8 +139,9 @@ public SapEccLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model SapEccLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model SapEccLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java index abb97662aff29..ab07d8c8d4da0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java @@ -26,8 +26,7 @@ public SapEccResourceDatasetTypeProperties() { } /** - * Get the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType - * string). + * Get the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). * * @return the path value. */ @@ -36,8 +35,7 @@ public Object path() { } /** - * Set the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType - * string). + * Set the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). * * @param path the path value to set. * @return the SapEccResourceDatasetTypeProperties object itself. 
@@ -54,8 +52,9 @@ public SapEccResourceDatasetTypeProperties withPath(Object path) { */ public void validate() { if (path() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property path in model SapEccResourceDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property path in model SapEccResourceDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java index 7274ee288f803..398687c5cdc58 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java @@ -45,8 +45,7 @@ public final class SapHanaLinkedServiceProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -162,8 +161,8 @@ public SapHanaLinkedServiceProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -172,8 +171,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapHanaLinkedServiceProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java index 91c97ee336482..5340655e9f7c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java @@ -20,22 +20,19 @@ public final class SapOdpLinkedServiceTypeProperties { private Object server; /* - * System number of the SAP system where the table is located. 
(Usually a two-digit decimal number represented as a - * string.) Type: string (or Expression with resultType string). + * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ @JsonProperty(value = "systemNumber") private Object systemNumber; /* - * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number - * represented as a string) Type: string (or Expression with resultType string). + * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; /* - * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with - * resultType string). + * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */ @JsonProperty(value = "language") private Object language; @@ -47,8 +44,7 @@ public final class SapOdpLinkedServiceTypeProperties { private Object systemId; /* - * Username to access the SAP server where the table is located. Type: string (or Expression with resultType - * string). + * Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "userName") private Object username; @@ -72,36 +68,31 @@ public final class SapOdpLinkedServiceTypeProperties { private Object messageServerService; /* - * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). - * Type: string (or Expression with resultType string). + * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncMode") private Object sncMode; /* - * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with - * resultType string). + * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncMyName") private Object sncMyName; /* - * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or - * Expression with resultType string). + * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncPartnerName") private Object sncPartnerName; /* - * External security product's library to access the SAP server where the table is located. Type: string (or - * Expression with resultType string). + * External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncLibraryPath") private Object sncLibraryPath; /* - * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType - * string). + * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "sncQop") private Object sncQop; @@ -125,8 +116,7 @@ public final class SapOdpLinkedServiceTypeProperties { private Object subscriberName; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -444,8 +434,8 @@ public SapOdpLinkedServiceTypeProperties withSncQop(Object sncQop) { } /** - * Get the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with - * resultType string). + * Get the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with resultType + * string). * * @return the x509CertificatePath value. */ @@ -454,8 +444,8 @@ public Object x509CertificatePath() { } /** - * Set the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with - * resultType string). + * Set the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with resultType + * string). * * @param x509CertificatePath the x509CertificatePath value to set. * @return the SapOdpLinkedServiceTypeProperties object itself. @@ -508,8 +498,8 @@ public SapOdpLinkedServiceTypeProperties withSubscriberName(Object subscriberNam } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -518,8 +508,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapOdpLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java index c512d213b4429..ae80989598a6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java @@ -32,8 +32,7 @@ public SapOdpResourceDatasetTypeProperties() { } /** - * Get the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Get the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType string). * * @return the context value. 
*/ @@ -42,8 +41,7 @@ public Object context() { } /** - * Set the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Set the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType string). * * @param context the context value to set. * @return the SapOdpResourceDatasetTypeProperties object itself. @@ -54,8 +52,7 @@ public SapOdpResourceDatasetTypeProperties withContext(Object context) { } /** - * Get the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Get the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType string). * * @return the objectName value. */ @@ -64,8 +61,7 @@ public Object objectName() { } /** - * Set the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Set the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType string). * * @param objectName the objectName value to set. * @return the SapOdpResourceDatasetTypeProperties object itself. @@ -82,12 +78,14 @@ public SapOdpResourceDatasetTypeProperties withObjectName(Object objectName) { */ public void validate() { if (context() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property context in model SapOdpResourceDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property context in model SapOdpResourceDatasetTypeProperties")); } if (objectName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property objectName in model SapOdpResourceDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property objectName in model SapOdpResourceDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java index 7ed637c50eb4e..85bb3132ca9f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java @@ -14,29 +14,25 @@ @Fluent public final class SapOpenHubLinkedServiceTypeProperties { /* - * Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with - * resultType string). + * Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "server") private Object server; /* - * System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number - * represented as a string.) Type: string (or Expression with resultType string). + * System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "systemNumber") private Object systemNumber; /* - * Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit - * decimal number represented as a string) Type: string (or Expression with resultType string). + * Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; /* - * Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or - * Expression with resultType string). + * Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */ @JsonProperty(value = "language") private Object language; @@ -48,8 +44,7 @@ public final class SapOpenHubLinkedServiceTypeProperties { private Object systemId; /* - * Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with - * resultType string). + * Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "userName") private Object username; @@ -79,8 +74,7 @@ public final class SapOpenHubLinkedServiceTypeProperties { private Object logonGroup; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -92,8 +86,8 @@ public SapOpenHubLinkedServiceTypeProperties() { } /** - * Get the server property: Host name of the SAP BW instance where the open hub destination is located. Type: - * string (or Expression with resultType string). + * Get the server property: Host name of the SAP BW instance where the open hub destination is located. Type: string + * (or Expression with resultType string). * * @return the server value. */ @@ -102,8 +96,8 @@ public Object server() { } /** - * Set the server property: Host name of the SAP BW instance where the open hub destination is located. Type: - * string (or Expression with resultType string). + * Set the server property: Host name of the SAP BW instance where the open hub destination is located. Type: string + * (or Expression with resultType string). * * @param server the server value to set. * @return the SapOpenHubLinkedServiceTypeProperties object itself. @@ -114,9 +108,8 @@ public SapOpenHubLinkedServiceTypeProperties withServer(Object server) { } /** - * Get the systemNumber property: System number of the BW system where the open hub destination is located. - * (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType - * string). + * Get the systemNumber property: System number of the BW system where the open hub destination is located. (Usually + * a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). * * @return the systemNumber value. */ @@ -125,9 +118,8 @@ public Object systemNumber() { } /** - * Set the systemNumber property: System number of the BW system where the open hub destination is located. 
- * (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType - * string). + * Set the systemNumber property: System number of the BW system where the open hub destination is located. (Usually + * a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). * * @param systemNumber the systemNumber value to set. * @return the SapOpenHubLinkedServiceTypeProperties object itself. @@ -162,8 +154,8 @@ public SapOpenHubLinkedServiceTypeProperties withClientId(Object clientId) { } /** - * Get the language property: Language of the BW system where the open hub destination is located. The default - * value is EN. Type: string (or Expression with resultType string). + * Get the language property: Language of the BW system where the open hub destination is located. The default value + * is EN. Type: string (or Expression with resultType string). * * @return the language value. */ @@ -172,8 +164,8 @@ public Object language() { } /** - * Set the language property: Language of the BW system where the open hub destination is located. The default - * value is EN. Type: string (or Expression with resultType string). + * Set the language property: Language of the BW system where the open hub destination is located. The default value + * is EN. Type: string (or Expression with resultType string). * * @param language the language value to set. * @return the SapOpenHubLinkedServiceTypeProperties object itself. @@ -314,8 +306,8 @@ public SapOpenHubLinkedServiceTypeProperties withLogonGroup(Object logonGroup) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -324,8 +316,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapOpenHubLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java index fcf9d2833e8a0..de27089d33f2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java @@ -14,22 +14,19 @@ @Fluent public final class SapOpenHubTableDatasetTypeProperties { /* - * The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with - * resultType string). 
+ * The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). */ @JsonProperty(value = "openHubDestinationName", required = true) private Object openHubDestinationName; /* - * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with - * resultType boolean). + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "excludeLastRequest") private Object excludeLastRequest; /* - * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this - * property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). + * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */ @JsonProperty(value = "baseRequestId") private Object baseRequestId; @@ -115,8 +112,9 @@ public SapOpenHubTableDatasetTypeProperties withBaseRequestId(Object baseRequest */ public void validate() { if (openHubDestinationName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property openHubDestinationName in model SapOpenHubTableDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property openHubDestinationName in model SapOpenHubTableDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java index 55faa5f133349..14e711b1adb1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java @@ -20,22 +20,19 @@ public final class SapTableLinkedServiceTypeProperties { private Object server; /* - * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a - * string.) Type: string (or Expression with resultType string). + * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ @JsonProperty(value = "systemNumber") private Object systemNumber; /* - * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number - * represented as a string) Type: string (or Expression with resultType string). + * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ @JsonProperty(value = "clientId") private Object clientId; /* - * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with - * resultType string). + * Language of the SAP system where the table is located. The default value is EN. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "language") private Object language; @@ -47,8 +44,7 @@ public final class SapTableLinkedServiceTypeProperties { private Object systemId; /* - * Username to access the SAP server where the table is located. Type: string (or Expression with resultType - * string). + * Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "userName") private Object username; @@ -72,36 +68,31 @@ public final class SapTableLinkedServiceTypeProperties { private Object messageServerService; /* - * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). - * Type: string (or Expression with resultType string). + * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncMode") private Object sncMode; /* - * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with - * resultType string). + * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncMyName") private Object sncMyName; /* - * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or - * Expression with resultType string). + * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncPartnerName") private Object sncPartnerName; /* - * External security product's library to access the SAP server where the table is located. Type: string (or - * Expression with resultType string). + * External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncLibraryPath") private Object sncLibraryPath; /* - * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType - * string). + * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sncQop") private Object sncQop; @@ -113,8 +104,7 @@ public final class SapTableLinkedServiceTypeProperties { private Object logonGroup; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -454,8 +444,8 @@ public SapTableLinkedServiceTypeProperties withLogonGroup(Object logonGroup) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -464,8 +454,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapTableLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java index 9175e722625a0..900cbdc0acc8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java @@ -52,8 +52,9 @@ public SapTableResourceDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { if (tableName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tableName in model SapTableResourceDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tableName in model SapTableResourceDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java index 7a44f51ad0b93..ea90aabc7e03e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java @@ -53,8 +53,9 @@ public ScheduleTriggerTypeProperties withRecurrence(ScheduleTriggerRecurrence re */ public void validate() { if (recurrence() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property recurrence in model ScheduleTriggerTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property recurrence in model ScheduleTriggerTypeProperties")); } else { recurrence().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java index 57719cbc8ecee..1ce30968d0c51 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java @@ -16,8 +16,7 @@ @Fluent public final class ScriptActivityTypeProperties { /* - * ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "scriptBlockExecutionTimeout") private Object scriptBlockExecutionTimeout; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java index bb9ad2d41b00f..5f35796e77e19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java @@ -279,8 +279,8 @@ public OffsetDateTime lastEndUpdateTime() { } /** - * Get the isActiveDispatcher property: Indicates whether this node is the active dispatcher for integration - * runtime requests. + * Get the isActiveDispatcher property: Indicates whether this node is the active dispatcher for integration runtime + * requests. * * @return the isActiveDispatcher value. */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java index 5f0ec2e896704..13d9c4dd403b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java @@ -32,8 +32,7 @@ public final class SelfHostedIntegrationRuntimeStatusTypeProperties { private String taskQueueId; /* - * It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted - * integration runtime nodes exist). + * It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist). */ @JsonProperty(value = "internalChannelEncryption", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeInternalChannelEncryptionMode internalChannelEncryption; @@ -118,8 +117,7 @@ public final class SelfHostedIntegrationRuntimeStatusTypeProperties { private OffsetDateTime autoUpdateEta; /* - * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is - * unable to establish a connection with Azure Relay. + * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is unable to establish a connection with Azure Relay. 
*/ @JsonProperty(value = "selfContainedInteractiveAuthoringEnabled", access = JsonProperty.Access.WRITE_ONLY) private Boolean selfContainedInteractiveAuthoringEnabled; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java index 604f717396561..183fc8d012956 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java @@ -20,8 +20,7 @@ public final class SelfHostedIntegrationRuntimeTypeProperties { private LinkedIntegrationRuntimeType linkedInfo; /* - * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is - * unable to establish a connection with Azure Relay. + * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is unable to establish a connection with Azure Relay. */ @JsonProperty(value = "selfContainedInteractiveAuthoringEnabled") private Boolean selfContainedInteractiveAuthoringEnabled; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java index fa7a2d218acf5..9d3e447ffb514 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java @@ -58,8 +58,7 @@ public final class ServiceNowLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -71,8 +70,7 @@ public final class ServiceNowLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -251,8 +249,8 @@ public ServiceNowLinkedServiceTypeProperties withUseHostVerification(Object useH } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. 
+ * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -261,8 +259,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ServiceNowLinkedServiceTypeProperties object itself. @@ -273,8 +271,8 @@ public ServiceNowLinkedServiceTypeProperties withUsePeerVerification(Object useP } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -283,8 +281,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ServiceNowLinkedServiceTypeProperties object itself. 
@@ -301,12 +299,14 @@ public ServiceNowLinkedServiceTypeProperties withEncryptedCredential(String encr */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model ServiceNowLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model ServiceNowLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model ServiceNowLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model ServiceNowLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java index e9a1009ba0977..20a91dcbc3ecd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java @@ -58,8 +58,7 @@ public final class ServiceNowV2LinkedServiceTypeProperties { private Object grantType; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -214,8 +213,8 @@ public ServiceNowV2LinkedServiceTypeProperties withGrantType(Object grantType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -224,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ServiceNowV2LinkedServiceTypeProperties object itself. 
@@ -242,12 +241,14 @@ public ServiceNowV2LinkedServiceTypeProperties withEncryptedCredential(String en */ public void validate() { if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model ServiceNowV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model ServiceNowV2LinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model ServiceNowV2LinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model ServiceNowV2LinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java index bed9371b14d72..91cc202f0f23b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java @@ -22,8 +22,7 @@ public final class SftpServerLinkedServiceTypeProperties { private Object host; /* - * The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: - * integer (or Expression with resultType integer), minimum: 0. + * The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "port") private Object port; @@ -47,24 +46,19 @@ public final class SftpServerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises - * copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH - * private key should be OpenSSH format. Type: string (or Expression with resultType string). + * The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). */ @JsonProperty(value = "privateKeyPath") private Object privateKeyPath; /* - * Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey - * authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be - * OpenSSH format. 
+ * Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. */ @JsonProperty(value = "privateKeyContent") private SecretBase privateKeyContent; @@ -76,15 +70,13 @@ public final class SftpServerLinkedServiceTypeProperties { private SecretBase passPhrase; /* - * If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType - * boolean). + * If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "skipHostKeyValidation") private Object skipHostKeyValidation; /* - * The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be - * specified. Type: string (or Expression with resultType string). + * The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "hostKeyFingerprint") private Object hostKeyFingerprint; @@ -200,8 +192,8 @@ public SftpServerLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -210,8 +202,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SftpServerLinkedServiceTypeProperties object itself. @@ -224,8 +216,8 @@ public SftpServerLinkedServiceTypeProperties withEncryptedCredential(String encr /** * Get the privateKeyPath property: The SSH private key file path for SshPublicKey authentication. Only valid for * on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression - * with resultType string). + * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with + * resultType string). * * @return the privateKeyPath value. */ @@ -236,8 +228,8 @@ public Object privateKeyPath() { /** * Set the privateKeyPath property: The SSH private key file path for SshPublicKey authentication. Only valid for * on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression - * with resultType string). + * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with + * resultType string). 
* * @param privateKeyPath the privateKeyPath value to set. * @return the SftpServerLinkedServiceTypeProperties object itself. @@ -342,8 +334,9 @@ public SftpServerLinkedServiceTypeProperties withHostKeyFingerprint(Object hostK */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model SftpServerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model SftpServerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java index c0f3212c74fb5..b8d7eb49e9c41 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java @@ -15,36 +15,31 @@ @Fluent public final class SharePointOnlineListLinkedServiceTypeProperties { /* - * The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string - * (or Expression with resultType string). + * The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */ @JsonProperty(value = "siteUrl", required = true) private Object siteUrl; /* - * The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview - * page. Type: string (or Expression with resultType string). + * The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenantId", required = true) private Object tenantId; /* - * The application (client) ID of your application registered in Azure Active Directory. Make sure to grant - * SharePoint site permission to this application. Type: string (or Expression with resultType string). + * The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId", required = true) private Object servicePrincipalId; /* - * The client secret of your application registered in Azure Active Directory. Type: string (or Expression with - * resultType string). + * The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalKey", required = true) private SecretBase servicePrincipalKey; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -124,8 +119,8 @@ public SharePointOnlineListLinkedServiceTypeProperties withServicePrincipalId(Ob } /** - * Get the servicePrincipalKey property: The client secret of your application registered in Azure Active - * Directory. Type: string (or Expression with resultType string). + * Get the servicePrincipalKey property: The client secret of your application registered in Azure Active Directory. + * Type: string (or Expression with resultType string). * * @return the servicePrincipalKey value. */ @@ -134,8 +129,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The client secret of your application registered in Azure Active - * Directory. Type: string (or Expression with resultType string). + * Set the servicePrincipalKey property: The client secret of your application registered in Azure Active Directory. + * Type: string (or Expression with resultType string). * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the SharePointOnlineListLinkedServiceTypeProperties object itself. @@ -146,8 +141,8 @@ public SharePointOnlineListLinkedServiceTypeProperties withServicePrincipalKey(S } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -156,8 +151,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SharePointOnlineListLinkedServiceTypeProperties object itself. 
@@ -174,20 +169,24 @@ public SharePointOnlineListLinkedServiceTypeProperties withEncryptedCredential(S */ public void validate() { if (siteUrl() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property siteUrl in model SharePointOnlineListLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property siteUrl in model SharePointOnlineListLinkedServiceTypeProperties")); } if (tenantId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property tenantId in model SharePointOnlineListLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property tenantId in model SharePointOnlineListLinkedServiceTypeProperties")); } if (servicePrincipalId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property servicePrincipalId in model SharePointOnlineListLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property servicePrincipalId in model SharePointOnlineListLinkedServiceTypeProperties")); } if (servicePrincipalKey() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property servicePrincipalKey in model SharePointOnlineListLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property servicePrincipalKey in model SharePointOnlineListLinkedServiceTypeProperties")); } else { servicePrincipalKey().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java index d47a0347f99ee..6d04733f9cd12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java @@ -33,8 +33,7 @@ public final class ShopifyLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -46,8 +45,7 @@ public final class ShopifyLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -145,8 +143,8 @@ public ShopifyLinkedServiceTypeProperties withUseHostVerification(Object useHost } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -155,8 +153,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ShopifyLinkedServiceTypeProperties object itself. @@ -167,8 +165,8 @@ public ShopifyLinkedServiceTypeProperties withUsePeerVerification(Object usePeer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -177,8 +175,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ShopifyLinkedServiceTypeProperties object itself. @@ -195,8 +193,9 @@ public ShopifyLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model ShopifyLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model ShopifyLinkedServiceTypeProperties")); } if (accessToken() != null) { accessToken().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java index 3f12e73ba5b46..44fa64cf83359 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java @@ -21,8 +21,7 @@ public final class SmartsheetLinkedServiceTypeProperties { private SecretBase apiToken; /* - * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -54,8 +53,8 @@ public SmartsheetLinkedServiceTypeProperties withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -64,8 +63,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SmartsheetLinkedServiceTypeProperties object itself. @@ -82,8 +81,9 @@ public SmartsheetLinkedServiceTypeProperties withEncryptedCredential(String encr */ public void validate() { if (apiToken() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property apiToken in model SmartsheetLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property apiToken in model SmartsheetLinkedServiceTypeProperties")); } else { apiToken().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java index c9a089536e462..05f4a01eeda80 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java @@ -27,8 +27,7 @@ public final class SnowflakeLinkedServiceTypeProperties { private AzureKeyVaultSecretReference password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -80,8 +79,8 @@ public SnowflakeLinkedServiceTypeProperties withPassword(AzureKeyVaultSecretRefe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -90,8 +89,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SnowflakeLinkedServiceTypeProperties object itself. @@ -108,8 +107,9 @@ public SnowflakeLinkedServiceTypeProperties withEncryptedCredential(String encry */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model SnowflakeLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model SnowflakeLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java index 65133a9f4715d..268f799834064 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java @@ -88,8 +88,7 @@ public final class SnowflakeLinkedV2ServiceTypeProperties { private SecretBase privateKeyPassphrase; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -290,8 +289,8 @@ public SnowflakeLinkedV2ServiceTypeProperties withTenantId(Object tenantId) { } /** - * Get the scope property: The scope of the application registered in Azure Active Directory for - * AADServicePrincipal authentication. + * Get the scope property: The scope of the application registered in Azure Active Directory for AADServicePrincipal + * authentication. * * @return the scope value. */ @@ -300,8 +299,8 @@ public Object scope() { } /** - * Set the scope property: The scope of the application registered in Azure Active Directory for - * AADServicePrincipal authentication. + * Set the scope property: The scope of the application registered in Azure Active Directory for AADServicePrincipal + * authentication. * * @param scope the scope value to set. * @return the SnowflakeLinkedV2ServiceTypeProperties object itself. @@ -354,8 +353,8 @@ public SnowflakeLinkedV2ServiceTypeProperties withPrivateKeyPassphrase(SecretBas } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -364,8 +363,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SnowflakeLinkedV2ServiceTypeProperties object itself. @@ -382,19 +381,22 @@ public SnowflakeLinkedV2ServiceTypeProperties withEncryptedCredential(String enc */ public void validate() { if (accountIdentifier() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property accountIdentifier in model SnowflakeLinkedV2ServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property accountIdentifier in model SnowflakeLinkedV2ServiceTypeProperties")); } if (password() != null) { password().validate(); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model SnowflakeLinkedV2ServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model SnowflakeLinkedV2ServiceTypeProperties")); } if (warehouse() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property warehouse in model SnowflakeLinkedV2ServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property warehouse in model SnowflakeLinkedV2ServiceTypeProperties")); } if (clientSecret() != null) { clientSecret().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java index 2c43283bc3c83..2d812f817cad7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java @@ -72,23 +72,19 @@ public final class SparkLinkedServiceTypeProperties { private Object enableSsl; /* - * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over - * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file - * installed with the IR. + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
*/ @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* - * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default - * value is false. + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* - * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when - * connecting over SSL. The default value is false. + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; @@ -100,8 +96,7 @@ public final class SparkLinkedServiceTypeProperties { private Object allowSelfSignedServerCert; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -296,9 +291,9 @@ public SparkLinkedServiceTypeProperties withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -307,9 +302,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the SparkLinkedServiceTypeProperties object itself. @@ -386,8 +381,8 @@ public SparkLinkedServiceTypeProperties withAllowSelfSignedServerCert(Object all } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -396,8 +391,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SparkLinkedServiceTypeProperties object itself. @@ -414,16 +409,19 @@ public SparkLinkedServiceTypeProperties withEncryptedCredential(String encrypted */ public void validate() { if (host() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property host in model SparkLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property host in model SparkLinkedServiceTypeProperties")); } if (port() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property port in model SparkLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property port in model SparkLinkedServiceTypeProperties")); } if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model SparkLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model SparkLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java index fcc3809892c5c..8305ba5af6629 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java @@ -34,8 +34,7 @@ public final class SqlServerLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -117,8 +116,8 @@ public SqlServerLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -127,8 +126,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SqlServerLinkedServiceTypeProperties object itself. @@ -166,8 +165,9 @@ public SqlAlwaysEncryptedProperties alwaysEncryptedSettings() { */ public void validate() { if (connectionString() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property connectionString in model SqlServerLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property connectionString in model SqlServerLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java index baadfd2c26315..c3327e1ef423a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java @@ -32,8 +32,7 @@ public SqlServerStoredProcedureActivityTypeProperties() { } /** - * Get the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType - * string). + * Get the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType string). * * @return the storedProcedureName value. */ @@ -42,8 +41,7 @@ public Object storedProcedureName() { } /** - * Set the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType - * string). + * Set the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType string). * * @param storedProcedureName the storedProcedureName value to set. * @return the SqlServerStoredProcedureActivityTypeProperties object itself. 
@@ -83,8 +81,9 @@ public Object storedProcedureParameters() { */ public void validate() { if (storedProcedureName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property storedProcedureName in model SqlServerStoredProcedureActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property storedProcedureName in model SqlServerStoredProcedureActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java index bf80790772e81..69ee3b8beec18 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class SquareLinkedServiceTypeProperties { /* - * Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. - * Type: object. + * Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; @@ -51,8 +50,7 @@ public final class SquareLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -64,8 +62,7 @@ public final class SquareLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -225,8 +222,8 @@ public SquareLinkedServiceTypeProperties withUseHostVerification(Object useHostV } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -235,8 +232,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. 
* @return the SquareLinkedServiceTypeProperties object itself. @@ -247,8 +244,8 @@ public SquareLinkedServiceTypeProperties withUsePeerVerification(Object usePeerV } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -257,8 +254,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SquareLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java index 44552374e3188..16791c0012c94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java @@ -20,8 +20,7 @@ public final class SsisLogLocationTypeProperties { private SsisAccessCredential accessCredential; /* - * Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with - * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "logRefreshInterval") private Object logRefreshInterval; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java index 7dfee2914c783..869f538b45467 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java @@ -18,22 +18,19 @@ @Fluent public final class SwitchActivityTypeProperties { /* - * An expression that would evaluate to a string or integer. This is used to determine the block of activities in - * cases that will be executed. + * An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. 
*/ @JsonProperty(value = "on", required = true) private Expression on; /* - * List of cases that correspond to expected values of the 'on' property. This is an optional property and if not - * provided, the activity will execute activities provided in defaultActivities. + * List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. */ @JsonProperty(value = "cases") private List cases; /* - * List of activities to execute if no case condition is satisfied. This is an optional property and if not - * provided, the activity will exit without any action. + * List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. */ @JsonProperty(value = "defaultActivities") private List defaultActivities; @@ -117,8 +114,9 @@ public SwitchActivityTypeProperties withDefaultActivities(List default */ public void validate() { if (on() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property on in model SwitchActivityTypeProperties")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property on in model SwitchActivityTypeProperties")); } else { on().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java index 3450dda912299..3b64167312a1a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java @@ -52,8 +52,7 @@ public final class SybaseLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -185,8 +184,8 @@ public SybaseLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -195,8 +194,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the SybaseLinkedServiceTypeProperties object itself. @@ -213,12 +212,14 @@ public SybaseLinkedServiceTypeProperties withEncryptedCredential(String encrypte */ public void validate() { if (server() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property server in model SybaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property server in model SybaseLinkedServiceTypeProperties")); } if (database() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property database in model SybaseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property database in model SybaseLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java index 5334581c6c011..f0bc2444606f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java @@ -40,9 +40,7 @@ public final class SynapseNotebookActivityTypeProperties { private Map parameters; /* - * Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which - * will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or - * Expression with resultType string). + * Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "executorSize") private Object executorSize; @@ -54,16 +52,13 @@ public final class SynapseNotebookActivityTypeProperties { private Object conf; /* - * Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which - * will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or - * Expression with resultType string). + * Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "driverSize") private Object driverSize; /* - * Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you - * provide. Type: integer (or Expression with resultType integer). + * Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer). 
*/ @JsonProperty(value = "numExecutors") private Object numExecutors; @@ -313,8 +308,9 @@ public SynapseNotebookActivityTypeProperties withSparkConfig(Map */ public void validate() { if (notebook() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property notebook in model SynapseNotebookActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property notebook in model SynapseNotebookActivityTypeProperties")); } else { notebook().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java index 670d45e615a86..d82334660cad8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java @@ -33,59 +33,49 @@ public final class SynapseSparkJobActivityTypeProperties { private List arguments; /* - * The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: - * string (or Expression with resultType string). + * The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "file") private Object file; /* - * Scanning subfolders from the root folder of the main definition file, these files will be added as reference - * files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case - * sensitive. Type: boolean (or Expression with resultType boolean). + * Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "scanFolder") private Object scanFolder; /* - * The fully-qualified identifier or the main class that is in the main definition file, which will override the - * 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). + * The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "className") private Object className; /* - * (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main - * definition file, which will override the 'files' of the spark job definition you provide. + * (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide. */ @JsonProperty(value = "files") private List files; /* - * Additional python code files used for reference in the main definition file, which will override the 'pyFiles' - * of the spark job definition you provide. 
+ * Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide. */ @JsonProperty(value = "pythonCodeReference") private List pythonCodeReference; /* - * Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of - * the spark job definition you provide. + * Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide. */ @JsonProperty(value = "filesV2") private List filesV2; /* - * The name of the big data pool which will be used to execute the spark batch job, which will override the - * 'targetBigDataPool' of the spark job definition you provide. + * The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide. */ @JsonProperty(value = "targetBigDataPool") private BigDataPoolParametrizationReference targetBigDataPool; /* - * Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will - * be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: - * string (or Expression with resultType string). + * Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "executorSize") private Object executorSize; @@ -97,16 +87,13 @@ public final class SynapseSparkJobActivityTypeProperties { private Object conf; /* - * Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be - * used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or - * Expression with resultType string). + * Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */ @JsonProperty(value = "driverSize") private Object driverSize; /* - * Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition - * you provide. Type: integer (or Expression with resultType integer). + * Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "numExecutors") private Object numExecutors; @@ -177,8 +164,8 @@ public SynapseSparkJobActivityTypeProperties withArguments(List argument } /** - * Get the file property: The main file used for the job, which will override the 'file' of the spark job - * definition you provide. Type: string (or Expression with resultType string). + * Get the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). * * @return the file value. */ @@ -187,8 +174,8 @@ public Object file() { } /** - * Set the file property: The main file used for the job, which will override the 'file' of the spark job - * definition you provide. 
Type: string (or Expression with resultType string). + * Set the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). * * @param file the file value to set. * @return the SynapseSparkJobActivityTypeProperties object itself. @@ -200,8 +187,8 @@ public SynapseSparkJobActivityTypeProperties withFile(Object file) { /** * Get the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files - * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, - * and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). + * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and + * the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). * * @return the scanFolder value. */ @@ -211,8 +198,8 @@ public Object scanFolder() { /** * Set the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files - * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, - * and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). + * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and + * the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). * * @param scanFolder the scanFolder value to set. * @return the SynapseSparkJobActivityTypeProperties object itself. @@ -223,9 +210,9 @@ public SynapseSparkJobActivityTypeProperties withScanFolder(Object scanFolder) { } /** - * Get the className property: The fully-qualified identifier or the main class that is in the main definition - * file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression - * with resultType string). + * Get the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). * * @return the className value. */ @@ -234,9 +221,9 @@ public Object className() { } /** - * Set the className property: The fully-qualified identifier or the main class that is in the main definition - * file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression - * with resultType string). + * Set the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). * * @param className the className value to set. * @return the SynapseSparkJobActivityTypeProperties object itself. @@ -360,8 +347,8 @@ public SynapseSparkJobActivityTypeProperties withExecutorSize(Object executorSiz } /** - * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job - * definition you provide. + * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. * * @return the conf value. 
*/ @@ -370,8 +357,8 @@ public Object conf() { } /** - * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job - * definition you provide. + * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. * * @param conf the conf value to set. * @return the SynapseSparkJobActivityTypeProperties object itself. @@ -406,8 +393,8 @@ public SynapseSparkJobActivityTypeProperties withDriverSize(Object driverSize) { } /** - * Get the numExecutors property: Number of executors to launch for this job, which will override the - * 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). + * Get the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. Type: integer (or Expression with resultType integer). * * @return the numExecutors value. */ @@ -416,8 +403,8 @@ public Object numExecutors() { } /** - * Set the numExecutors property: Number of executors to launch for this job, which will override the - * 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). + * Set the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. Type: integer (or Expression with resultType integer). * * @param numExecutors the numExecutors value to set. * @return the SynapseSparkJobActivityTypeProperties object itself. @@ -495,8 +482,9 @@ public SynapseSparkJobActivityTypeProperties withSparkConfig(Map */ public void validate() { if (sparkJob() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property sparkJob in model SynapseSparkJobActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property sparkJob in model SynapseSparkJobActivityTypeProperties")); } else { sparkJob().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java index ad36df09f78fd..eb1f1ac3c96db 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java @@ -46,8 +46,7 @@ public final class TeamDeskLinkedServiceTypeProperties { private SecretBase apiToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -161,8 +160,8 @@ public TeamDeskLinkedServiceTypeProperties withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -171,8 +170,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the TeamDeskLinkedServiceTypeProperties object itself. @@ -189,12 +188,14 @@ public TeamDeskLinkedServiceTypeProperties withEncryptedCredential(String encryp */ public void validate() { if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model TeamDeskLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model TeamDeskLinkedServiceTypeProperties")); } if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model TeamDeskLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model TeamDeskLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java index b625cd7b4b17a..238411ed6e4b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java @@ -45,8 +45,7 @@ public final class TeradataLinkedServiceTypeProperties { private SecretBase password; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -160,8 +159,8 @@ public TeradataLinkedServiceTypeProperties withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -170,8 +169,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the TeradataLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java index 03b8039fe8f59..06711d6c31f1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java @@ -53,8 +53,8 @@ public TriggerQueryResponseInner withValue(List value) { } /** - * Get the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Get the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @return the continuationToken value. */ @@ -63,8 +63,8 @@ public String continuationToken() { } /** - * Set the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Set the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @param continuationToken the continuationToken value to set. * @return the TriggerQueryResponseInner object itself. 
@@ -81,8 +81,9 @@ public TriggerQueryResponseInner withContinuationToken(String continuationToken) */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model TriggerQueryResponseInner")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property value in model TriggerQueryResponseInner")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java index a2ffc54831e5f..c577ba00b1f88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java @@ -108,8 +108,9 @@ public TriggerResourceInner withId(String id) { */ public void validate() { if (properties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property properties in model TriggerResourceInner")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property properties in model TriggerResourceInner")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java index 9ac40ebf67f1b..98f4fea6d67f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java @@ -54,8 +54,8 @@ public TriggerRunsQueryResponseInner withValue(List value) { } /** - * Get the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Get the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @return the continuationToken value. */ @@ -64,8 +64,8 @@ public String continuationToken() { } /** - * Set the continuationToken property: The continuation token for getting the next page of results, if any - * remaining results exist, null otherwise. + * Set the continuationToken property: The continuation token for getting the next page of results, if any remaining + * results exist, null otherwise. * * @param continuationToken the continuationToken value to set. * @return the TriggerRunsQueryResponseInner object itself. 
@@ -82,8 +82,9 @@ public TriggerRunsQueryResponseInner withContinuationToken(String continuationTo */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model TriggerRunsQueryResponseInner")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model TriggerRunsQueryResponseInner")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java index af3f84c275c22..8a485bfa3c2f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java @@ -31,23 +31,19 @@ public final class TumblingWindowTriggerTypeProperties { private int interval; /* - * The start time for the time period for the trigger during which events are fired for windows that are ready. - * Only UTC time is currently supported. + * The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. */ @JsonProperty(value = "startTime", required = true) private OffsetDateTime startTime; /* - * The end time for the time period for the trigger during which events are fired for windows that are ready. Only - * UTC time is currently supported. + * The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. */ @JsonProperty(value = "endTime") private OffsetDateTime endTime; /* - * Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and - * end time. The default is 0. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ @JsonProperty(value = "delay") private Object delay; @@ -253,12 +249,14 @@ public TumblingWindowTriggerTypeProperties withDependsOn(List activities) { */ public void validate() { if (expression() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property expression in model UntilActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property expression in model UntilActivityTypeProperties")); } else { expression().validate(); } if (activities() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property activities in model UntilActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property activities in model UntilActivityTypeProperties")); } else { activities().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java index bba83fc77c33d..73cc664f7bb75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java @@ -15,30 +15,25 @@ @Fluent public final class ValidationActivityTypeProperties { /* - * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of - * TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "timeout") private Object timeout; /* - * A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the - * default. Type: integer (or Expression with resultType integer). + * A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "sleep") private Object sleep; /* - * Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. - * Type: integer (or Expression with resultType integer). + * Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "minimumSize") private Object minimumSize; /* - * Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to - * false, the folder must be empty. Type: boolean (or Expression with resultType boolean). + * Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). 
*/ @JsonProperty(value = "childItems") private Object childItems; @@ -56,8 +51,8 @@ public ValidationActivityTypeProperties() { } /** - * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @return the timeout value. @@ -67,8 +62,8 @@ public Object timeout() { } /** - * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @param timeout the timeout value to set. @@ -80,8 +75,8 @@ public ValidationActivityTypeProperties withTimeout(Object timeout) { } /** - * Get the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds - * will be used as the default. Type: integer (or Expression with resultType integer). + * Get the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds will + * be used as the default. Type: integer (or Expression with resultType integer). * * @return the sleep value. */ @@ -90,8 +85,8 @@ public Object sleep() { } /** - * Set the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds - * will be used as the default. Type: integer (or Expression with resultType integer). + * Set the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds will + * be used as the default. Type: integer (or Expression with resultType integer). * * @param sleep the sleep value to set. * @return the ValidationActivityTypeProperties object itself. @@ -125,8 +120,7 @@ public ValidationActivityTypeProperties withMinimumSize(Object minimumSize) { /** * Get the childItems property: Can be used if dataset points to a folder. If set to true, the folder must have at - * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType - * boolean). + * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). * * @return the childItems value. */ @@ -136,8 +130,7 @@ public Object childItems() { /** * Set the childItems property: Can be used if dataset points to a folder. If set to true, the folder must have at - * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType - * boolean). + * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). * * @param childItems the childItems value to set. * @return the ValidationActivityTypeProperties object itself. 
@@ -174,8 +167,9 @@ public ValidationActivityTypeProperties withDataset(DatasetReference dataset) { */ public void validate() { if (dataset() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dataset in model ValidationActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dataset in model ValidationActivityTypeProperties")); } else { dataset().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java index f5636bb7407fa..ebbbba158c169 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java @@ -26,8 +26,7 @@ public final class VerticaLinkedServiceTypeProperties { private AzureKeyVaultSecretReference pwd; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -81,8 +80,8 @@ public VerticaLinkedServiceTypeProperties withPwd(AzureKeyVaultSecretReference p } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -91,8 +90,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the VerticaLinkedServiceTypeProperties object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java index 703c96d321eca..68f2868f18ed0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java @@ -52,8 +52,9 @@ public WaitActivityTypeProperties withWaitTimeInSeconds(Object waitTimeInSeconds */ public void validate() { if (waitTimeInSeconds() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property waitTimeInSeconds in model WaitActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property waitTimeInSeconds in model WaitActivityTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java index ddc1433e12d48..fdf47150b1f81 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java @@ -33,8 +33,7 @@ public final class WarehouseLinkedServiceTypeProperties { private Object workspaceId; /* - * The ID of the application used to authenticate against Microsoft Fabric Warehouse. Type: string (or Expression - * with resultType string). + * The ID of the application used to authenticate against Microsoft Fabric Warehouse. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -46,31 +45,25 @@ public final class WarehouseLinkedServiceTypeProperties { private SecretBase servicePrincipalKey; /* - * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType - * string). + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tenant") private Object tenant; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* - * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for - * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* - * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is - * 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be - * AzureKeyVaultSecretReference. + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; @@ -214,8 +207,8 @@ public WarehouseLinkedServiceTypeProperties withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -224,8 +217,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the WarehouseLinkedServiceTypeProperties object itself. @@ -236,9 +229,9 @@ public WarehouseLinkedServiceTypeProperties withEncryptedCredential(String encry } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -247,9 +240,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the WarehouseLinkedServiceTypeProperties object itself. 
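The reflowed comments above describe the Microsoft Fabric Warehouse service-principal settings. As a minimal sketch, assuming the generated fluent withers for each property and the SecureString secret model from this package (both assumptions, not shown in this hunk), key-based authentication might be populated like this; the identifiers are placeholders, and artifactId/endpoint (which validate() requires) are omitted:

import com.azure.resourcemanager.datafactory.fluent.models.WarehouseLinkedServiceTypeProperties;
import com.azure.resourcemanager.datafactory.models.SecureString;

final class WarehouseServicePrincipalSample {
    // Placeholder identifiers; artifactId, endpoint and workspaceId (checked by validate()) are not set here.
    static WarehouseLinkedServiceTypeProperties keyBasedAuth() {
        return new WarehouseLinkedServiceTypeProperties()
            .withServicePrincipalId("<application-id>")
            .withTenant("<tenant-id>")
            // 'ServicePrincipalKey' selects key/secret authentication, per the property comment above.
            .withServicePrincipalCredentialType("ServicePrincipalKey")
            .withServicePrincipalCredential(new SecureString().withValue("<client-secret>"));
    }
}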
@@ -293,12 +286,14 @@ public WarehouseLinkedServiceTypeProperties withServicePrincipalCredential(Secre */ public void validate() { if (artifactId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property artifactId in model WarehouseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property artifactId in model WarehouseLinkedServiceTypeProperties")); } if (endpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property endpoint in model WarehouseLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property endpoint in model WarehouseLinkedServiceTypeProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java index 5ae6f731846a6..8b8a0ac3bc59e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java @@ -34,17 +34,14 @@ public final class WebActivityTypeProperties { private Object url; /* - * Represents the headers that will be sent to the request. For example, to set the language and type on a request: - * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with - * resultType string). + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). */ @JsonProperty(value = "headers") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) - private Map headers; + private Map headers; /* - * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET - * method Type: string (or Expression with resultType string). + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */ @JsonProperty(value = "body") private Object body; @@ -62,17 +59,13 @@ public final class WebActivityTypeProperties { private Boolean disableCertValidation; /* - * Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to - * get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 - * minutes + * Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* - * Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. 
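The validate() hunks above switch required-property checks from LOGGER.logExceptionAsError(...) to LOGGER.atError().log(...), throwing the exception that log(...) returns. A minimal sketch of the new pattern on a hypothetical model (class and property names are invented purely for illustration):

import com.azure.core.util.logging.ClientLogger;

// Hypothetical model class, shown only to illustrate the validation/logging pattern used throughout this diff.
final class ExampleTypeProperties {
    private static final ClientLogger LOGGER = new ClientLogger(ExampleTypeProperties.class);

    private Object requiredValue;

    public Object requiredValue() {
        return this.requiredValue;
    }

    public void validate() {
        if (requiredValue() == null) {
            // Build the error-level log event, log the exception, and throw what log(...) returns.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property requiredValue in model ExampleTypeProperties"));
        }
    }
}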
If set true, it - * stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP - * GET call on location given in http response headers. + * Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET call on location given in http response headers. */ @JsonProperty(value = "turnOffAsync") private Boolean turnOffAsync; @@ -122,8 +115,7 @@ public WebActivityTypeProperties withMethod(WebActivityMethod method) { } /** - * Get the url property: Web activity target endpoint and path. Type: string (or Expression with resultType - * string). + * Get the url property: Web activity target endpoint and path. Type: string (or Expression with resultType string). * * @return the url value. */ @@ -132,8 +124,7 @@ public Object url() { } /** - * Set the url property: Web activity target endpoint and path. Type: string (or Expression with resultType - * string). + * Set the url property: Web activity target endpoint and path. Type: string (or Expression with resultType string). * * @param url the url value to set. * @return the WebActivityTypeProperties object itself. @@ -150,7 +141,7 @@ public WebActivityTypeProperties withUrl(Object url) { * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.headers; } @@ -162,7 +153,7 @@ public Map headers() { * @param headers the headers value to set. * @return the WebActivityTypeProperties object itself. */ - public WebActivityTypeProperties withHeaders(Map headers) { + public WebActivityTypeProperties withHeaders(Map headers) { this.headers = headers; return this; } @@ -254,9 +245,9 @@ public WebActivityTypeProperties withHttpRequestTimeout(Object httpRequestTimeou } /** - * Get the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a - * HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set - * false then continues to invoke HTTP GET call on location given in http response headers. + * Get the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a HTTP + * 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false + * then continues to invoke HTTP GET call on location given in http response headers. * * @return the turnOffAsync value. */ @@ -265,9 +256,9 @@ public Boolean turnOffAsync() { } /** - * Set the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a - * HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set - * false then continues to invoke HTTP GET call on location given in http response headers. + * Set the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a HTTP + * 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false + * then continues to invoke HTTP GET call on location given in http response headers. * * @param turnOffAsync the turnOffAsync value to set. * @return the WebActivityTypeProperties object itself. 
@@ -344,12 +335,13 @@ public WebActivityTypeProperties withConnectVia(IntegrationRuntimeReference conn */ public void validate() { if (method() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property method in model WebActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property method in model WebActivityTypeProperties")); } if (url() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property url in model WebActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property url in model WebActivityTypeProperties")); } if (authentication() != null) { authentication().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java index e19652bfb37ce..3bef9230b5019 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java @@ -14,15 +14,13 @@ @Fluent public final class WebTableDatasetTypeProperties { /* - * The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), - * minimum: 0. + * The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "index", required = true) private Object index; /* - * The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType - * string). + * The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). */ @JsonProperty(value = "path") private Object path; @@ -84,8 +82,9 @@ public WebTableDatasetTypeProperties withPath(Object path) { */ public void validate() { if (index() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property index in model WebTableDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property index in model WebTableDatasetTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java index 21c2b2e7033f0..6db23e83a538d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java @@ -30,24 +30,20 @@ public final class WebhookActivityTypeProperties { private Object url; /* - * The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 - * minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ * The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "timeout") private String timeout; /* - * Represents the headers that will be sent to the request. For example, to set the language and type on a request: - * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with - * resultType string). + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). */ @JsonProperty(value = "headers") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) - private Map headers; + private Map headers; /* - * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET - * method Type: string (or Expression with resultType string). + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */ @JsonProperty(value = "body") private Object body; @@ -59,9 +55,7 @@ public final class WebhookActivityTypeProperties { private WebActivityAuthentication authentication; /* - * When set to true, statusCode, output and error in callback request body will be consumed by activity. The - * activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: - * boolean (or Expression with resultType boolean). + * When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "reportStatusOnCallBack") private Object reportStatusOnCallBack; @@ -145,7 +139,7 @@ public WebhookActivityTypeProperties withTimeout(String timeout) { * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.headers; } @@ -157,7 +151,7 @@ public Map headers() { * @param headers the headers value to set. * @return the WebhookActivityTypeProperties object itself. 
*/ - public WebhookActivityTypeProperties withHeaders(Map headers) { + public WebhookActivityTypeProperties withHeaders(Map headers) { this.headers = headers; return this; } @@ -235,12 +229,14 @@ public WebhookActivityTypeProperties withReportStatusOnCallBack(Object reportSta */ public void validate() { if (method() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property method in model WebhookActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property method in model WebhookActivityTypeProperties")); } if (url() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property url in model WebhookActivityTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model WebhookActivityTypeProperties")); } if (authentication() != null) { authentication().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java index edfcffd8a333c..6dff37344ad67 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class XeroLinkedServiceTypeProperties { /* - * Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. - * Type: object. + * Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; @@ -33,8 +32,7 @@ public final class XeroLinkedServiceTypeProperties { private SecretBase consumerKey; /* - * The private key from the .pem file that was generated for your Xero private application. You must include all - * the text from the .pem file, including the Unix line endings( + * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( * ). */ @JsonProperty(value = "privateKey") @@ -47,8 +45,7 @@ public final class XeroLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -60,8 +57,7 @@ public final class XeroLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
*/ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -203,8 +199,8 @@ public XeroLinkedServiceTypeProperties withUseHostVerification(Object useHostVer } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -213,8 +209,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the XeroLinkedServiceTypeProperties object itself. @@ -225,8 +221,8 @@ public XeroLinkedServiceTypeProperties withUsePeerVerification(Object usePeerVer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -235,8 +231,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the XeroLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java index 7341f8c7f0359..33809467a8356 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java @@ -22,10 +22,7 @@ public final class XmlDatasetTypeProperties { private DatasetLocation location; /* - * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes - * another Unicode encoding. Refer to the name column of the table in the following link to set supported values: - * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType - * string). + * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "encodingName") private Object encodingName; @@ -141,8 +138,9 @@ public XmlDatasetTypeProperties withCompression(DatasetCompression compression) */ public void validate() { if (location() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property location in model XmlDatasetTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property location in model XmlDatasetTypeProperties")); } else { location().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java index 10068a2dc8331..712cde24c0ef9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java @@ -46,8 +46,7 @@ public final class ZendeskLinkedServiceTypeProperties { private SecretBase apiToken; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -161,8 +160,8 @@ public ZendeskLinkedServiceTypeProperties withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -171,8 +170,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ZendeskLinkedServiceTypeProperties object itself. 
@@ -189,12 +188,14 @@ public ZendeskLinkedServiceTypeProperties withEncryptedCredential(String encrypt */ public void validate() { if (authenticationType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property authenticationType in model ZendeskLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property authenticationType in model ZendeskLinkedServiceTypeProperties")); } if (url() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property url in model ZendeskLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model ZendeskLinkedServiceTypeProperties")); } if (password() != null) { password().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java index 6933243f33f28..4e05605ad37fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java @@ -14,8 +14,7 @@ @Fluent public final class ZohoLinkedServiceTypeProperties { /* - * Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. - * Type: object. + * Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */ @JsonProperty(value = "connectionProperties") private Object connectionProperties; @@ -39,8 +38,7 @@ public final class ZohoLinkedServiceTypeProperties { private Object useEncryptedEndpoints; /* - * Specifies whether to require the host name in the server's certificate to match the host name of the server when - * connecting over SSL. The default value is true. + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ @JsonProperty(value = "useHostVerification") private Object useHostVerification; @@ -52,8 +50,7 @@ public final class ZohoLinkedServiceTypeProperties { private Object usePeerVerification; /* - * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - * credential manager. Type: string. + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. */ @JsonProperty(value = "encryptedCredential") private String encryptedCredential; @@ -171,8 +168,8 @@ public ZohoLinkedServiceTypeProperties withUseHostVerification(Object useHostVer } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. 
*/ @@ -181,8 +178,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ZohoLinkedServiceTypeProperties object itself. @@ -193,8 +190,8 @@ public ZohoLinkedServiceTypeProperties withUsePeerVerification(Object usePeerVer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -203,8 +200,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ZohoLinkedServiceTypeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/package-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/package-info.java index 686c1ccf0f1a6..9d7cbe511619c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/package-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/package-info.java @@ -4,7 +4,7 @@ /** * Package containing the inner data models for DataFactoryManagementClient. - * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data - * Factory V2 services. + * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory + * V2 services. */ package com.azure.resourcemanager.datafactory.fluent.models; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/package-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/package-info.java index 86b2ec833b9b8..e0586542efcf4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/package-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/package-info.java @@ -4,7 +4,7 @@ /** * Package containing the service clients for DataFactoryManagementClient. - * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data - * Factory V2 services. 
+ * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory + * V2 services. */ package com.azure.resourcemanager.datafactory.fluent; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCaptureResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCaptureResourceImpl.java index a46e878a3a71a..63d71027fa017 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCaptureResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCaptureResourceImpl.java @@ -114,16 +114,20 @@ public ChangeDataCaptureResourceImpl withExistingFactory(String resourceGroupNam } public ChangeDataCaptureResource create() { - this.innerObject - = serviceManager.serviceClient().getChangeDataCaptures().createOrUpdateWithResponse(resourceGroupName, - factoryName, changeDataCaptureName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() + .createOrUpdateWithResponse(resourceGroupName, factoryName, changeDataCaptureName, this.innerModel(), + createIfMatch, Context.NONE) + .getValue(); return this; } public ChangeDataCaptureResource create(Context context) { - this.innerObject - = serviceManager.serviceClient().getChangeDataCaptures().createOrUpdateWithResponse(resourceGroupName, - factoryName, changeDataCaptureName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() + .createOrUpdateWithResponse(resourceGroupName, factoryName, changeDataCaptureName, this.innerModel(), + createIfMatch, context) + .getValue(); return this; } @@ -141,16 +145,20 @@ public ChangeDataCaptureResourceImpl update() { } public ChangeDataCaptureResource apply() { - this.innerObject - = serviceManager.serviceClient().getChangeDataCaptures().createOrUpdateWithResponse(resourceGroupName, - factoryName, changeDataCaptureName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() + .createOrUpdateWithResponse(resourceGroupName, factoryName, changeDataCaptureName, this.innerModel(), + updateIfMatch, Context.NONE) + .getValue(); return this; } public ChangeDataCaptureResource apply(Context context) { - this.innerObject - = serviceManager.serviceClient().getChangeDataCaptures().createOrUpdateWithResponse(resourceGroupName, - factoryName, changeDataCaptureName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() + .createOrUpdateWithResponse(resourceGroupName, factoryName, changeDataCaptureName, this.innerModel(), + updateIfMatch, context) + .getValue(); return this; } @@ -165,7 +173,8 @@ public ChangeDataCaptureResource apply(Context context) { public ChangeDataCaptureResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getChangeDataCaptures() + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() .getWithResponse(resourceGroupName, factoryName, changeDataCaptureName, localIfNoneMatch, Context.NONE) .getValue(); return this; @@ -173,15 
+182,16 @@ public ChangeDataCaptureResource refresh() { public ChangeDataCaptureResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getChangeDataCaptures() + this.innerObject = serviceManager.serviceClient() + .getChangeDataCaptures() .getWithResponse(resourceGroupName, factoryName, changeDataCaptureName, localIfNoneMatch, context) .getValue(); return this; } public Response startWithResponse(Context context) { - return serviceManager.changeDataCaptures().startWithResponse(resourceGroupName, factoryName, - changeDataCaptureName, context); + return serviceManager.changeDataCaptures() + .startWithResponse(resourceGroupName, factoryName, changeDataCaptureName, context); } public void start() { @@ -189,8 +199,8 @@ public void start() { } public Response stopWithResponse(Context context) { - return serviceManager.changeDataCaptures().stopWithResponse(resourceGroupName, factoryName, - changeDataCaptureName, context); + return serviceManager.changeDataCaptures() + .stopWithResponse(resourceGroupName, factoryName, changeDataCaptureName, context); } public void stop() { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java index bef3eac949483..8d5246e3770a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java @@ -60,8 +60,8 @@ public final class ChangeDataCapturesClientImpl implements ChangeDataCapturesCli } /** - * The interface defining all the services for DataFactoryManagementClientChangeDataCaptures to be used by the - * proxy service to perform REST calls. + * The interface defining all the services for DataFactoryManagementClientChangeDataCaptures to be used by the proxy + * service to perform REST calls. 
*/ @Host("{$host}") @ServiceInterface(name = "DataFactoryManagemen") diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesImpl.java index 0aa8ec6098584..15e777b8f8adf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesImpl.java @@ -42,8 +42,8 @@ public PagedIterable listByFactory(String resourceGro public Response getWithResponse(String resourceGroupName, String factoryName, String changeDataCaptureName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, changeDataCaptureName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, changeDataCaptureName, ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ChangeDataCaptureResourceImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java index b262f42f7e2ed..151d3f1ca5a26 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java @@ -29,7 +29,7 @@ import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.resourcemanager.datafactory.fluent.CredentialOperationsClient; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; import com.azure.resourcemanager.datafactory.models.CredentialListResponse; import reactor.core.publisher.Mono; @@ -78,19 +78,18 @@ Mono> listByFactory(@HostParam("$host") String @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}") @ExpectedResponses({ 200 }) @UnexpectedResponseExceptionType(ManagementException.class) - Mono> createOrUpdate(@HostParam("$host") String endpoint, + Mono> createOrUpdate(@HostParam("$host") String endpoint, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName, @PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion, - @HeaderParam("If-Match") String ifMatch, - @BodyParam("application/json") ManagedIdentityCredentialResourceInner credential, + @HeaderParam("If-Match") String ifMatch, @BodyParam("application/json") CredentialResourceInner credential, @HeaderParam("Accept") String accept, Context context); @Headers({ "Content-Type: 
application/json" }) @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}") @ExpectedResponses({ 200, 304 }) @UnexpectedResponseExceptionType(ManagementException.class) - Mono> get(@HostParam("$host") String endpoint, + Mono> get(@HostParam("$host") String endpoint, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName, @PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion, @@ -126,8 +125,8 @@ Mono> listByFactoryNext( * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> - listByFactorySinglePageAsync(String resourceGroupName, String factoryName) { + private Mono> listByFactorySinglePageAsync(String resourceGroupName, + String factoryName) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -147,7 +146,7 @@ Mono> listByFactoryNext( return FluxUtil .withContext(context -> service.listByFactory(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName, this.client.getApiVersion(), accept, context)) - .>map(res -> new PagedResponseBase<>(res.getRequest(), + .>map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } @@ -164,8 +163,8 @@ Mono> listByFactoryNext( * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> - listByFactorySinglePageAsync(String resourceGroupName, String factoryName, Context context) { + private Mono> listByFactorySinglePageAsync(String resourceGroupName, + String factoryName, Context context) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -201,8 +200,7 @@ Mono> listByFactoryNext( * @return a list of credential resources as paginated response with {@link PagedFlux}. */ @ServiceMethod(returns = ReturnType.COLLECTION) - private PagedFlux listByFactoryAsync(String resourceGroupName, - String factoryName) { + private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName) { return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName), nextLink -> listByFactoryNextSinglePageAsync(nextLink)); } @@ -219,8 +217,8 @@ private PagedFlux listByFactoryAsync(Str * @return a list of credential resources as paginated response with {@link PagedFlux}. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) - private PagedFlux listByFactoryAsync(String resourceGroupName, - String factoryName, Context context) { + private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName, + Context context) { return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName, context), nextLink -> listByFactoryNextSinglePageAsync(nextLink, context)); } @@ -236,8 +234,7 @@ private PagedFlux listByFactoryAsync(Str * @return a list of credential resources as paginated response with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) - public PagedIterable listByFactory(String resourceGroupName, - String factoryName) { + public PagedIterable listByFactory(String resourceGroupName, String factoryName) { return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName)); } @@ -253,8 +250,8 @@ public PagedIterable listByFactory(Strin * @return a list of credential resources as paginated response with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) - public PagedIterable listByFactory(String resourceGroupName, - String factoryName, Context context) { + public PagedIterable listByFactory(String resourceGroupName, String factoryName, + Context context) { return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName, context)); } @@ -273,9 +270,8 @@ public PagedIterable listByFactory(Strin * @return credential resource type along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> createOrUpdateWithResponseAsync( - String resourceGroupName, String factoryName, String credentialName, - ManagedIdentityCredentialResourceInner credential, String ifMatch) { + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String factoryName, String credentialName, CredentialResourceInner credential, String ifMatch) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -323,9 +319,9 @@ private Mono> createOrUpdateWit * @return credential resource type along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> createOrUpdateWithResponseAsync( - String resourceGroupName, String factoryName, String credentialName, - ManagedIdentityCredentialResourceInner credential, String ifMatch, Context context) { + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String factoryName, String credentialName, CredentialResourceInner credential, String ifMatch, + Context context) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -368,8 +364,8 @@ private Mono> createOrUpdateWit * @return credential resource type on successful completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono createOrUpdateAsync(String resourceGroupName, - String factoryName, String credentialName, ManagedIdentityCredentialResourceInner credential) { + private Mono createOrUpdateAsync(String resourceGroupName, String factoryName, + String credentialName, CredentialResourceInner credential) { final String ifMatch = null; return createOrUpdateWithResponseAsync(resourceGroupName, factoryName, credentialName, credential, ifMatch) .flatMap(res -> Mono.justOrEmpty(res.getValue())); @@ -391,9 +387,8 @@ private Mono createOrUpdateAsync(String * @return credential resource type along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Response createOrUpdateWithResponse(String resourceGroupName, - String factoryName, String credentialName, ManagedIdentityCredentialResourceInner credential, String ifMatch, - Context context) { + public Response createOrUpdateWithResponse(String resourceGroupName, String factoryName, + String credentialName, CredentialResourceInner credential, String ifMatch, Context context) { return createOrUpdateWithResponseAsync(resourceGroupName, factoryName, credentialName, credential, ifMatch, context).block(); } @@ -411,8 +406,8 @@ public Response createOrUpdateWithRespon * @return credential resource type. */ @ServiceMethod(returns = ReturnType.SINGLE) - public ManagedIdentityCredentialResourceInner createOrUpdate(String resourceGroupName, String factoryName, - String credentialName, ManagedIdentityCredentialResourceInner credential) { + public CredentialResourceInner createOrUpdate(String resourceGroupName, String factoryName, String credentialName, + CredentialResourceInner credential) { final String ifMatch = null; return createOrUpdateWithResponse(resourceGroupName, factoryName, credentialName, credential, ifMatch, Context.NONE).getValue(); @@ -432,8 +427,8 @@ public ManagedIdentityCredentialResourceInner createOrUpdate(String resourceGrou * @return a credential along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> getWithResponseAsync(String resourceGroupName, - String factoryName, String credentialName, String ifNoneMatch) { + private Mono> getWithResponseAsync(String resourceGroupName, String factoryName, + String credentialName, String ifNoneMatch) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -475,8 +470,8 @@ private Mono> getWithResponseAs * @return a credential along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> getWithResponseAsync(String resourceGroupName, - String factoryName, String credentialName, String ifNoneMatch, Context context) { + private Mono> getWithResponseAsync(String resourceGroupName, String factoryName, + String credentialName, String ifNoneMatch, Context context) { if (this.client.getEndpoint() == null) { return Mono.error( new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); @@ -513,7 +508,7 @@ private Mono> getWithResponseAs * @return a credential on successful completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono getAsync(String resourceGroupName, String factoryName, + private Mono getAsync(String resourceGroupName, String factoryName, String credentialName) { final String ifNoneMatch = null; return getWithResponseAsync(resourceGroupName, factoryName, credentialName, ifNoneMatch) @@ -535,8 +530,8 @@ private Mono getAsync(String resourceGro * @return a credential along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) - public Response getWithResponse(String resourceGroupName, - String factoryName, String credentialName, String ifNoneMatch, Context context) { + public Response getWithResponse(String resourceGroupName, String factoryName, + String credentialName, String ifNoneMatch, Context context) { return getWithResponseAsync(resourceGroupName, factoryName, credentialName, ifNoneMatch, context).block(); } @@ -552,8 +547,7 @@ public Response getWithResponse(String r * @return a credential. */ @ServiceMethod(returns = ReturnType.SINGLE) - public ManagedIdentityCredentialResourceInner get(String resourceGroupName, String factoryName, - String credentialName) { + public CredentialResourceInner get(String resourceGroupName, String factoryName, String credentialName) { final String ifNoneMatch = null; return getWithResponse(resourceGroupName, factoryName, credentialName, ifNoneMatch, Context.NONE).getValue(); } @@ -697,8 +691,7 @@ public void delete(String resourceGroupName, String factoryName, String credenti * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> - listByFactoryNextSinglePageAsync(String nextLink) { + private Mono> listByFactoryNextSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } @@ -709,7 +702,7 @@ public void delete(String resourceGroupName, String factoryName, String credenti final String accept = "application/json"; return FluxUtil .withContext(context -> service.listByFactoryNext(nextLink, this.client.getEndpoint(), accept, context)) - .>map(res -> new PagedResponseBase<>(res.getRequest(), + .>map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } @@ -727,8 +720,8 @@ public void delete(String resourceGroupName, String factoryName, String credenti * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) - private Mono> - listByFactoryNextSinglePageAsync(String nextLink, Context context) { + private Mono> listByFactoryNextSinglePageAsync(String nextLink, + Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsImpl.java index 2b4ff29743ed5..01da9bcb48015 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsImpl.java @@ -10,9 +10,9 @@ import com.azure.core.util.Context; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.CredentialOperationsClient; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; import com.azure.resourcemanager.datafactory.models.CredentialOperations; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; +import com.azure.resourcemanager.datafactory.models.CredentialResource; public final class CredentialOperationsImpl implements CredentialOperations { private static final ClientLogger LOGGER = new ClientLogger(CredentialOperationsImpl.class); @@ -27,130 +27,70 @@ public CredentialOperationsImpl(CredentialOperationsClient innerClient, this.serviceManager = serviceManager; } - public PagedIterable listByFactory(String resourceGroupName, - String factoryName) { - PagedIterable inner + public PagedIterable listByFactory(String resourceGroupName, String factoryName) { + PagedIterable inner = this.serviceClient().listByFactory(resourceGroupName, factoryName); - return ResourceManagerUtils.mapPage(inner, - inner1 -> new ManagedIdentityCredentialResourceImpl(inner1, this.manager())); + return ResourceManagerUtils.mapPage(inner, inner1 -> new CredentialResourceImpl(inner1, this.manager())); } - public PagedIterable listByFactory(String resourceGroupName, String factoryName, + public PagedIterable listByFactory(String resourceGroupName, String factoryName, Context context) { - PagedIterable inner + PagedIterable inner = this.serviceClient().listByFactory(resourceGroupName, factoryName, context); - return ResourceManagerUtils.mapPage(inner, - inner1 -> new ManagedIdentityCredentialResourceImpl(inner1, this.manager())); + return ResourceManagerUtils.mapPage(inner, inner1 -> new CredentialResourceImpl(inner1, this.manager())); } - public Response getWithResponse(String resourceGroupName, String factoryName, - String credentialName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, credentialName, ifNoneMatch, context); + public Response createOrUpdateWithResponse(String resourceGroupName, String factoryName, + String credentialName, CredentialResourceInner credential, String ifMatch, Context context) { + Response inner = this.serviceClient() + .createOrUpdateWithResponse(resourceGroupName, factoryName, 
credentialName, credential, ifMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), - new ManagedIdentityCredentialResourceImpl(inner.getValue(), this.manager())); + new CredentialResourceImpl(inner.getValue(), this.manager())); } else { return null; } } - public ManagedIdentityCredentialResource get(String resourceGroupName, String factoryName, String credentialName) { - ManagedIdentityCredentialResourceInner inner - = this.serviceClient().get(resourceGroupName, factoryName, credentialName); + public CredentialResource createOrUpdate(String resourceGroupName, String factoryName, String credentialName, + CredentialResourceInner credential) { + CredentialResourceInner inner + = this.serviceClient().createOrUpdate(resourceGroupName, factoryName, credentialName, credential); if (inner != null) { - return new ManagedIdentityCredentialResourceImpl(inner, this.manager()); + return new CredentialResourceImpl(inner, this.manager()); } else { return null; } } - public Response deleteWithResponse(String resourceGroupName, String factoryName, String credentialName, - Context context) { - return this.serviceClient().deleteWithResponse(resourceGroupName, factoryName, credentialName, context); - } - - public void delete(String resourceGroupName, String factoryName, String credentialName) { - this.serviceClient().delete(resourceGroupName, factoryName, credentialName); - } - - public ManagedIdentityCredentialResource getById(String id) { - String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); - if (resourceGroupName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); - } - String factoryName = ResourceManagerUtils.getValueFromIdByName(id, "factories"); - if (factoryName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'factories'.", id))); - } - String credentialName = ResourceManagerUtils.getValueFromIdByName(id, "credentials"); - if (credentialName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'credentials'.", id))); + public Response getWithResponse(String resourceGroupName, String factoryName, + String credentialName, String ifNoneMatch, Context context) { + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, credentialName, ifNoneMatch, context); + if (inner != null) { + return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), + new CredentialResourceImpl(inner.getValue(), this.manager())); + } else { + return null; } - String localIfNoneMatch = null; - return this.getWithResponse(resourceGroupName, factoryName, credentialName, localIfNoneMatch, Context.NONE) - .getValue(); } - public Response getByIdWithResponse(String id, String ifNoneMatch, - Context context) { - String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); - if (resourceGroupName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. 
Missing path segment 'resourceGroups'.", id))); - } - String factoryName = ResourceManagerUtils.getValueFromIdByName(id, "factories"); - if (factoryName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'factories'.", id))); - } - String credentialName = ResourceManagerUtils.getValueFromIdByName(id, "credentials"); - if (credentialName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'credentials'.", id))); + public CredentialResource get(String resourceGroupName, String factoryName, String credentialName) { + CredentialResourceInner inner = this.serviceClient().get(resourceGroupName, factoryName, credentialName); + if (inner != null) { + return new CredentialResourceImpl(inner, this.manager()); + } else { + return null; } - return this.getWithResponse(resourceGroupName, factoryName, credentialName, ifNoneMatch, context); } - public void deleteById(String id) { - String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); - if (resourceGroupName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); - } - String factoryName = ResourceManagerUtils.getValueFromIdByName(id, "factories"); - if (factoryName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'factories'.", id))); - } - String credentialName = ResourceManagerUtils.getValueFromIdByName(id, "credentials"); - if (credentialName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'credentials'.", id))); - } - this.deleteWithResponse(resourceGroupName, factoryName, credentialName, Context.NONE); + public Response deleteWithResponse(String resourceGroupName, String factoryName, String credentialName, + Context context) { + return this.serviceClient().deleteWithResponse(resourceGroupName, factoryName, credentialName, context); } - public Response deleteByIdWithResponse(String id, Context context) { - String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); - if (resourceGroupName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); - } - String factoryName = ResourceManagerUtils.getValueFromIdByName(id, "factories"); - if (factoryName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. Missing path segment 'factories'.", id))); - } - String credentialName = ResourceManagerUtils.getValueFromIdByName(id, "credentials"); - if (credentialName == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - String.format("The resource ID '%s' is not valid. 
Missing path segment 'credentials'.", id))); - } - return this.deleteWithResponse(resourceGroupName, factoryName, credentialName, context); + public void delete(String resourceGroupName, String factoryName, String credentialName) { + this.serviceClient().delete(resourceGroupName, factoryName, credentialName); } private CredentialOperationsClient serviceClient() { @@ -160,8 +100,4 @@ private CredentialOperationsClient serviceClient() { private com.azure.resourcemanager.datafactory.DataFactoryManager manager() { return this.serviceManager; } - - public ManagedIdentityCredentialResourceImpl define(String name) { - return new ManagedIdentityCredentialResourceImpl(name, this.manager()); - } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialResourceImpl.java new file mode 100644 index 0000000000000..bb383c17809ab --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialResourceImpl.java @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.implementation; + +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.azure.resourcemanager.datafactory.models.Credential; +import com.azure.resourcemanager.datafactory.models.CredentialResource; + +public final class CredentialResourceImpl implements CredentialResource { + private CredentialResourceInner innerObject; + + private final com.azure.resourcemanager.datafactory.DataFactoryManager serviceManager; + + CredentialResourceImpl(CredentialResourceInner innerObject, + com.azure.resourcemanager.datafactory.DataFactoryManager serviceManager) { + this.innerObject = innerObject; + this.serviceManager = serviceManager; + } + + public String id() { + return this.innerModel().id(); + } + + public Credential properties() { + return this.innerModel().properties(); + } + + public String name() { + return this.innerModel().name(); + } + + public String type() { + return this.innerModel().type(); + } + + public String etag() { + return this.innerModel().etag(); + } + + public CredentialResourceInner innerModel() { + return this.innerObject; + } + + private com.azure.resourcemanager.datafactory.DataFactoryManager manager() { + return this.serviceManager; + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientBuilder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientBuilder.java index 87a84ff6bcc7a..1e1b64eca9b8a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientBuilder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientBuilder.java @@ -123,11 +123,13 @@ public DataFactoryManagementClientBuilder serializerAdapter(SerializerAdapter se public DataFactoryManagementClientImpl buildClient() { String localEndpoint = (endpoint != 
null) ? endpoint : "https://management.azure.com"; AzureEnvironment localEnvironment = (environment != null) ? environment : AzureEnvironment.AZURE; - HttpPipeline localPipeline = (pipeline != null) ? pipeline + HttpPipeline localPipeline = (pipeline != null) + ? pipeline : new HttpPipelineBuilder().policies(new UserAgentPolicy(), new RetryPolicy()).build(); Duration localDefaultPollInterval = (defaultPollInterval != null) ? defaultPollInterval : Duration.ofSeconds(30); - SerializerAdapter localSerializerAdapter = (serializerAdapter != null) ? serializerAdapter + SerializerAdapter localSerializerAdapter = (serializerAdapter != null) + ? serializerAdapter : SerializerFactory.createDefaultManagementSerializerAdapter(); DataFactoryManagementClientImpl client = new DataFactoryManagementClientImpl(localPipeline, localSerializerAdapter, localDefaultPollInterval, localEnvironment, this.subscriptionId, localEndpoint); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java index 498341405747b..f7aa8ce115419 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java @@ -5,6 +5,7 @@ package com.azure.resourcemanager.datafactory.implementation; import com.azure.core.annotation.ServiceClient; +import com.azure.core.http.HttpHeaderName; import com.azure.core.http.HttpHeaders; import com.azure.core.http.HttpPipeline; import com.azure.core.http.HttpResponse; @@ -12,8 +13,8 @@ import com.azure.core.management.AzureEnvironment; import com.azure.core.management.exception.ManagementError; import com.azure.core.management.exception.ManagementException; -import com.azure.core.management.polling.PollResult; import com.azure.core.management.polling.PollerFactory; +import com.azure.core.management.polling.PollResult; import com.azure.core.util.Context; import com.azure.core.util.CoreUtils; import com.azure.core.util.logging.ClientLogger; @@ -41,8 +42,8 @@ import com.azure.resourcemanager.datafactory.fluent.OperationsClient; import com.azure.resourcemanager.datafactory.fluent.PipelineRunsClient; import com.azure.resourcemanager.datafactory.fluent.PipelinesClient; -import com.azure.resourcemanager.datafactory.fluent.PrivateEndPointConnectionsClient; import com.azure.resourcemanager.datafactory.fluent.PrivateEndpointConnectionOperationsClient; +import com.azure.resourcemanager.datafactory.fluent.PrivateEndPointConnectionsClient; import com.azure.resourcemanager.datafactory.fluent.PrivateLinkResourcesClient; import com.azure.resourcemanager.datafactory.fluent.TriggerRunsClient; import com.azure.resourcemanager.datafactory.fluent.TriggersClient; @@ -569,8 +570,8 @@ public Mono getLroFinalResultOrError(AsyncPollResponse, if (errorBody != null) { // try to deserialize error body to ManagementError try { - managementError = this.getSerializerAdapter().deserialize(errorBody, ManagementError.class, - SerializerEncoding.JSON); + managementError = this.getSerializerAdapter() + .deserialize(errorBody, ManagementError.class, SerializerEncoding.JSON); if (managementError.getCode() == null || 
managementError.getMessage() == null) { managementError = null; } @@ -611,7 +612,7 @@ public int getStatusCode() { } public String getHeaderValue(String s) { - return httpHeaders.getValue(s); + return httpHeaders.getValue(HttpHeaderName.fromString(s)); } public HttpHeaders getHeaders() { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowResourceImpl.java index 1ad04f1edc110..2c64b07b7d6d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowResourceImpl.java @@ -64,14 +64,20 @@ public DataFlowResourceImpl withExistingFactory(String resourceGroupName, String } public DataFlowResource create() { - this.innerObject = serviceManager.serviceClient().getDataFlows().createOrUpdateWithResponse(resourceGroupName, - factoryName, dataFlowName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .createOrUpdateWithResponse(resourceGroupName, factoryName, dataFlowName, this.innerModel(), createIfMatch, + Context.NONE) + .getValue(); return this; } public DataFlowResource create(Context context) { - this.innerObject = serviceManager.serviceClient().getDataFlows().createOrUpdateWithResponse(resourceGroupName, - factoryName, dataFlowName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .createOrUpdateWithResponse(resourceGroupName, factoryName, dataFlowName, this.innerModel(), createIfMatch, + context) + .getValue(); return this; } @@ -88,14 +94,20 @@ public DataFlowResourceImpl update() { } public DataFlowResource apply() { - this.innerObject = serviceManager.serviceClient().getDataFlows().createOrUpdateWithResponse(resourceGroupName, - factoryName, dataFlowName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .createOrUpdateWithResponse(resourceGroupName, factoryName, dataFlowName, this.innerModel(), updateIfMatch, + Context.NONE) + .getValue(); return this; } public DataFlowResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getDataFlows().createOrUpdateWithResponse(resourceGroupName, - factoryName, dataFlowName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .createOrUpdateWithResponse(resourceGroupName, factoryName, dataFlowName, this.innerModel(), updateIfMatch, + context) + .getValue(); return this; } @@ -110,15 +122,19 @@ public DataFlowResource apply(Context context) { public DataFlowResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getDataFlows() - .getWithResponse(resourceGroupName, factoryName, dataFlowName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .getWithResponse(resourceGroupName, factoryName, dataFlowName, localIfNoneMatch, Context.NONE) + .getValue(); return this; } public DataFlowResource refresh(Context context) { String localIfNoneMatch = null; - 
this.innerObject = serviceManager.serviceClient().getDataFlows() - .getWithResponse(resourceGroupName, factoryName, dataFlowName, localIfNoneMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDataFlows() + .getWithResponse(resourceGroupName, factoryName, dataFlowName, localIfNoneMatch, context) + .getValue(); return this; } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DatasetResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DatasetResourceImpl.java index 731bcabaaf945..867df7a2dfd62 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DatasetResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DatasetResourceImpl.java @@ -63,14 +63,20 @@ public DatasetResourceImpl withExistingFactory(String resourceGroupName, String } public DatasetResource create() { - this.innerObject = serviceManager.serviceClient().getDatasets().createOrUpdateWithResponse(resourceGroupName, - factoryName, datasetName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .createOrUpdateWithResponse(resourceGroupName, factoryName, datasetName, this.innerModel(), createIfMatch, + Context.NONE) + .getValue(); return this; } public DatasetResource create(Context context) { - this.innerObject = serviceManager.serviceClient().getDatasets().createOrUpdateWithResponse(resourceGroupName, - factoryName, datasetName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .createOrUpdateWithResponse(resourceGroupName, factoryName, datasetName, this.innerModel(), createIfMatch, + context) + .getValue(); return this; } @@ -87,14 +93,20 @@ public DatasetResourceImpl update() { } public DatasetResource apply() { - this.innerObject = serviceManager.serviceClient().getDatasets().createOrUpdateWithResponse(resourceGroupName, - factoryName, datasetName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .createOrUpdateWithResponse(resourceGroupName, factoryName, datasetName, this.innerModel(), updateIfMatch, + Context.NONE) + .getValue(); return this; } public DatasetResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getDatasets().createOrUpdateWithResponse(resourceGroupName, - factoryName, datasetName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .createOrUpdateWithResponse(resourceGroupName, factoryName, datasetName, this.innerModel(), updateIfMatch, + context) + .getValue(); return this; } @@ -109,15 +121,19 @@ public DatasetResource apply(Context context) { public DatasetResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getDatasets() - .getWithResponse(resourceGroupName, factoryName, datasetName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .getWithResponse(resourceGroupName, factoryName, datasetName, localIfNoneMatch, Context.NONE) + .getValue(); return this; } public DatasetResource refresh(Context context) 
{ String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getDatasets() - .getWithResponse(resourceGroupName, factoryName, datasetName, localIfNoneMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getDatasets() + .getWithResponse(resourceGroupName, factoryName, datasetName, localIfNoneMatch, context) + .getValue(); return this; } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlBatchResponseImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlBatchResponseImpl.java index f05e943506ee3..094ff19dc2eca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlBatchResponseImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlBatchResponseImpl.java @@ -27,7 +27,8 @@ public List exposureControlResponses() { List inner = this.innerModel().exposureControlResponses(); if (inner != null) { return Collections.unmodifiableList(inner.stream() - .map(inner1 -> new ExposureControlResponseImpl(inner1, this.manager())).collect(Collectors.toList())); + .map(inner1 -> new ExposureControlResponseImpl(inner1, this.manager())) + .collect(Collectors.toList())); } else { return Collections.emptyList(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlsImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlsImpl.java index 7d17e704ac5df..4ec0b65ed62c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlsImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ExposureControlsImpl.java @@ -76,9 +76,9 @@ public ExposureControlResponse getFeatureValueByFactory(String resourceGroupName public Response queryFeatureValuesByFactoryWithResponse(String resourceGroupName, String factoryName, ExposureControlBatchRequest exposureControlBatchRequest, Context context) { - Response inner - = this.serviceClient().queryFeatureValuesByFactoryWithResponse(resourceGroupName, factoryName, - exposureControlBatchRequest, context); + Response inner = this.serviceClient() + .queryFeatureValuesByFactoryWithResponse(resourceGroupName, factoryName, exposureControlBatchRequest, + context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ExposureControlBatchResponseImpl(inner.getValue(), this.manager())); @@ -89,8 +89,8 @@ public Response queryFeatureValuesByFactoryWithRes public ExposureControlBatchResponse queryFeatureValuesByFactory(String resourceGroupName, String factoryName, ExposureControlBatchRequest exposureControlBatchRequest) { - ExposureControlBatchResponseInner inner = this.serviceClient().queryFeatureValuesByFactory(resourceGroupName, - factoryName, exposureControlBatchRequest); + ExposureControlBatchResponseInner inner = this.serviceClient() + .queryFeatureValuesByFactory(resourceGroupName, factoryName, exposureControlBatchRequest); if (inner != null) { return new 
ExposureControlBatchResponseImpl(inner, this.manager()); } else { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/FactoryImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/FactoryImpl.java index eb9445f5e64f9..0b143c2dbb454 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/FactoryImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/FactoryImpl.java @@ -142,14 +142,16 @@ public FactoryImpl withExistingResourceGroup(String resourceGroupName) { } public Factory create() { - this.innerObject = serviceManager.serviceClient().getFactories() + this.innerObject = serviceManager.serviceClient() + .getFactories() .createOrUpdateWithResponse(resourceGroupName, factoryName, this.innerModel(), createIfMatch, Context.NONE) .getValue(); return this; } public Factory create(Context context) { - this.innerObject = serviceManager.serviceClient().getFactories() + this.innerObject = serviceManager.serviceClient() + .getFactories() .createOrUpdateWithResponse(resourceGroupName, factoryName, this.innerModel(), createIfMatch, context) .getValue(); return this; @@ -168,14 +170,18 @@ public FactoryImpl update() { } public Factory apply() { - this.innerObject = serviceManager.serviceClient().getFactories() - .updateWithResponse(resourceGroupName, factoryName, updateFactoryUpdateParameters, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getFactories() + .updateWithResponse(resourceGroupName, factoryName, updateFactoryUpdateParameters, Context.NONE) + .getValue(); return this; } public Factory apply(Context context) { - this.innerObject = serviceManager.serviceClient().getFactories() - .updateWithResponse(resourceGroupName, factoryName, updateFactoryUpdateParameters, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getFactories() + .updateWithResponse(resourceGroupName, factoryName, updateFactoryUpdateParameters, context) + .getValue(); return this; } @@ -188,32 +194,36 @@ public Factory apply(Context context) { public Factory refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getFactories() - .getByResourceGroupWithResponse(resourceGroupName, factoryName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getFactories() + .getByResourceGroupWithResponse(resourceGroupName, factoryName, localIfNoneMatch, Context.NONE) + .getValue(); return this; } public Factory refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getFactories() - .getByResourceGroupWithResponse(resourceGroupName, factoryName, localIfNoneMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getFactories() + .getByResourceGroupWithResponse(resourceGroupName, factoryName, localIfNoneMatch, context) + .getValue(); return this; } public Response getGitHubAccessTokenWithResponse(GitHubAccessTokenRequest gitHubAccessTokenRequest, Context context) { - return serviceManager.factories().getGitHubAccessTokenWithResponse(resourceGroupName, factoryName, - gitHubAccessTokenRequest, context); + return serviceManager.factories() + .getGitHubAccessTokenWithResponse(resourceGroupName, factoryName, 
gitHubAccessTokenRequest, context); } public GitHubAccessTokenResponse getGitHubAccessToken(GitHubAccessTokenRequest gitHubAccessTokenRequest) { - return serviceManager.factories().getGitHubAccessToken(resourceGroupName, factoryName, - gitHubAccessTokenRequest); + return serviceManager.factories() + .getGitHubAccessToken(resourceGroupName, factoryName, gitHubAccessTokenRequest); } public Response getDataPlaneAccessWithResponse(UserAccessPolicy policy, Context context) { - return serviceManager.factories().getDataPlaneAccessWithResponse(resourceGroupName, factoryName, policy, - context); + return serviceManager.factories() + .getDataPlaneAccessWithResponse(resourceGroupName, factoryName, policy, context); } public AccessPolicyResponse getDataPlaneAccess(UserAccessPolicy policy) { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/GlobalParameterResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/GlobalParameterResourceImpl.java index 16af6639a1ba4..eda32cb99b36b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/GlobalParameterResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/GlobalParameterResourceImpl.java @@ -67,14 +67,17 @@ public GlobalParameterResourceImpl withExistingFactory(String resourceGroupName, } public GlobalParameterResource create() { - this.innerObject - = serviceManager.serviceClient().getGlobalParameters().createOrUpdateWithResponse(resourceGroupName, - factoryName, globalParameterName, this.innerModel(), Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() + .createOrUpdateWithResponse(resourceGroupName, factoryName, globalParameterName, this.innerModel(), + Context.NONE) + .getValue(); return this; } public GlobalParameterResource create(Context context) { - this.innerObject = serviceManager.serviceClient().getGlobalParameters() + this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() .createOrUpdateWithResponse(resourceGroupName, factoryName, globalParameterName, this.innerModel(), context) .getValue(); return this; @@ -91,14 +94,17 @@ public GlobalParameterResourceImpl update() { } public GlobalParameterResource apply() { - this.innerObject - = serviceManager.serviceClient().getGlobalParameters().createOrUpdateWithResponse(resourceGroupName, - factoryName, globalParameterName, this.innerModel(), Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() + .createOrUpdateWithResponse(resourceGroupName, factoryName, globalParameterName, this.innerModel(), + Context.NONE) + .getValue(); return this; } public GlobalParameterResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getGlobalParameters() + this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() .createOrUpdateWithResponse(resourceGroupName, factoryName, globalParameterName, this.innerModel(), context) .getValue(); return this; @@ -114,14 +120,18 @@ public GlobalParameterResource apply(Context context) { } public GlobalParameterResource refresh() { - this.innerObject = serviceManager.serviceClient().getGlobalParameters() - .getWithResponse(resourceGroupName, factoryName, globalParameterName, Context.NONE).getValue(); + 
this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() + .getWithResponse(resourceGroupName, factoryName, globalParameterName, Context.NONE) + .getValue(); return this; } public GlobalParameterResource refresh(Context context) { - this.innerObject = serviceManager.serviceClient().getGlobalParameters() - .getWithResponse(resourceGroupName, factoryName, globalParameterName, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getGlobalParameters() + .getWithResponse(resourceGroupName, factoryName, globalParameterName, context) + .getValue(); return this; } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeNodesImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeNodesImpl.java index 4911b4b76d15b..13c457057cf94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeNodesImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeNodesImpl.java @@ -31,8 +31,8 @@ public IntegrationRuntimeNodesImpl(IntegrationRuntimeNodesClient innerClient, public Response getWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, String nodeName, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, nodeName, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, integrationRuntimeName, nodeName, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new SelfHostedIntegrationRuntimeNodeImpl(inner.getValue(), this.manager())); @@ -54,8 +54,8 @@ public SelfHostedIntegrationRuntimeNode get(String resourceGroupName, String fac public Response deleteWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, String nodeName, Context context) { - return this.serviceClient().deleteWithResponse(resourceGroupName, factoryName, integrationRuntimeName, nodeName, - context); + return this.serviceClient() + .deleteWithResponse(resourceGroupName, factoryName, integrationRuntimeName, nodeName, context); } public void delete(String resourceGroupName, String factoryName, String integrationRuntimeName, String nodeName) { @@ -65,8 +65,8 @@ public void delete(String resourceGroupName, String factoryName, String integrat public Response updateWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, String nodeName, UpdateIntegrationRuntimeNodeRequest updateIntegrationRuntimeNodeRequest, Context context) { - Response inner - = this.serviceClient().updateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, nodeName, + Response inner = this.serviceClient() + .updateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, nodeName, updateIntegrationRuntimeNodeRequest, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), @@ -79,8 +79,9 @@ public Response updateWithResponse(String reso public SelfHostedIntegrationRuntimeNode update(String resourceGroupName, String factoryName, String integrationRuntimeName, String nodeName, 
UpdateIntegrationRuntimeNodeRequest updateIntegrationRuntimeNodeRequest) { - SelfHostedIntegrationRuntimeNodeInner inner = this.serviceClient().update(resourceGroupName, factoryName, - integrationRuntimeName, nodeName, updateIntegrationRuntimeNodeRequest); + SelfHostedIntegrationRuntimeNodeInner inner = this.serviceClient() + .update(resourceGroupName, factoryName, integrationRuntimeName, nodeName, + updateIntegrationRuntimeNodeRequest); if (inner != null) { return new SelfHostedIntegrationRuntimeNodeImpl(inner, this.manager()); } else { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeObjectMetadatasImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeObjectMetadatasImpl.java index 398fd7483ca44..ae1928ba63019 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeObjectMetadatasImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeObjectMetadatasImpl.java @@ -53,8 +53,8 @@ public SsisObjectMetadataStatusResponse refresh(String resourceGroupName, String public Response getWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, GetSsisObjectMetadataRequest getMetadataRequest, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, getMetadataRequest, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, integrationRuntimeName, getMetadataRequest, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new SsisObjectMetadataListResponseImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeResourceImpl.java index dd7d6ab65d998..0838b5edb9b94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimeResourceImpl.java @@ -74,16 +74,20 @@ public IntegrationRuntimeResourceImpl withExistingFactory(String resourceGroupNa } public IntegrationRuntimeResource create() { - this.innerObject - = serviceManager.serviceClient().getIntegrationRuntimes().createOrUpdateWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() + .createOrUpdateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, this.innerModel(), + createIfMatch, Context.NONE) + .getValue(); return this; } public IntegrationRuntimeResource create(Context context) { - this.innerObject - = serviceManager.serviceClient().getIntegrationRuntimes().createOrUpdateWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, this.innerModel(), 
createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() + .createOrUpdateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, this.innerModel(), + createIfMatch, context) + .getValue(); return this; } @@ -101,14 +105,20 @@ public IntegrationRuntimeResourceImpl update() { } public IntegrationRuntimeResource apply() { - this.innerObject = serviceManager.serviceClient().getIntegrationRuntimes().updateWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, updateUpdateIntegrationRuntimeRequest, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() + .updateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + updateUpdateIntegrationRuntimeRequest, Context.NONE) + .getValue(); return this; } public IntegrationRuntimeResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getIntegrationRuntimes().updateWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, updateUpdateIntegrationRuntimeRequest, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() + .updateWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + updateUpdateIntegrationRuntimeRequest, context) + .getValue(); return this; } @@ -124,7 +134,8 @@ public IntegrationRuntimeResource apply(Context context) { public IntegrationRuntimeResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getIntegrationRuntimes() + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() .getWithResponse(resourceGroupName, factoryName, integrationRuntimeName, localIfNoneMatch, Context.NONE) .getValue(); return this; @@ -132,15 +143,16 @@ public IntegrationRuntimeResource refresh() { public IntegrationRuntimeResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getIntegrationRuntimes() + this.innerObject = serviceManager.serviceClient() + .getIntegrationRuntimes() .getWithResponse(resourceGroupName, factoryName, integrationRuntimeName, localIfNoneMatch, context) .getValue(); return this; } public Response getStatusWithResponse(Context context) { - return serviceManager.integrationRuntimes().getStatusWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .getStatusWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public IntegrationRuntimeStatusResponse getStatus() { @@ -148,35 +160,36 @@ public IntegrationRuntimeStatusResponse getStatus() { } public Response getConnectionInfoWithResponse(Context context) { - return serviceManager.integrationRuntimes().getConnectionInfoWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .getConnectionInfoWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public IntegrationRuntimeConnectionInfo getConnectionInfo() { - return serviceManager.integrationRuntimes().getConnectionInfo(resourceGroupName, factoryName, - integrationRuntimeName); + return serviceManager.integrationRuntimes() + .getConnectionInfo(resourceGroupName, factoryName, integrationRuntimeName); } public Response regenerateAuthKeyWithResponse( IntegrationRuntimeRegenerateKeyParameters regenerateKeyParameters, Context context) { - return 
serviceManager.integrationRuntimes().regenerateAuthKeyWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, regenerateKeyParameters, context); + return serviceManager.integrationRuntimes() + .regenerateAuthKeyWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + regenerateKeyParameters, context); } public IntegrationRuntimeAuthKeys regenerateAuthKey(IntegrationRuntimeRegenerateKeyParameters regenerateKeyParameters) { - return serviceManager.integrationRuntimes().regenerateAuthKey(resourceGroupName, factoryName, - integrationRuntimeName, regenerateKeyParameters); + return serviceManager.integrationRuntimes() + .regenerateAuthKey(resourceGroupName, factoryName, integrationRuntimeName, regenerateKeyParameters); } public Response listAuthKeysWithResponse(Context context) { - return serviceManager.integrationRuntimes().listAuthKeysWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .listAuthKeysWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public IntegrationRuntimeAuthKeys listAuthKeys() { - return serviceManager.integrationRuntimes().listAuthKeys(resourceGroupName, factoryName, - integrationRuntimeName); + return serviceManager.integrationRuntimes() + .listAuthKeys(resourceGroupName, factoryName, integrationRuntimeName); } public IntegrationRuntimeStatusResponse start() { @@ -184,8 +197,8 @@ public IntegrationRuntimeStatusResponse start() { } public IntegrationRuntimeStatusResponse start(Context context) { - return serviceManager.integrationRuntimes().start(resourceGroupName, factoryName, integrationRuntimeName, - context); + return serviceManager.integrationRuntimes() + .start(resourceGroupName, factoryName, integrationRuntimeName, context); } public void stop() { @@ -197,8 +210,8 @@ public void stop(Context context) { } public Response syncCredentialsWithResponse(Context context) { - return serviceManager.integrationRuntimes().syncCredentialsWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .syncCredentialsWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public void syncCredentials() { @@ -206,18 +219,18 @@ public void syncCredentials() { } public Response getMonitoringDataWithResponse(Context context) { - return serviceManager.integrationRuntimes().getMonitoringDataWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .getMonitoringDataWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public IntegrationRuntimeMonitoringData getMonitoringData() { - return serviceManager.integrationRuntimes().getMonitoringData(resourceGroupName, factoryName, - integrationRuntimeName); + return serviceManager.integrationRuntimes() + .getMonitoringData(resourceGroupName, factoryName, integrationRuntimeName); } public Response upgradeWithResponse(Context context) { - return serviceManager.integrationRuntimes().upgradeWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, context); + return serviceManager.integrationRuntimes() + .upgradeWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public void upgrade() { @@ -226,25 +239,28 @@ public void upgrade() { public Response removeLinksWithResponse(LinkedIntegrationRuntimeRequest linkedIntegrationRuntimeRequest, Context context) { - return 
serviceManager.integrationRuntimes().removeLinksWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, linkedIntegrationRuntimeRequest, context); + return serviceManager.integrationRuntimes() + .removeLinksWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + linkedIntegrationRuntimeRequest, context); } public void removeLinks(LinkedIntegrationRuntimeRequest linkedIntegrationRuntimeRequest) { - serviceManager.integrationRuntimes().removeLinks(resourceGroupName, factoryName, integrationRuntimeName, - linkedIntegrationRuntimeRequest); + serviceManager.integrationRuntimes() + .removeLinks(resourceGroupName, factoryName, integrationRuntimeName, linkedIntegrationRuntimeRequest); } public Response createLinkedIntegrationRuntimeWithResponse( CreateLinkedIntegrationRuntimeRequest createLinkedIntegrationRuntimeRequest, Context context) { - return serviceManager.integrationRuntimes().createLinkedIntegrationRuntimeWithResponse(resourceGroupName, - factoryName, integrationRuntimeName, createLinkedIntegrationRuntimeRequest, context); + return serviceManager.integrationRuntimes() + .createLinkedIntegrationRuntimeWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + createLinkedIntegrationRuntimeRequest, context); } public IntegrationRuntimeStatusResponse createLinkedIntegrationRuntime(CreateLinkedIntegrationRuntimeRequest createLinkedIntegrationRuntimeRequest) { - return serviceManager.integrationRuntimes().createLinkedIntegrationRuntime(resourceGroupName, factoryName, - integrationRuntimeName, createLinkedIntegrationRuntimeRequest); + return serviceManager.integrationRuntimes() + .createLinkedIntegrationRuntime(resourceGroupName, factoryName, integrationRuntimeName, + createLinkedIntegrationRuntimeRequest); } public IntegrationRuntimeResourceImpl withProperties(IntegrationRuntime properties) { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimesImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimesImpl.java index 9d68eff456cdb..99f1479a8f339 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimesImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/IntegrationRuntimesImpl.java @@ -23,8 +23,8 @@ import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeRegenerateKeyParameters; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimes; +import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest; public final class IntegrationRuntimesImpl implements IntegrationRuntimes { @@ -57,8 +57,8 @@ public PagedIterable listByFactory(String resourceGr public Response getWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, 
integrationRuntimeName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, integrationRuntimeName, ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new IntegrationRuntimeResourceImpl(inner.getValue(), this.manager())); @@ -112,8 +112,8 @@ public IntegrationRuntimeStatusResponse getStatus(String resourceGroupName, Stri public Response listOutboundNetworkDependenciesEndpointsWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, Context context) { - Response inner - = this.serviceClient().listOutboundNetworkDependenciesEndpointsWithResponse(resourceGroupName, factoryName, + Response inner = this.serviceClient() + .listOutboundNetworkDependenciesEndpointsWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), @@ -161,8 +161,9 @@ public IntegrationRuntimeConnectionInfo getConnectionInfo(String resourceGroupNa public Response regenerateAuthKeyWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, IntegrationRuntimeRegenerateKeyParameters regenerateKeyParameters, Context context) { - Response inner = this.serviceClient().regenerateAuthKeyWithResponse( - resourceGroupName, factoryName, integrationRuntimeName, regenerateKeyParameters, context); + Response inner = this.serviceClient() + .regenerateAuthKeyWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + regenerateKeyParameters, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new IntegrationRuntimeAuthKeysImpl(inner.getValue(), this.manager())); @@ -173,8 +174,8 @@ public Response regenerateAuthKeyWithResponse(String public IntegrationRuntimeAuthKeys regenerateAuthKey(String resourceGroupName, String factoryName, String integrationRuntimeName, IntegrationRuntimeRegenerateKeyParameters regenerateKeyParameters) { - IntegrationRuntimeAuthKeysInner inner = this.serviceClient().regenerateAuthKey(resourceGroupName, factoryName, - integrationRuntimeName, regenerateKeyParameters); + IntegrationRuntimeAuthKeysInner inner = this.serviceClient() + .regenerateAuthKey(resourceGroupName, factoryName, integrationRuntimeName, regenerateKeyParameters); if (inner != null) { return new IntegrationRuntimeAuthKeysImpl(inner, this.manager()); } else { @@ -237,8 +238,8 @@ public void stop(String resourceGroupName, String factoryName, String integratio public Response syncCredentialsWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, Context context) { - return this.serviceClient().syncCredentialsWithResponse(resourceGroupName, factoryName, integrationRuntimeName, - context); + return this.serviceClient() + .syncCredentialsWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public void syncCredentials(String resourceGroupName, String factoryName, String integrationRuntimeName) { @@ -270,8 +271,8 @@ public IntegrationRuntimeMonitoringData getMonitoringData(String resourceGroupNa public Response upgradeWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, Context context) { - return this.serviceClient().upgradeWithResponse(resourceGroupName, factoryName, integrationRuntimeName, - context); + return this.serviceClient() + 
.upgradeWithResponse(resourceGroupName, factoryName, integrationRuntimeName, context); } public void upgrade(String resourceGroupName, String factoryName, String integrationRuntimeName) { @@ -281,22 +282,23 @@ public void upgrade(String resourceGroupName, String factoryName, String integra public Response removeLinksWithResponse(String resourceGroupName, String factoryName, String integrationRuntimeName, LinkedIntegrationRuntimeRequest linkedIntegrationRuntimeRequest, Context context) { - return this.serviceClient().removeLinksWithResponse(resourceGroupName, factoryName, integrationRuntimeName, - linkedIntegrationRuntimeRequest, context); + return this.serviceClient() + .removeLinksWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + linkedIntegrationRuntimeRequest, context); } public void removeLinks(String resourceGroupName, String factoryName, String integrationRuntimeName, LinkedIntegrationRuntimeRequest linkedIntegrationRuntimeRequest) { - this.serviceClient().removeLinks(resourceGroupName, factoryName, integrationRuntimeName, - linkedIntegrationRuntimeRequest); + this.serviceClient() + .removeLinks(resourceGroupName, factoryName, integrationRuntimeName, linkedIntegrationRuntimeRequest); } public Response createLinkedIntegrationRuntimeWithResponse( String resourceGroupName, String factoryName, String integrationRuntimeName, CreateLinkedIntegrationRuntimeRequest createLinkedIntegrationRuntimeRequest, Context context) { - Response inner - = this.serviceClient().createLinkedIntegrationRuntimeWithResponse(resourceGroupName, factoryName, - integrationRuntimeName, createLinkedIntegrationRuntimeRequest, context); + Response inner = this.serviceClient() + .createLinkedIntegrationRuntimeWithResponse(resourceGroupName, factoryName, integrationRuntimeName, + createLinkedIntegrationRuntimeRequest, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new IntegrationRuntimeStatusResponseImpl(inner.getValue(), this.manager())); @@ -307,8 +309,9 @@ public Response createLinkedIntegrationRuntime public IntegrationRuntimeStatusResponse createLinkedIntegrationRuntime(String resourceGroupName, String factoryName, String integrationRuntimeName, CreateLinkedIntegrationRuntimeRequest createLinkedIntegrationRuntimeRequest) { - IntegrationRuntimeStatusResponseInner inner = this.serviceClient().createLinkedIntegrationRuntime( - resourceGroupName, factoryName, integrationRuntimeName, createLinkedIntegrationRuntimeRequest); + IntegrationRuntimeStatusResponseInner inner = this.serviceClient() + .createLinkedIntegrationRuntime(resourceGroupName, factoryName, integrationRuntimeName, + createLinkedIntegrationRuntimeRequest); if (inner != null) { return new IntegrationRuntimeStatusResponseImpl(inner, this.manager()); } else { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServiceResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServiceResourceImpl.java index b7d2e2550461e..b5bbd19fad4d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServiceResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServiceResourceImpl.java @@ -64,16 +64,20 @@ public LinkedServiceResourceImpl 
withExistingFactory(String resourceGroupName, S } public LinkedServiceResource create() { - this.innerObject - = serviceManager.serviceClient().getLinkedServices().createOrUpdateWithResponse(resourceGroupName, - factoryName, linkedServiceName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() + .createOrUpdateWithResponse(resourceGroupName, factoryName, linkedServiceName, this.innerModel(), + createIfMatch, Context.NONE) + .getValue(); return this; } public LinkedServiceResource create(Context context) { - this.innerObject - = serviceManager.serviceClient().getLinkedServices().createOrUpdateWithResponse(resourceGroupName, - factoryName, linkedServiceName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() + .createOrUpdateWithResponse(resourceGroupName, factoryName, linkedServiceName, this.innerModel(), + createIfMatch, context) + .getValue(); return this; } @@ -90,16 +94,20 @@ public LinkedServiceResourceImpl update() { } public LinkedServiceResource apply() { - this.innerObject - = serviceManager.serviceClient().getLinkedServices().createOrUpdateWithResponse(resourceGroupName, - factoryName, linkedServiceName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() + .createOrUpdateWithResponse(resourceGroupName, factoryName, linkedServiceName, this.innerModel(), + updateIfMatch, Context.NONE) + .getValue(); return this; } public LinkedServiceResource apply(Context context) { - this.innerObject - = serviceManager.serviceClient().getLinkedServices().createOrUpdateWithResponse(resourceGroupName, - factoryName, linkedServiceName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() + .createOrUpdateWithResponse(resourceGroupName, factoryName, linkedServiceName, this.innerModel(), + updateIfMatch, context) + .getValue(); return this; } @@ -114,7 +122,8 @@ public LinkedServiceResource apply(Context context) { public LinkedServiceResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getLinkedServices() + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() .getWithResponse(resourceGroupName, factoryName, linkedServiceName, localIfNoneMatch, Context.NONE) .getValue(); return this; @@ -122,8 +131,10 @@ public LinkedServiceResource refresh() { public LinkedServiceResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getLinkedServices() - .getWithResponse(resourceGroupName, factoryName, linkedServiceName, localIfNoneMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getLinkedServices() + .getWithResponse(resourceGroupName, factoryName, linkedServiceName, localIfNoneMatch, context) + .getValue(); return this; } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServicesImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServicesImpl.java index 020174a124eb6..0c27e6bd661c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServicesImpl.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/LinkedServicesImpl.java @@ -42,8 +42,8 @@ public PagedIterable listByFactory(String resourceGroupNa public Response getWithResponse(String resourceGroupName, String factoryName, String linkedServiceName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, linkedServiceName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, linkedServiceName, ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new LinkedServiceResourceImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedIdentityCredentialResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedIdentityCredentialResourceImpl.java deleted file mode 100644 index b3f49a561f136..0000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedIdentityCredentialResourceImpl.java +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.implementation; - -import com.azure.core.util.Context; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; - -public final class ManagedIdentityCredentialResourceImpl implements ManagedIdentityCredentialResource, - ManagedIdentityCredentialResource.Definition, ManagedIdentityCredentialResource.Update { - private ManagedIdentityCredentialResourceInner innerObject; - - private final com.azure.resourcemanager.datafactory.DataFactoryManager serviceManager; - - public String id() { - return this.innerModel().id(); - } - - public ManagedIdentityCredential properties() { - return this.innerModel().properties(); - } - - public String name() { - return this.innerModel().name(); - } - - public String type() { - return this.innerModel().type(); - } - - public String etag() { - return this.innerModel().etag(); - } - - public String resourceGroupName() { - return resourceGroupName; - } - - public ManagedIdentityCredentialResourceInner innerModel() { - return this.innerObject; - } - - private com.azure.resourcemanager.datafactory.DataFactoryManager manager() { - return this.serviceManager; - } - - private String resourceGroupName; - - private String factoryName; - - private String credentialName; - - private String createIfMatch; - - private String updateIfMatch; - - public ManagedIdentityCredentialResourceImpl withExistingFactory(String resourceGroupName, String factoryName) { - this.resourceGroupName = resourceGroupName; - this.factoryName = factoryName; - return this; - } - - public ManagedIdentityCredentialResource create() { - this.innerObject - = serviceManager.serviceClient().getCredentialOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, credentialName, this.innerModel(), createIfMatch, 
Context.NONE).getValue(); - return this; - } - - public ManagedIdentityCredentialResource create(Context context) { - this.innerObject - = serviceManager.serviceClient().getCredentialOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, credentialName, this.innerModel(), createIfMatch, context).getValue(); - return this; - } - - ManagedIdentityCredentialResourceImpl(String name, - com.azure.resourcemanager.datafactory.DataFactoryManager serviceManager) { - this.innerObject = new ManagedIdentityCredentialResourceInner(); - this.serviceManager = serviceManager; - this.credentialName = name; - this.createIfMatch = null; - } - - public ManagedIdentityCredentialResourceImpl update() { - this.updateIfMatch = null; - return this; - } - - public ManagedIdentityCredentialResource apply() { - this.innerObject - = serviceManager.serviceClient().getCredentialOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, credentialName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); - return this; - } - - public ManagedIdentityCredentialResource apply(Context context) { - this.innerObject - = serviceManager.serviceClient().getCredentialOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, credentialName, this.innerModel(), updateIfMatch, context).getValue(); - return this; - } - - ManagedIdentityCredentialResourceImpl(ManagedIdentityCredentialResourceInner innerObject, - com.azure.resourcemanager.datafactory.DataFactoryManager serviceManager) { - this.innerObject = innerObject; - this.serviceManager = serviceManager; - this.resourceGroupName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "resourceGroups"); - this.factoryName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "factories"); - this.credentialName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "credentials"); - } - - public ManagedIdentityCredentialResource refresh() { - String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getCredentialOperations() - .getWithResponse(resourceGroupName, factoryName, credentialName, localIfNoneMatch, Context.NONE).getValue(); - return this; - } - - public ManagedIdentityCredentialResource refresh(Context context) { - String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getCredentialOperations() - .getWithResponse(resourceGroupName, factoryName, credentialName, localIfNoneMatch, context).getValue(); - return this; - } - - public ManagedIdentityCredentialResourceImpl withProperties(ManagedIdentityCredential properties) { - this.innerModel().withProperties(properties); - return this; - } - - public ManagedIdentityCredentialResourceImpl withIfMatch(String ifMatch) { - if (isInCreateMode()) { - this.createIfMatch = ifMatch; - return this; - } else { - this.updateIfMatch = ifMatch; - return this; - } - } - - private boolean isInCreateMode() { - return this.innerModel().id() == null; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointResourceImpl.java index d13765ca80fed..833246da38f34 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointResourceImpl.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointResourceImpl.java @@ -68,17 +68,19 @@ public ManagedPrivateEndpointResourceImpl withExistingManagedVirtualNetwork(Stri } public ManagedPrivateEndpointResource create() { - this.innerObject = serviceManager - .serviceClient().getManagedPrivateEndpoints().createOrUpdateWithResponse(resourceGroupName, factoryName, - managedVirtualNetworkName, managedPrivateEndpointName, this.innerModel(), createIfMatch, Context.NONE) + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, + managedPrivateEndpointName, this.innerModel(), createIfMatch, Context.NONE) .getValue(); return this; } public ManagedPrivateEndpointResource create(Context context) { - this.innerObject = serviceManager - .serviceClient().getManagedPrivateEndpoints().createOrUpdateWithResponse(resourceGroupName, factoryName, - managedVirtualNetworkName, managedPrivateEndpointName, this.innerModel(), createIfMatch, context) + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, + managedPrivateEndpointName, this.innerModel(), createIfMatch, context) .getValue(); return this; } @@ -97,17 +99,19 @@ public ManagedPrivateEndpointResourceImpl update() { } public ManagedPrivateEndpointResource apply() { - this.innerObject = serviceManager - .serviceClient().getManagedPrivateEndpoints().createOrUpdateWithResponse(resourceGroupName, factoryName, - managedVirtualNetworkName, managedPrivateEndpointName, this.innerModel(), updateIfMatch, Context.NONE) + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, + managedPrivateEndpointName, this.innerModel(), updateIfMatch, Context.NONE) .getValue(); return this; } public ManagedPrivateEndpointResource apply(Context context) { - this.innerObject = serviceManager - .serviceClient().getManagedPrivateEndpoints().createOrUpdateWithResponse(resourceGroupName, factoryName, - managedVirtualNetworkName, managedPrivateEndpointName, this.innerModel(), updateIfMatch, context) + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, + managedPrivateEndpointName, this.innerModel(), updateIfMatch, context) .getValue(); return this; } @@ -126,7 +130,8 @@ public ManagedPrivateEndpointResource apply(Context context) { public ManagedPrivateEndpointResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getManagedPrivateEndpoints() + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, localIfNoneMatch, Context.NONE) .getValue(); @@ -135,7 +140,8 @@ public ManagedPrivateEndpointResource refresh() { public ManagedPrivateEndpointResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getManagedPrivateEndpoints() + this.innerObject = serviceManager.serviceClient() + .getManagedPrivateEndpoints() .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, 
managedPrivateEndpointName, localIfNoneMatch, context) .getValue(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointsImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointsImpl.java index d069827e3e31c..9ce1339f26eb4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointsImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedPrivateEndpointsImpl.java @@ -45,8 +45,9 @@ public PagedIterable listByFactory(String resour public Response getWithResponse(String resourceGroupName, String factoryName, String managedVirtualNetworkName, String managedPrivateEndpointName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, managedPrivateEndpointName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, + ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ManagedPrivateEndpointResourceImpl(inner.getValue(), this.manager())); @@ -57,8 +58,8 @@ public Response getWithResponse(String resourceG public ManagedPrivateEndpointResource get(String resourceGroupName, String factoryName, String managedVirtualNetworkName, String managedPrivateEndpointName) { - ManagedPrivateEndpointResourceInner inner = this.serviceClient().get(resourceGroupName, factoryName, - managedVirtualNetworkName, managedPrivateEndpointName); + ManagedPrivateEndpointResourceInner inner = this.serviceClient() + .get(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName); if (inner != null) { return new ManagedPrivateEndpointResourceImpl(inner, this.manager()); } else { @@ -68,14 +69,15 @@ public ManagedPrivateEndpointResource get(String resourceGroupName, String facto public Response deleteWithResponse(String resourceGroupName, String factoryName, String managedVirtualNetworkName, String managedPrivateEndpointName, Context context) { - return this.serviceClient().deleteWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, - managedPrivateEndpointName, context); + return this.serviceClient() + .deleteWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, + context); } public void delete(String resourceGroupName, String factoryName, String managedVirtualNetworkName, String managedPrivateEndpointName) { - this.serviceClient().delete(resourceGroupName, factoryName, managedVirtualNetworkName, - managedPrivateEndpointName); + this.serviceClient() + .delete(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName); } public ManagedPrivateEndpointResource getById(String id) { @@ -100,8 +102,10 @@ public ManagedPrivateEndpointResource getById(String id) { .format("The resource ID '%s' is not valid. 
Missing path segment 'managedPrivateEndpoints'.", id))); } String localIfNoneMatch = null; - return this.getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, - managedPrivateEndpointName, localIfNoneMatch, Context.NONE).getValue(); + return this + .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, + localIfNoneMatch, Context.NONE) + .getValue(); } public Response getByIdWithResponse(String id, String ifNoneMatch, diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworkResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworkResourceImpl.java index 1a0fd8817cd23..6f3bd18975cf5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworkResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworkResourceImpl.java @@ -64,16 +64,20 @@ public ManagedVirtualNetworkResourceImpl withExistingFactory(String resourceGrou } public ManagedVirtualNetworkResource create() { - this.innerObject - = serviceManager.serviceClient().getManagedVirtualNetworks().createOrUpdateWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, this.innerModel(), + createIfMatch, Context.NONE) + .getValue(); return this; } public ManagedVirtualNetworkResource create(Context context) { - this.innerObject - = serviceManager.serviceClient().getManagedVirtualNetworks().createOrUpdateWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, this.innerModel(), + createIfMatch, context) + .getValue(); return this; } @@ -91,16 +95,20 @@ public ManagedVirtualNetworkResourceImpl update() { } public ManagedVirtualNetworkResource apply() { - this.innerObject - = serviceManager.serviceClient().getManagedVirtualNetworks().createOrUpdateWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, this.innerModel(), + updateIfMatch, Context.NONE) + .getValue(); return this; } public ManagedVirtualNetworkResource apply(Context context) { - this.innerObject - = serviceManager.serviceClient().getManagedVirtualNetworks().createOrUpdateWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() + .createOrUpdateWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, this.innerModel(), + updateIfMatch, context) + .getValue(); return this; } @@ -116,7 +124,8 @@ public ManagedVirtualNetworkResource 
apply(Context context) { public ManagedVirtualNetworkResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getManagedVirtualNetworks() + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, localIfNoneMatch, Context.NONE) .getValue(); return this; @@ -124,7 +133,8 @@ public ManagedVirtualNetworkResource refresh() { public ManagedVirtualNetworkResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getManagedVirtualNetworks() + this.innerObject = serviceManager.serviceClient() + .getManagedVirtualNetworks() .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, localIfNoneMatch, context) .getValue(); return this; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworksImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworksImpl.java index 3acf3f2c82010..1bddac29c342f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworksImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ManagedVirtualNetworksImpl.java @@ -44,8 +44,8 @@ public PagedIterable listByFactory(String resourc public Response getWithResponse(String resourceGroupName, String factoryName, String managedVirtualNetworkName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, managedVirtualNetworkName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, managedVirtualNetworkName, ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ManagedVirtualNetworkResourceImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/OperationsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/OperationsClientImpl.java index 19fd6e8cc15c9..dc4698dfaa214 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/OperationsClientImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/OperationsClientImpl.java @@ -56,8 +56,8 @@ public final class OperationsClientImpl implements OperationsClient { } /** - * The interface defining all the services for DataFactoryManagementClientOperations to be used by the proxy - * service to perform REST calls. + * The interface defining all the services for DataFactoryManagementClientOperations to be used by the proxy service + * to perform REST calls. 
*/ @Host("{$host}") @ServiceInterface(name = "DataFactoryManagemen") diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelineResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelineResourceImpl.java index 751d6a1c44829..271b9b89136f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelineResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelineResourceImpl.java @@ -139,14 +139,20 @@ public PipelineResourceImpl withExistingFactory(String resourceGroupName, String } public PipelineResource create() { - this.innerObject = serviceManager.serviceClient().getPipelines().createOrUpdateWithResponse(resourceGroupName, - factoryName, pipelineName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPipelines() + .createOrUpdateWithResponse(resourceGroupName, factoryName, pipelineName, this.innerModel(), createIfMatch, + Context.NONE) + .getValue(); return this; } public PipelineResource create(Context context) { - this.innerObject = serviceManager.serviceClient().getPipelines().createOrUpdateWithResponse(resourceGroupName, - factoryName, pipelineName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPipelines() + .createOrUpdateWithResponse(resourceGroupName, factoryName, pipelineName, this.innerModel(), createIfMatch, + context) + .getValue(); return this; } @@ -163,14 +169,20 @@ public PipelineResourceImpl update() { } public PipelineResource apply() { - this.innerObject = serviceManager.serviceClient().getPipelines().createOrUpdateWithResponse(resourceGroupName, - factoryName, pipelineName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPipelines() + .createOrUpdateWithResponse(resourceGroupName, factoryName, pipelineName, this.innerModel(), updateIfMatch, + Context.NONE) + .getValue(); return this; } public PipelineResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getPipelines().createOrUpdateWithResponse(resourceGroupName, - factoryName, pipelineName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPipelines() + .createOrUpdateWithResponse(resourceGroupName, factoryName, pipelineName, this.innerModel(), updateIfMatch, + context) + .getValue(); return this; } @@ -185,22 +197,27 @@ public PipelineResource apply(Context context) { public PipelineResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getPipelines() - .getWithResponse(resourceGroupName, factoryName, pipelineName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPipelines() + .getWithResponse(resourceGroupName, factoryName, pipelineName, localIfNoneMatch, Context.NONE) + .getValue(); return this; } public PipelineResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getPipelines() - .getWithResponse(resourceGroupName, factoryName, pipelineName, localIfNoneMatch, context).getValue(); + this.innerObject = 
serviceManager.serviceClient() + .getPipelines() + .getWithResponse(resourceGroupName, factoryName, pipelineName, localIfNoneMatch, context) + .getValue(); return this; } public Response createRunWithResponse(String referencePipelineRunId, Boolean isRecovery, String startActivityName, Boolean startFromFailure, Map parameters, Context context) { - return serviceManager.pipelines().createRunWithResponse(resourceGroupName, factoryName, pipelineName, - referencePipelineRunId, isRecovery, startActivityName, startFromFailure, parameters, context); + return serviceManager.pipelines() + .createRunWithResponse(resourceGroupName, factoryName, pipelineName, referencePipelineRunId, isRecovery, + startActivityName, startFromFailure, parameters, context); } public CreateRunResponse createRun() { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelinesImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelinesImpl.java index 1a1ceabe71629..b7b595d257fbe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelinesImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PipelinesImpl.java @@ -75,9 +75,9 @@ public void delete(String resourceGroupName, String factoryName, String pipeline public Response createRunWithResponse(String resourceGroupName, String factoryName, String pipelineName, String referencePipelineRunId, Boolean isRecovery, String startActivityName, Boolean startFromFailure, Map parameters, Context context) { - Response inner - = this.serviceClient().createRunWithResponse(resourceGroupName, factoryName, pipelineName, - referencePipelineRunId, isRecovery, startActivityName, startFromFailure, parameters, context); + Response inner = this.serviceClient() + .createRunWithResponse(resourceGroupName, factoryName, pipelineName, referencePipelineRunId, isRecovery, + startActivityName, startFromFailure, parameters, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new CreateRunResponseImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndPointConnectionsImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndPointConnectionsImpl.java index 021d88dc94d24..fba65ca5e9094 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndPointConnectionsImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndPointConnectionsImpl.java @@ -9,8 +9,8 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.PrivateEndPointConnectionsClient; import com.azure.resourcemanager.datafactory.fluent.models.PrivateEndpointConnectionResourceInner; -import com.azure.resourcemanager.datafactory.models.PrivateEndPointConnections; import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; +import com.azure.resourcemanager.datafactory.models.PrivateEndPointConnections; public final class 
PrivateEndPointConnectionsImpl implements PrivateEndPointConnections { private static final ClientLogger LOGGER = new ClientLogger(PrivateEndPointConnectionsImpl.class); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsClientImpl.java index ff57a24d0717f..c8fc74f12a071 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsClientImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsClientImpl.java @@ -30,8 +30,7 @@ import reactor.core.publisher.Mono; /** - * An instance of this class provides access to all the operations defined in - * PrivateEndpointConnectionOperationsClient. + * An instance of this class provides access to all the operations defined in PrivateEndpointConnectionOperationsClient. */ public final class PrivateEndpointConnectionOperationsClientImpl implements PrivateEndpointConnectionOperationsClient { /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsImpl.java index bf593bf0a6de8..781a17de5cfbd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionOperationsImpl.java @@ -28,8 +28,8 @@ public PrivateEndpointConnectionOperationsImpl(PrivateEndpointConnectionOperatio public Response getWithResponse(String resourceGroupName, String factoryName, String privateEndpointConnectionName, String ifNoneMatch, Context context) { - Response inner = this.serviceClient().getWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, ifNoneMatch, context); + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, ifNoneMatch, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new PrivateEndpointConnectionResourceImpl(inner.getValue(), this.manager())); @@ -51,8 +51,8 @@ public PrivateEndpointConnectionResource get(String resourceGroupName, String fa public Response deleteWithResponse(String resourceGroupName, String factoryName, String privateEndpointConnectionName, Context context) { - return this.serviceClient().deleteWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, - context); + return this.serviceClient() + .deleteWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, context); } public void delete(String resourceGroupName, String factoryName, String privateEndpointConnectionName) { @@ -77,8 +77,10 @@ public PrivateEndpointConnectionResource getById(String id) { .format("The resource ID '%s' is not valid. 
Missing path segment 'privateEndpointConnections'.", id))); } String localIfNoneMatch = null; - return this.getWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, localIfNoneMatch, - Context.NONE).getValue(); + return this + .getWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, localIfNoneMatch, + Context.NONE) + .getValue(); } public Response getByIdWithResponse(String id, String ifNoneMatch, diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionResourceImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionResourceImpl.java index 337095ef66fed..0ebd146a38b73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/PrivateEndpointConnectionResourceImpl.java @@ -70,17 +70,19 @@ public PrivateEndpointConnectionResourceImpl withExistingFactory(String resource } public PrivateEndpointConnectionResource create() { - this.innerObject = serviceManager - .serviceClient().getPrivateEndpointConnectionOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, createPrivateEndpointWrapper, createIfMatch, Context.NONE) + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() + .createOrUpdateWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, + createPrivateEndpointWrapper, createIfMatch, Context.NONE) .getValue(); return this; } public PrivateEndpointConnectionResource create(Context context) { - this.innerObject = serviceManager - .serviceClient().getPrivateEndpointConnectionOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, createPrivateEndpointWrapper, createIfMatch, context) + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() + .createOrUpdateWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, + createPrivateEndpointWrapper, createIfMatch, context) .getValue(); return this; } @@ -101,17 +103,19 @@ public PrivateEndpointConnectionResourceImpl update() { } public PrivateEndpointConnectionResource apply() { - this.innerObject = serviceManager - .serviceClient().getPrivateEndpointConnectionOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, updatePrivateEndpointWrapper, updateIfMatch, Context.NONE) + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() + .createOrUpdateWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, + updatePrivateEndpointWrapper, updateIfMatch, Context.NONE) .getValue(); return this; } public PrivateEndpointConnectionResource apply(Context context) { - this.innerObject = serviceManager - .serviceClient().getPrivateEndpointConnectionOperations().createOrUpdateWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, updatePrivateEndpointWrapper, updateIfMatch, context) + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() + .createOrUpdateWithResponse(resourceGroupName, factoryName, 
privateEndpointConnectionName, + updatePrivateEndpointWrapper, updateIfMatch, context) .getValue(); return this; } @@ -128,15 +132,18 @@ public PrivateEndpointConnectionResource apply(Context context) { public PrivateEndpointConnectionResource refresh() { String localIfNoneMatch = null; - this.innerObject - = serviceManager.serviceClient().getPrivateEndpointConnectionOperations().getWithResponse(resourceGroupName, - factoryName, privateEndpointConnectionName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() + .getWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, localIfNoneMatch, + Context.NONE) + .getValue(); return this; } public PrivateEndpointConnectionResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getPrivateEndpointConnectionOperations() + this.innerObject = serviceManager.serviceClient() + .getPrivateEndpointConnectionOperations() .getWithResponse(resourceGroupName, factoryName, privateEndpointConnectionName, localIfNoneMatch, context) .getValue(); return this; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ResourceManagerUtils.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ResourceManagerUtils.java index 848c8d6fec683..8024dc36bc57d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ResourceManagerUtils.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ResourceManagerUtils.java @@ -41,7 +41,6 @@ static String getValueFromIdByName(String id, String name) { } } return null; - } static String getValueFromIdByParameterName(String id, String pathTemplate, String parameterName) { @@ -75,7 +74,6 @@ static String getValueFromIdByParameterName(String id, String pathTemplate, Stri } } return null; - } static PagedIterable mapPage(PagedIterable pageIterable, Function mapper) { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerQueryResponseImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerQueryResponseImpl.java index 1497f2f84b1f3..855d988e7337c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerQueryResponseImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerQueryResponseImpl.java @@ -27,7 +27,8 @@ public List value() { List inner = this.innerModel().value(); if (inner != null) { return Collections.unmodifiableList(inner.stream() - .map(inner1 -> new TriggerResourceImpl(inner1, this.manager())).collect(Collectors.toList())); + .map(inner1 -> new TriggerResourceImpl(inner1, this.manager())) + .collect(Collectors.toList())); } else { return Collections.emptyList(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerResourceImpl.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerResourceImpl.java index 07f3340b8f38f..01f458ff53193 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerResourceImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggerResourceImpl.java @@ -65,14 +65,20 @@ public TriggerResourceImpl withExistingFactory(String resourceGroupName, String } public TriggerResource create() { - this.innerObject = serviceManager.serviceClient().getTriggers().createOrUpdateWithResponse(resourceGroupName, - factoryName, triggerName, this.innerModel(), createIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .createOrUpdateWithResponse(resourceGroupName, factoryName, triggerName, this.innerModel(), createIfMatch, + Context.NONE) + .getValue(); return this; } public TriggerResource create(Context context) { - this.innerObject = serviceManager.serviceClient().getTriggers().createOrUpdateWithResponse(resourceGroupName, - factoryName, triggerName, this.innerModel(), createIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .createOrUpdateWithResponse(resourceGroupName, factoryName, triggerName, this.innerModel(), createIfMatch, + context) + .getValue(); return this; } @@ -89,14 +95,20 @@ public TriggerResourceImpl update() { } public TriggerResource apply() { - this.innerObject = serviceManager.serviceClient().getTriggers().createOrUpdateWithResponse(resourceGroupName, - factoryName, triggerName, this.innerModel(), updateIfMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .createOrUpdateWithResponse(resourceGroupName, factoryName, triggerName, this.innerModel(), updateIfMatch, + Context.NONE) + .getValue(); return this; } public TriggerResource apply(Context context) { - this.innerObject = serviceManager.serviceClient().getTriggers().createOrUpdateWithResponse(resourceGroupName, - factoryName, triggerName, this.innerModel(), updateIfMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .createOrUpdateWithResponse(resourceGroupName, factoryName, triggerName, this.innerModel(), updateIfMatch, + context) + .getValue(); return this; } @@ -111,15 +123,19 @@ public TriggerResource apply(Context context) { public TriggerResource refresh() { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getTriggers() - .getWithResponse(resourceGroupName, factoryName, triggerName, localIfNoneMatch, Context.NONE).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .getWithResponse(resourceGroupName, factoryName, triggerName, localIfNoneMatch, Context.NONE) + .getValue(); return this; } public TriggerResource refresh(Context context) { String localIfNoneMatch = null; - this.innerObject = serviceManager.serviceClient().getTriggers() - .getWithResponse(resourceGroupName, factoryName, triggerName, localIfNoneMatch, context).getValue(); + this.innerObject = serviceManager.serviceClient() + .getTriggers() + .getWithResponse(resourceGroupName, factoryName, triggerName, localIfNoneMatch, context) + .getValue(); return this; } @@ -132,8 +148,8 @@ public TriggerSubscriptionOperationStatus subscribeToEvents(Context context) { } public Response 
getEventSubscriptionStatusWithResponse(Context context) { - return serviceManager.triggers().getEventSubscriptionStatusWithResponse(resourceGroupName, factoryName, - triggerName, context); + return serviceManager.triggers() + .getEventSubscriptionStatusWithResponse(resourceGroupName, factoryName, triggerName, context); } public TriggerSubscriptionOperationStatus getEventSubscriptionStatus() { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggersImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggersImpl.java index 476f3baedb1c8..538e0e23926c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggersImpl.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/TriggersImpl.java @@ -16,8 +16,8 @@ import com.azure.resourcemanager.datafactory.models.TriggerFilterParameters; import com.azure.resourcemanager.datafactory.models.TriggerQueryResponse; import com.azure.resourcemanager.datafactory.models.TriggerResource; -import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; import com.azure.resourcemanager.datafactory.models.Triggers; +import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; public final class TriggersImpl implements Triggers { private static final ClientLogger LOGGER = new ClientLogger(TriggersImpl.class); @@ -45,8 +45,8 @@ public PagedIterable listByFactory(String resourceGroupName, St public Response queryByFactoryWithResponse(String resourceGroupName, String factoryName, TriggerFilterParameters filterParameters, Context context) { - Response inner = this.serviceClient().queryByFactoryWithResponse(resourceGroupName, - factoryName, filterParameters, context); + Response inner = this.serviceClient() + .queryByFactoryWithResponse(resourceGroupName, factoryName, filterParameters, context); if (inner != null) { return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new TriggerQueryResponseImpl(inner.getValue(), this.manager())); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/package-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/package-info.java index 1369422aa1c14..1f3617d9e8630 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/package-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/package-info.java @@ -4,7 +4,7 @@ /** * Package containing the implementations for DataFactoryManagementClient. - * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data - * Factory V2 services. + * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory + * V2 services. 
*/ package com.azure.resourcemanager.datafactory.implementation; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java index de62cfd7b3836..71c78cb2fe261 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -20,11 +21,7 @@ /** * A pipeline activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = Activity.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Activity.class, visible = true) @JsonTypeName("Activity") @JsonSubTypes({ @JsonSubTypes.Type(name = "Container", value = ControlActivity.class), @@ -32,6 +29,13 @@ @JsonSubTypes.Type(name = "ExecuteWranglingDataflow", value = ExecuteWranglingDataflowActivity.class) }) @Fluent public class Activity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Activity name. */ @@ -51,8 +55,7 @@ public class Activity { private ActivityState state; /* - * Status result of the activity when the state is set to Inactive. This is an optional property and if not - * provided when the activity is inactive, the status will be Succeeded by default. + * Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default. */ @JsonProperty(value = "onInactiveMarkAs") private ActivityOnInactiveMarkAs onInactiveMarkAs; @@ -79,6 +82,16 @@ public class Activity { * Creates an instance of Activity class. */ public Activity() { + this.type = "Activity"; + } + + /** + * Get the type property: Type of activity. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** @@ -241,8 +254,8 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (name() == null) { - throw LOGGER - .logExceptionAsError(new IllegalArgumentException("Missing required property name in model Activity")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property name in model Activity")); } if (dependsOn() != null) { dependsOn().forEach(e -> e.validate()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java index 979623a995f2e..2d5333a6cd32f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java @@ -119,12 +119,13 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (activity() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property activity in model ActivityDependency")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property activity in model ActivityDependency")); } if (dependencyConditions() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property dependencyConditions in model ActivityDependency")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property dependencyConditions in model ActivityDependency")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java index c9bdc344b13b1..e0cfe70e6067e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java @@ -18,15 +18,13 @@ @Fluent public final class ActivityPolicy { /* - * Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with - * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "timeout") private Object timeout; /* - * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: - * 0. + * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "retry") private Object retry; @@ -106,8 +104,7 @@ public ActivityPolicy withRetry(Object retry) { } /** - * Get the retryIntervalInSeconds property: Interval between each retry attempt (in seconds). The default is 30 - * sec. + * Get the retryIntervalInSeconds property: Interval between each retry attempt (in seconds). The default is 30 sec. 
* * @return the retryIntervalInSeconds value. */ @@ -116,8 +113,7 @@ public Integer retryIntervalInSeconds() { } /** - * Set the retryIntervalInSeconds property: Interval between each retry attempt (in seconds). The default is 30 - * sec. + * Set the retryIntervalInSeconds property: Interval between each retry attempt (in seconds). The default is 30 sec. * * @param retryIntervalInSeconds the retryIntervalInSeconds value to set. * @return the ActivityPolicy object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java index af40c39215266..d971145fb2869 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonMwsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Amazon Marketplace Web Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsLinkedService.class, visible = true) @JsonTypeName("AmazonMWS") @Fluent public final class AmazonMwsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonMWS"; + /* * Amazon Marketplace Web Service linked service properties. */ @@ -32,6 +40,16 @@ public final class AmazonMwsLinkedService extends LinkedService { public AmazonMwsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon Marketplace Web Service linked service properties. * @@ -268,8 +286,8 @@ public AmazonMwsLinkedService withUseHostVerification(Object useHostVerification } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -278,8 +296,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the AmazonMwsLinkedService object itself. 
@@ -293,8 +311,8 @@ public AmazonMwsLinkedService withUsePeerVerification(Object usePeerVerification } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -303,8 +321,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonMwsLinkedService object itself. @@ -326,8 +344,9 @@ public AmazonMwsLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonMwsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonMwsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java index 721e96c808986..067a02d4c96e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Amazon Marketplace Web Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsObjectDataset.class, visible = true) @JsonTypeName("AmazonMWSObject") @Fluent public final class AmazonMwsObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonMWSObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class AmazonMwsObjectDataset extends Dataset { public AmazonMwsObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java index 9917527cabd20..816eaf613a01a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Amazon Marketplace Web Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsSource.class, visible = true) @JsonTypeName("AmazonMWSSource") @Fluent public final class AmazonMwsSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonMWSSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AmazonMwsSource extends TabularSource { public AmazonMwsSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java index de0f4c60c1928..ccb58d761a7bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * AmazonRdsForOracle database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForOracleLinkedService.class, + visible = true) @JsonTypeName("AmazonRdsForOracle") @Fluent public final class AmazonRdsForOracleLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForOracle"; + /* * AmazonRdsForOracle database linked service properties. 
*/ @@ -32,6 +44,16 @@ public final class AmazonRdsForOracleLinkedService extends LinkedService { public AmazonRdsForOracleLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: AmazonRdsForOracle database linked service properties. * @@ -126,8 +148,8 @@ public AmazonRdsForOracleLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +158,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRdsForOracleLinkedService object itself. @@ -159,8 +181,9 @@ public AmazonRdsForOracleLinkedService withEncryptedCredential(String encryptedC public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonRdsForOracleLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonRdsForOracleLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java index bb8480266e085..2abc88e3f1c51 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java @@ -13,28 +13,25 @@ @Fluent public final class AmazonRdsForOraclePartitionSettings { /* - * Names of the physical partitions of AmazonRdsForOracle table. + * Names of the physical partitions of AmazonRdsForOracle table. */ @JsonProperty(value = "partitionNames") private Object partitionNames; /* - * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or - * Expression with resultType string). + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. 
Type: string (or Expression with resultType string). + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java index 9c983782905dc..d7dbd3d842709 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity AmazonRdsForOracle source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForOracleSource.class, + visible = true) @JsonTypeName("AmazonRdsForOracleSource") @Fluent public final class AmazonRdsForOracleSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForOracleSource"; + /* * AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). */ @@ -23,15 +35,13 @@ public final class AmazonRdsForOracleSource extends CopySource { private Object oracleReaderQuery; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression - * with resultType string). + * The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -43,8 +53,7 @@ public final class AmazonRdsForOracleSource extends CopySource { private AmazonRdsForOraclePartitionSettings partitionSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -55,6 +64,16 @@ public final class AmazonRdsForOracleSource extends CopySource { public AmazonRdsForOracleSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the oracleReaderQuery property: AmazonRdsForOracle reader query. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java index 5d10d2f35cba7..1c26cd901a864 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForOracleTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The AmazonRdsForOracle database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForOracleTableDataset.class, + visible = true) @JsonTypeName("AmazonRdsForOracleTable") @Fluent public final class AmazonRdsForOracleTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForOracleTable"; + /* * AmazonRdsForOracle dataset properties. */ @@ -31,6 +43,16 @@ public final class AmazonRdsForOracleTableDataset extends Dataset { public AmazonRdsForOracleTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: AmazonRdsForOracle dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java index cc011a15b6b93..e99a5ca2967e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Amazon RDS for SQL Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForSqlServerLinkedService.class, + visible = true) @JsonTypeName("AmazonRdsForSqlServer") @Fluent public final class AmazonRdsForSqlServerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForSqlServer"; + /* * Amazon RDS for SQL Server linked service properties. */ @@ -33,6 +45,16 @@ public final class AmazonRdsForSqlServerLinkedService extends LinkedService { public AmazonRdsForSqlServerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon RDS for SQL Server linked service properties. * @@ -152,8 +174,8 @@ public AmazonRdsForSqlServerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -162,8 +184,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRdsForSqlServerLinkedService object itself. 
@@ -209,8 +231,9 @@ public SqlAlwaysEncryptedProperties alwaysEncryptedSettings() { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonRdsForSqlServerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonRdsForSqlServerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java index 462a7422b256a..252aedb437450 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Amazon RDS for SQL Server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForSqlServerSource.class, + visible = true) @JsonTypeName("AmazonRdsForSqlServerSource") @Fluent public final class AmazonRdsForSqlServerSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForSqlServerSource"; + /* * SQL reader query. Type: string (or Expression with resultType string). */ @@ -23,8 +35,7 @@ public final class AmazonRdsForSqlServerSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. - * Type: string (or Expression with resultType string). + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; @@ -36,9 +47,7 @@ public final class AmazonRdsForSqlServerSource extends TabularSource { private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; @@ -50,8 +59,7 @@ public final class AmazonRdsForSqlServerSource extends TabularSource { private Object produceAdditionalTypes; /* - * The partition mechanism that will be used for Sql read in parallel. 
Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -68,6 +76,16 @@ public final class AmazonRdsForSqlServerSource extends TabularSource { public AmazonRdsForSqlServerSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL reader query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java index ce04d10d52e0a..e726d162c182c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Amazon RDS for SQL Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRdsForSqlServerTableDataset.class, + visible = true) @JsonTypeName("AmazonRdsForSqlServerTable") @Fluent public final class AmazonRdsForSqlServerTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRdsForSqlServerTable"; + /* * The Amazon RDS for SQL Server dataset properties. */ @@ -31,6 +43,16 @@ public final class AmazonRdsForSqlServerTableDataset extends Dataset { public AmazonRdsForSqlServerTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: The Amazon RDS for SQL Server dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java index a80a2f42fbb4e..91e640adad375 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Amazon Redshift. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRedshiftLinkedService.class, + visible = true) @JsonTypeName("AmazonRedshift") @Fluent public final class AmazonRedshiftLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRedshift"; + /* * Amazon Redshift linked service properties. */ @@ -33,6 +45,16 @@ public final class AmazonRedshiftLinkedService extends LinkedService { public AmazonRedshiftLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon Redshift linked service properties. * @@ -177,8 +199,8 @@ public AmazonRedshiftLinkedService withDatabase(Object database) { } /** - * Get the port property: The TCP port number that the Amazon Redshift server uses to listen for client - * connections. The default value is 5439. Type: integer (or Expression with resultType integer). + * Get the port property: The TCP port number that the Amazon Redshift server uses to listen for client connections. + * The default value is 5439. Type: integer (or Expression with resultType integer). * * @return the port value. */ @@ -187,8 +209,8 @@ public Object port() { } /** - * Set the port property: The TCP port number that the Amazon Redshift server uses to listen for client - * connections. The default value is 5439. Type: integer (or Expression with resultType integer). + * Set the port property: The TCP port number that the Amazon Redshift server uses to listen for client connections. + * The default value is 5439. Type: integer (or Expression with resultType integer). * * @param port the port value to set. * @return the AmazonRedshiftLinkedService object itself. @@ -202,8 +224,8 @@ public AmazonRedshiftLinkedService withPort(Object port) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. 
Type: string. * * @return the encryptedCredential value. */ @@ -212,8 +234,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonRedshiftLinkedService object itself. @@ -235,8 +257,9 @@ public AmazonRedshiftLinkedService withEncryptedCredential(String encryptedCrede public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonRedshiftLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonRedshiftLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java index 25b46ea12be83..fe90e805f7b3e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Amazon Redshift Source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonRedshiftSource.class, visible = true) @JsonTypeName("AmazonRedshiftSource") @Fluent public final class AmazonRedshiftSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRedshiftSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,9 +31,7 @@ public final class AmazonRedshiftSource extends TabularSource { private Object query; /* - * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With - * this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink - * from the interim S3. + * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. */ @JsonProperty(value = "redshiftUnloadSettings") private RedshiftUnloadSettings redshiftUnloadSettings; @@ -36,6 +42,16 @@ public final class AmazonRedshiftSource extends TabularSource { public AmazonRedshiftSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java index 0c5917b6c4b43..0a36e45eec02b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Amazon Redshift table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonRedshiftTableDataset.class, + visible = true) @JsonTypeName("AmazonRedshiftTable") @Fluent public final class AmazonRedshiftTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonRedshiftTable"; + /* * Amazon Redshift table dataset properties. */ @@ -31,6 +43,16 @@ public final class AmazonRedshiftTableDataset extends Dataset { public AmazonRedshiftTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon Redshift table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java index ed5aa6f309c93..8f21e8f12ab01 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3CompatibleLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Amazon S3 Compatible. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonS3CompatibleLinkedService.class, + visible = true) @JsonTypeName("AmazonS3Compatible") @Fluent public final class AmazonS3CompatibleLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3Compatible"; + /* * Amazon S3 Compatible linked service properties. */ @@ -33,6 +45,16 @@ public final class AmazonS3CompatibleLinkedService extends LinkedService { public AmazonS3CompatibleLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon S3 Compatible linked service properties. * @@ -129,9 +151,9 @@ public AmazonS3CompatibleLinkedService withSecretAccessKey(SecretBase secretAcce } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Get the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -140,9 +162,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the AmazonS3CompatibleLinkedService object itself. @@ -181,8 +203,8 @@ public AmazonS3CompatibleLinkedService withForcePathStyle(Object forcePathStyle) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -191,8 +213,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. 
* @return the AmazonS3CompatibleLinkedService object itself. @@ -214,8 +236,9 @@ public AmazonS3CompatibleLinkedService withEncryptedCredential(String encryptedC public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonS3CompatibleLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonS3CompatibleLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java index 5d1bd5e95d9d0..c037391516b24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of Amazon S3 Compatible dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonS3CompatibleLocation.class, + visible = true) @JsonTypeName("AmazonS3CompatibleLocation") @Fluent public final class AmazonS3CompatibleLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3CompatibleLocation"; + /* * Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string) */ @@ -34,6 +46,16 @@ public final class AmazonS3CompatibleLocation extends DatasetLocation { public AmazonS3CompatibleLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the bucketName property: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with * resultType string). 
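Editor's note: every model touched in these hunks swaps @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") for @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = <Model>.class, visible = true) and adds an explicit @JsonTypeId discriminator field with a type() override. The following is a minimal stand-alone sketch of that Jackson pattern; it uses plain Jackson with hypothetical Resource/ExampleResource classes, not the generated Azure models, and is only an illustration of the annotation behavior.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public final class PolymorphicTypeSketch {

        // Hypothetical base class standing in for LinkedService/Dataset/CopySource in this patch.
        @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Resource.class, visible = true)
        @JsonSubTypes({ @JsonSubTypes.Type(value = ExampleResource.class, name = "Example") })
        public static class Resource {
            public String type() {
                return "Resource";
            }
        }

        // Hypothetical subtype following the generated pattern: an explicit discriminator field
        // annotated with @JsonTypeId plus an overriding type() accessor.
        @JsonTypeName("Example")
        public static final class ExampleResource extends Resource {
            @JsonTypeId
            @JsonProperty(value = "type", required = true)
            private String type = "Example";

            @Override
            public String type() {
                return this.type;
            }
        }

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();

            // @JsonTypeId makes the field's value the serialized "type" discriminator ...
            String json = mapper.writeValueAsString(new ExampleResource());

            // ... visible = true feeds the discriminator back into the field on deserialization,
            // and defaultImpl keeps an unknown discriminator from failing outright.
            Resource roundTripped = mapper.readValue(json, Resource.class);
            System.out.println(json + " -> " + roundTripped.type()); // {"type":"Example"} -> Example
        }
    }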
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java index e76f10e365216..1441d5c248fea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Amazon S3 Compatible read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AmazonS3CompatibleReadSettings.class, + visible = true) @JsonTypeName("AmazonS3CompatibleReadSettings") @Fluent public final class AmazonS3CompatibleReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3CompatibleReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,8 +53,7 @@ public final class AmazonS3CompatibleReadSettings extends StoreReadSettings { private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -55,15 +65,13 @@ public final class AmazonS3CompatibleReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +95,18 @@ public AmazonS3CompatibleReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). 
+ * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +115,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AmazonS3CompatibleReadSettings object itself. @@ -175,8 +193,8 @@ public AmazonS3CompatibleReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -185,8 +203,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AmazonS3CompatibleReadSettings object itself. @@ -219,8 +237,8 @@ public AmazonS3CompatibleReadSettings withEnablePartitionDiscovery(Object enable } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -229,8 +247,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AmazonS3CompatibleReadSettings object itself. 
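Editor's note: the other change repeated in each model's validate() method replaces LOGGER.logExceptionAsError(new IllegalArgumentException(...)) with LOGGER.atError().log(new IllegalArgumentException(...)). The following is a hedged sketch of the resulting shape, assuming the azure-core ClientLogger/LoggingEventBuilder overload that the hunks above already rely on, and a hypothetical SampleModel with a single required property; it is not one of the generated classes.

    import com.azure.core.util.logging.ClientLogger;

    // Hypothetical model used only to illustrate the validate() pattern.
    public final class SampleModel {
        private static final ClientLogger LOGGER = new ClientLogger(SampleModel.class);

        private Object innerTypeProperties;

        public Object innerTypeProperties() {
            return this.innerTypeProperties;
        }

        public SampleModel withInnerTypeProperties(Object innerTypeProperties) {
            this.innerTypeProperties = innerTypeProperties;
            return this;
        }

        public void validate() {
            if (innerTypeProperties() == null) {
                // atError().log(...) records the exception at ERROR level and returns it,
                // so it can be thrown directly, matching the contract logExceptionAsError provided.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property innerTypeProperties in model SampleModel"));
            }
        }
    }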
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java index 8e48fd8831b46..9c211276477c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3DatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3Dataset.class, visible = true) @JsonTypeName("AmazonS3Object") @Fluent public final class AmazonS3Dataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3Object"; + /* * Amazon S3 dataset properties. */ @@ -32,6 +40,16 @@ public final class AmazonS3Dataset extends Dataset { public AmazonS3Dataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon S3 dataset properties. * @@ -305,8 +323,9 @@ public AmazonS3Dataset withCompression(DatasetCompression compression) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model AmazonS3Dataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonS3Dataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java index 199428485f606..9b39636d26a6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Amazon S3. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3LinkedService.class, visible = true) @JsonTypeName("AmazonS3") @Fluent public final class AmazonS3LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3"; + /* * Amazon S3 linked service properties. */ @@ -32,6 +40,16 @@ public final class AmazonS3LinkedService extends LinkedService { public AmazonS3LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Amazon S3 linked service properties. * @@ -154,8 +172,8 @@ public AmazonS3LinkedService withSecretAccessKey(SecretBase secretAccessKey) { /** * Get the serviceUrl property: This value specifies the endpoint to access with the S3 Connector. This is an - * optional property; change it only if you want to try a different service endpoint or want to switch between - * https and http. Type: string (or Expression with resultType string). + * optional property; change it only if you want to try a different service endpoint or want to switch between https + * and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -165,8 +183,8 @@ public Object serviceUrl() { /** * Set the serviceUrl property: This value specifies the endpoint to access with the S3 Connector. This is an - * optional property; change it only if you want to try a different service endpoint or want to switch between - * https and http. Type: string (or Expression with resultType string). + * optional property; change it only if you want to try a different service endpoint or want to switch between https + * and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the AmazonS3LinkedService object itself. @@ -203,8 +221,8 @@ public AmazonS3LinkedService withSessionToken(SecretBase sessionToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -213,8 +231,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AmazonS3LinkedService object itself. 
@@ -236,8 +254,9 @@ public AmazonS3LinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AmazonS3LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AmazonS3LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java index ee486f439de96..40322316a9813 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of amazon S3 dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3Location.class, visible = true) @JsonTypeName("AmazonS3Location") @Fluent public final class AmazonS3Location extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3Location"; + /* * Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) */ @@ -34,6 +42,16 @@ public final class AmazonS3Location extends DatasetLocation { public AmazonS3Location() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the bucketName property: Specify the bucketName of amazon S3. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java index b22896f556952..597a97e5b5b42 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Amazon S3 read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3ReadSettings.class, visible = true) @JsonTypeName("AmazonS3ReadSettings") @Fluent public final class AmazonS3ReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AmazonS3ReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,8 +49,7 @@ public final class AmazonS3ReadSettings extends StoreReadSettings { private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -55,15 +61,13 @@ public final class AmazonS3ReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +91,18 @@ public AmazonS3ReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AmazonS3ReadSettings object itself. 
@@ -175,8 +189,8 @@ public AmazonS3ReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -185,8 +199,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AmazonS3ReadSettings object itself. @@ -219,8 +233,8 @@ public AmazonS3ReadSettings withEnablePartitionDiscovery(Object enablePartitionD } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -229,8 +243,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AmazonS3ReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java index 2c297ccafe882..48c8b7eec2ac4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AppFiguresLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for AppFigures. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AppFiguresLinkedService.class, + visible = true) @JsonTypeName("AppFigures") @Fluent public final class AppFiguresLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AppFigures"; + /* * AppFigures linked service properties. */ @@ -32,6 +44,16 @@ public final class AppFiguresLinkedService extends LinkedService { public AppFiguresLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: AppFigures linked service properties. * @@ -157,8 +179,9 @@ public AppFiguresLinkedService withClientKey(SecretBase clientKey) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AppFiguresLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AppFiguresLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java index d0078e309287b..b43b77fc08ba2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AppendVariableActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Append value for a Variable of type Array. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AppendVariableActivity.class, visible = true) @JsonTypeName("AppendVariable") @Fluent public final class AppendVariableActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AppendVariable"; + /* * Append Variable activity properties. */ @@ -31,6 +39,16 @@ public final class AppendVariableActivity extends ControlActivity { public AppendVariableActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Append Variable activity properties. * @@ -118,8 +136,8 @@ public AppendVariableActivity withVariableName(String variableName) { } /** - * Get the value property: Value to be appended. Type: could be a static value matching type of the variable item - * or Expression with resultType matching type of the variable item. + * Get the value property: Value to be appended. Type: could be a static value matching type of the variable item or + * Expression with resultType matching type of the variable item. * * @return the value value. 
*/ @@ -128,8 +146,8 @@ public Object value() { } /** - * Set the value property: Value to be appended. Type: could be a static value matching type of the variable item - * or Expression with resultType matching type of the variable item. + * Set the value property: Value to be appended. Type: could be a static value matching type of the variable item or + * Expression with resultType matching type of the variable item. * * @param value the value value to set. * @return the AppendVariableActivity object itself. @@ -151,8 +169,9 @@ public AppendVariableActivity withValue(Object value) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AppendVariableActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AppendVariableActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java index e10c9ebf91c8a..91c2a19dd0dd2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AsanaLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Asana. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AsanaLinkedService.class, visible = true) @JsonTypeName("Asana") @Fluent public final class AsanaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Asana"; + /* * Asana linked service properties. */ @@ -32,6 +40,16 @@ public final class AsanaLinkedService extends LinkedService { public AsanaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Asana linked service properties. * @@ -101,8 +119,8 @@ public AsanaLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -111,8 +129,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AsanaLinkedService object itself. @@ -134,8 +152,9 @@ public AsanaLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AsanaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AsanaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java index 9de57c4f39edd..6e7eb40a15a0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AvroDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Avro dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroDataset.class, visible = true) @JsonTypeName("Avro") @Fluent public final class AvroDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Avro"; + /* * Avro dataset properties. */ @@ -31,6 +39,16 @@ public final class AvroDataset extends Dataset { public AvroDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Avro dataset properties. * @@ -127,8 +145,8 @@ public AvroDataset withLocation(DatasetLocation location) { } /** - * Get the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with - * resultType string). + * Get the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with resultType + * string). * * @return the avroCompressionCodec value. */ @@ -137,8 +155,8 @@ public Object avroCompressionCodec() { } /** - * Set the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with - * resultType string). + * Set the avroCompressionCodec property: The data avroCompressionCodec. Type: string (or Expression with resultType + * string). 
* * @param avroCompressionCodec the avroCompressionCodec value to set. * @return the AvroDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java index 16006aa8219d0..937f0ce9f7a6b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The data stored in Avro format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroFormat.class, visible = true) @JsonTypeName("AvroFormat") @Fluent public final class AvroFormat extends DatasetStorageFormat { + /* + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AvroFormat"; + /** * Creates an instance of AvroFormat class. */ public AvroFormat() { } + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java index 023e7fd4a6232..c24f6295ddb0f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Avro sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroSink.class, visible = true) @JsonTypeName("AvroSink") @Fluent public final class AvroSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AvroSink"; + /* * Avro store settings. */ @@ -34,6 +42,16 @@ public final class AvroSink extends CopySink { public AvroSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Avro store settings. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java index 210dfa1ec4615..af10b1d6f900c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Avro source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroSource.class, visible = true) @JsonTypeName("AvroSource") @Fluent public final class AvroSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AvroSource"; + /* * Avro store settings. */ @@ -23,8 +31,7 @@ public final class AvroSource extends CopySource { private StoreReadSettings storeSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class AvroSource extends CopySource { public AvroSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Avro store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java index 26386fc86b1dc..a1013a15c6f2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Avro write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroWriteSettings.class, visible = true) @JsonTypeName("AvroWriteSettings") @Fluent public final class AvroWriteSettings extends FormatWriteSettings { + /* + * The write setting type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AvroWriteSettings"; + /* * Top level record name in write result, which is required in AVRO spec. */ @@ -29,15 +37,13 @@ public final class AvroWriteSettings extends FormatWriteSettings { private String recordNamespace; /* - * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or - * Expression with resultType integer). + * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* - * Specifies the file name pattern _. when copy from non-file based store - * without partitionOptions. Type: string (or Expression with resultType string). + * Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; @@ -48,6 +54,16 @@ public final class AvroWriteSettings extends FormatWriteSettings { public AvroWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the recordName property: Top level record name in write result, which is required in AVRO spec. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java index d7f8f6f05f980..d267bf98f47c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java @@ -8,16 +8,24 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzPowerShellSetupTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The express custom setup of installing Azure PowerShell. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzPowerShellSetup.class, visible = true) @JsonTypeName("AzPowerShellSetup") @Fluent public final class AzPowerShellSetup extends CustomSetupBase { + /* + * The type of custom setup. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzPowerShellSetup"; + /* * Install Azure PowerShell type properties. */ @@ -30,6 +38,16 @@ public final class AzPowerShellSetup extends CustomSetupBase { public AzPowerShellSetup() { } + /** + * Get the type property: The type of custom setup. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Install Azure PowerShell type properties. 
* @@ -71,8 +89,9 @@ public AzPowerShellSetup withVersion(String version) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzPowerShellSetup")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzPowerShellSetup")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java index e3c037c34e3bc..a57b51fcf7587 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureBatchLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Batch linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBatchLinkedService.class, + visible = true) @JsonTypeName("AzureBatch") @Fluent public final class AzureBatchLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBatch"; + /* * Azure Batch linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureBatchLinkedService extends LinkedService { public AzureBatchLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Batch linked service properties. * @@ -193,8 +215,8 @@ public AzureBatchLinkedService withLinkedServiceName(LinkedServiceReference link } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -203,8 +225,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBatchLinkedService object itself. 
@@ -249,8 +271,9 @@ public AzureBatchLinkedService withCredential(CredentialReference credential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureBatchLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureBatchLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java index 5528301822787..c50b49a055a3a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Azure Blob storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobDataset.class, visible = true) @JsonTypeName("AzureBlob") @Fluent public final class AzureBlobDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlob"; + /* * Azure Blob dataset properties. */ @@ -31,6 +39,16 @@ public final class AzureBlobDataset extends Dataset { public AzureBlobDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Blob dataset properties. * @@ -200,8 +218,8 @@ public AzureBlobDataset withModifiedDatetimeStart(Object modifiedDatetimeStart) } /** - * Get the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression - * with resultType string). + * Get the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression with + * resultType string). * * @return the modifiedDatetimeEnd value. */ @@ -210,8 +228,8 @@ public Object modifiedDatetimeEnd() { } /** - * Set the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression - * with resultType string). + * Set the modifiedDatetimeEnd property: The end of Azure Blob's modified datetime. Type: string (or Expression with + * resultType string). * * @param modifiedDatetimeEnd the modifiedDatetimeEnd value to set. * @return the AzureBlobDataset object itself. 
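The discriminator change repeated across these models can be exercised in isolation. The sketch below is a minimal, hypothetical stand-in (BaseLocation, S3LikeLocation and TypeIdRoundTrip are not SDK classes) for the generated pattern of @JsonTypeInfo(..., visible = true, defaultImpl = ...) plus a @JsonTypeId-annotated "type" field: the discriminator is written exactly once on serialization, survives deserialization, and is readable through the new type() accessor.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Hypothetical base type standing in for DatasetLocation; not an SDK class.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BaseLocation.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(value = S3LikeLocation.class, name = "AmazonS3Location") })
    class BaseLocation {
        @JsonProperty("folderPath")
        public String folderPath;
    }

    // Hypothetical subtype mirroring the generated AmazonS3Location shape.
    @JsonTypeName("AmazonS3Location")
    class S3LikeLocation extends BaseLocation {
        // The discriminator is now a real field: it supplies the type id and is
        // populated back on deserialization because the type property is visible.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "AmazonS3Location";

        @JsonProperty("bucketName")
        public String bucketName;

        public String type() {
            return this.type;
        }
    }

    public class TypeIdRoundTrip {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            S3LikeLocation location = new S3LikeLocation();
            location.bucketName = "my-bucket";

            // e.g. {"type":"AmazonS3Location","folderPath":null,"bucketName":"my-bucket"}
            String json = mapper.writeValueAsString(location);
            System.out.println(json);

            BaseLocation parsed = mapper.readValue(json, BaseLocation.class);
            System.out.println(((S3LikeLocation) parsed).type()); // AmazonS3Location
        }
    }

Because defaultImpl points at a concrete class, a payload carrying an unrecognized discriminator falls back to that class instead of failing deserialization, which appears to be the motivation for redeclaring the annotation on every generated subtype.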
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java index 3eb3b0b2b83a4..c76faab16b2b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Azure Data Lake Storage Gen2 storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSDataset.class, visible = true) @JsonTypeName("AzureBlobFSFile") @Fluent public final class AzureBlobFSDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSFile"; + /* * Azure Data Lake Storage Gen2 dataset properties. */ @@ -31,6 +39,16 @@ public final class AzureBlobFSDataset extends Dataset { public AzureBlobFSDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Lake Storage Gen2 dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java index eae85a7c237f0..7373b38c080eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Data Lake Storage Gen2 linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobFSLinkedService.class, + visible = true) @JsonTypeName("AzureBlobFS") @Fluent public final class AzureBlobFSLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFS"; + /* * Azure Data Lake Storage Gen2 linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureBlobFSLinkedService extends LinkedService { public AzureBlobFSLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Lake Storage Gen2 linked service properties. * @@ -153,8 +175,8 @@ public AzureBlobFSLinkedService withServicePrincipalId(Object servicePrincipalId } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Storage Gen2 account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Storage Gen2 account. * * @return the servicePrincipalKey value. */ @@ -163,8 +185,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Storage Gen2 account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Storage Gen2 account. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureBlobFSLinkedService object itself. @@ -203,9 +225,9 @@ public AzureBlobFSLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -214,9 +236,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureBlobFSLinkedService object itself. @@ -230,8 +252,8 @@ public AzureBlobFSLinkedService withAzureCloudType(Object azureCloudType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -240,8 +262,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBlobFSLinkedService object itself. @@ -278,9 +300,9 @@ public AzureBlobFSLinkedService withCredential(CredentialReference credential) { } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -289,9 +311,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the AzureBlobFSLinkedService object itself. @@ -390,8 +412,9 @@ public AzureBlobFSLinkedService withSasToken(SecretBase sasToken) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureBlobFSLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureBlobFSLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java index 27fa5a763b368..624cf465471df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of azure blobFS dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSLocation.class, visible = true) @JsonTypeName("AzureBlobFSLocation") @Fluent public final class AzureBlobFSLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSLocation"; + /* * Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzureBlobFSLocation extends DatasetLocation { public AzureBlobFSLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the fileSystem property: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java index 3ba762247a1c3..d2385bcb6082c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure blobFS read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobFSReadSettings.class, + visible = true) @JsonTypeName("AzureBlobFSReadSettings") @Fluent public final class AzureBlobFSReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,8 +47,7 @@ public final class AzureBlobFSReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -49,15 +59,13 @@ public final class AzureBlobFSReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -81,8 +89,18 @@ public AzureBlobFSReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -91,8 +109,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureBlobFSReadSettings object itself. @@ -103,8 +121,8 @@ public AzureBlobFSReadSettings withRecursive(Object recursive) { } /** - * Get the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with - * resultType string). + * Get the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType + * string). * * @return the wildcardFolderPath value. */ @@ -113,8 +131,8 @@ public Object wildcardFolderPath() { } /** - * Set the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with - * resultType string). + * Set the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType + * string). * * @param wildcardFolderPath the wildcardFolderPath value to set. * @return the AzureBlobFSReadSettings object itself. @@ -147,8 +165,8 @@ public AzureBlobFSReadSettings withWildcardFileName(Object wildcardFileName) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -157,8 +175,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). 
+ * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AzureBlobFSReadSettings object itself. @@ -191,8 +209,8 @@ public AzureBlobFSReadSettings withEnablePartitionDiscovery(Object enablePartiti } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -201,8 +219,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AzureBlobFSReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java index 0f79c19840dfd..98fce92e3b295 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,17 @@ /** * A copy activity Azure Data Lake Storage Gen2 sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSSink.class, visible = true) @JsonTypeName("AzureBlobFSSink") @Fluent public final class AzureBlobFSSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSSink"; + /* * The type of copy behavior for copy sink. Type: string (or Expression with resultType string). */ @@ -24,8 +32,7 @@ public final class AzureBlobFSSink extends CopySink { private Object copyBehavior; /* - * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType - * array of objects). + * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). */ @JsonProperty(value = "metadata") private List metadata; @@ -36,6 +43,16 @@ public final class AzureBlobFSSink extends CopySink { public AzureBlobFSSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the copyBehavior property: The type of copy behavior for copy sink. 
Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java index bb8b1cee16554..6003c6cfa9f4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure BlobFS source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSSource.class, visible = true) @JsonTypeName("AzureBlobFSSource") @Fluent public final class AzureBlobFSSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSSource"; + /* * Treat empty as null. Type: boolean (or Expression with resultType boolean). */ @@ -29,8 +37,7 @@ public final class AzureBlobFSSource extends CopySource { private Object skipHeaderLineCount; /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -41,6 +48,16 @@ public final class AzureBlobFSSource extends CopySource { public AzureBlobFSSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the treatEmptyAsNull property: Treat empty as null. Type: boolean (or Expression with resultType boolean). * @@ -62,8 +79,8 @@ public AzureBlobFSSource withTreatEmptyAsNull(Object treatEmptyAsNull) { } /** - * Get the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or - * Expression with resultType integer). + * Get the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or Expression + * with resultType integer). * * @return the skipHeaderLineCount value. */ @@ -72,8 +89,8 @@ public Object skipHeaderLineCount() { } /** - * Set the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or - * Expression with resultType integer). + * Set the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or Expression + * with resultType integer). * * @param skipHeaderLineCount the skipHeaderLineCount value to set. * @return the AzureBlobFSSource object itself. @@ -84,8 +101,8 @@ public AzureBlobFSSource withSkipHeaderLineCount(Object skipHeaderLineCount) { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). 
+ * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -94,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureBlobFSSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java index e701793178afd..1160807b4ef3d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,21 @@ /** * Azure blobFS write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobFSWriteSettings.class, + visible = true) @JsonTypeName("AzureBlobFSWriteSettings") @Fluent public final class AzureBlobFSWriteSettings extends StoreWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobFSWriteSettings"; + /* * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */ @@ -29,6 +41,16 @@ public final class AzureBlobFSWriteSettings extends StoreWriteSettings { public AzureBlobFSWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the blockSizeInMB property: Indicates the block size(MB) when writing data to blob. Type: integer (or * Expression with resultType integer). 
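The other change repeated across the models is in validate(): LOGGER.logExceptionAsError(...) is replaced by the fluent LOGGER.atError().log(...), which records the exception and hands it back so it can still be thrown inline. A minimal sketch of that pattern, using a hypothetical ExampleLinkedService model rather than a generated one:

    import com.azure.core.util.logging.ClientLogger;

    // Hypothetical model used only to illustrate the validation pattern; not an SDK class.
    public final class ExampleLinkedService {
        private static final ClientLogger LOGGER = new ClientLogger(ExampleLinkedService.class);

        private Object innerTypeProperties;

        /**
         * Validates the instance, mirroring the generated pattern: the exception is
         * routed through the logger's fluent builder and then thrown to the caller.
         */
        public void validate() {
            if (innerTypeProperties == null) {
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property innerTypeProperties in model ExampleLinkedService"));
            }
        }
    }

Calling validate() on an instance whose innerTypeProperties is null both records the error through the ClientLogger and surfaces the same IllegalArgumentException to the caller, matching the behavior of the replaced logExceptionAsError call.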
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java index 4374c3684742c..fda293096d57f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The azure blob storage linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobStorageLinkedService.class, + visible = true) @JsonTypeName("AzureBlobStorage") @Fluent public final class AzureBlobStorageLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobStorage"; + /* * Azure Blob Storage linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureBlobStorageLinkedService extends LinkedService { public AzureBlobStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Blob Storage linked service properties. * @@ -275,9 +297,9 @@ public AzureBlobStorageLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -286,9 +308,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureBlobStorageLinkedService object itself. 
@@ -329,8 +351,8 @@ public AzureBlobStorageLinkedService withAccountKind(Object accountKind) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -339,8 +361,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureBlobStorageLinkedService object itself. @@ -433,8 +455,9 @@ public AzureBlobStorageLinkedService withContainerUri(Object containerUri) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureBlobStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureBlobStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java index b066aaa6395de..f499d459acd97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of azure blob dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobStorageLocation.class, + visible = true) @JsonTypeName("AzureBlobStorageLocation") @Fluent public final class AzureBlobStorageLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobStorageLocation"; + /* * Specify the container of azure blob. Type: string (or Expression with resultType string). */ @@ -28,6 +40,16 @@ public final class AzureBlobStorageLocation extends DatasetLocation { public AzureBlobStorageLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the container property: Specify the container of azure blob. 
Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java index a249291c0419c..d437cf1477a99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure blob read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobStorageReadSettings.class, + visible = true) @JsonTypeName("AzureBlobStorageReadSettings") @Fluent public final class AzureBlobStorageReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobStorageReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,8 +53,7 @@ public final class AzureBlobStorageReadSettings extends StoreReadSettings { private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -55,15 +65,13 @@ public final class AzureBlobStorageReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +95,18 @@ public AzureBlobStorageReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). 
+ * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +115,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureBlobStorageReadSettings object itself. @@ -175,8 +193,8 @@ public AzureBlobStorageReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -185,8 +203,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AzureBlobStorageReadSettings object itself. @@ -219,8 +237,8 @@ public AzureBlobStorageReadSettings withEnablePartitionDiscovery(Object enablePa } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -229,8 +247,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AzureBlobStorageReadSettings object itself. 
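The read-settings models above keep every setting typed as Object because, per the Javadoc, a value may be either a literal or a Data Factory expression ("Type: string (or Expression with resultType string)"). A hedged usage sketch follows, using only the constructor, getters, and fluent withers that appear in this file's diff; it assumes the usual ADF expression payload shape ({"value": "@...", "type": "Expression"}), and the prefix, parameter name, and paths are made up for illustration.

import com.azure.resourcemanager.datafactory.models.AzureBlobStorageReadSettings;

import java.util.LinkedHashMap;
import java.util.Map;

public final class ReadSettingsUsageSketch {
    public static void main(String[] args) {
        // Literal values: the Object-typed withers accept plain Java literals directly.
        AzureBlobStorageReadSettings literalSettings = new AzureBlobStorageReadSettings()
            .withRecursive(true)
            .withPrefix("raw/2024/")
            .withEnablePartitionDiscovery(false);

        // Expression values: wrap the expression text in the assumed ADF expression shape
        // so the service evaluates it at run time instead of treating it as a fixed string.
        Map<String, Object> partitionRootExpression = new LinkedHashMap<>();
        partitionRootExpression.put("value", "@pipeline().parameters.partitionRoot");
        partitionRootExpression.put("type", "Expression");

        AzureBlobStorageReadSettings expressionSettings = new AzureBlobStorageReadSettings()
            .withRecursive(true)
            .withPartitionRootPath(partitionRootExpression);

        // recursive() holds the literal; partitionRootPath() holds the expression map.
        System.out.println(literalSettings.recursive() + " / " + expressionSettings.partitionRootPath());
    }
}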
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java index 4475ad1c79b8c..578d94ad46270 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,21 @@ /** * Azure blob write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureBlobStorageWriteSettings.class, + visible = true) @JsonTypeName("AzureBlobStorageWriteSettings") @Fluent public final class AzureBlobStorageWriteSettings extends StoreWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureBlobStorageWriteSettings"; + /* * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */ @@ -29,6 +41,16 @@ public final class AzureBlobStorageWriteSettings extends StoreWriteSettings { public AzureBlobStorageWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the blockSizeInMB property: Indicates the block size(MB) when writing data to blob. Type: integer (or * Expression with resultType integer). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java index 6a0f10c691bae..e8b37844d1639 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerCommandActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure Data Explorer command activity. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataExplorerCommandActivity.class, + visible = true) @JsonTypeName("AzureDataExplorerCommand") @Fluent public final class AzureDataExplorerCommandActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataExplorerCommand"; + /* * Azure Data Explorer command activity properties. */ @@ -32,6 +44,16 @@ public final class AzureDataExplorerCommandActivity extends ExecutionActivity { public AzureDataExplorerCommandActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Explorer command activity properties. * @@ -172,8 +194,9 @@ public AzureDataExplorerCommandActivity withCommandTimeout(Object commandTimeout public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDataExplorerCommandActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDataExplorerCommandActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java index d40789a886398..04cfee8c6c59d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Data Explorer (Kusto) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataExplorerLinkedService.class, + visible = true) @JsonTypeName("AzureDataExplorer") @Fluent public final class AzureDataExplorerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataExplorer"; + /* * Azure Data Explorer (Kusto) linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureDataExplorerLinkedService extends LinkedService { public AzureDataExplorerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Explorer (Kusto) linked service properties. * @@ -79,9 +101,9 @@ public AzureDataExplorerLinkedService withAnnotations(List annotations) } /** - * Get the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the - * format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with - * resultType string). + * Get the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format + * https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType + * string). * * @return the endpoint value. */ @@ -90,9 +112,9 @@ public Object endpoint() { } /** - * Set the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the - * format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with - * resultType string). + * Set the endpoint property: The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format + * https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType + * string). * * @param endpoint the endpoint value to set. * @return the AzureDataExplorerLinkedService object itself. @@ -233,8 +255,9 @@ public AzureDataExplorerLinkedService withCredential(CredentialReference credent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDataExplorerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDataExplorerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java index 3df8037f6abfa..0c36692a3a50e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Data Explorer sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureDataExplorerSink.class, visible = true) @JsonTypeName("AzureDataExplorerSink") @Fluent public final class AzureDataExplorerSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataExplorerSink"; + /* * A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. 
*/ @@ -40,6 +48,16 @@ public final class AzureDataExplorerSink extends CopySink { public AzureDataExplorerSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the ingestionMappingName property: A name of a pre-created csv mapping that was defined on the target Kusto * table. Type: string. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java index 7969aea88bab0..e70804b442842 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java @@ -7,40 +7,48 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Data Explorer (Kusto) source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataExplorerSource.class, + visible = true) @JsonTypeName("AzureDataExplorerSource") @Fluent public final class AzureDataExplorerSource extends CopySource { /* - * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType - * string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataExplorerSource"; + + /* + * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). */ @JsonProperty(value = "query", required = true) private Object query; /* - * The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a - * certain row-count limit. + * The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. */ @JsonProperty(value = "noTruncation") private Object noTruncation; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -52,8 +60,18 @@ public AzureDataExplorerSource() { } /** - * Get the query property: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or - * Expression with resultType string). 
+ * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the query property: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression + * with resultType string). * * @return the query value. */ @@ -62,8 +80,8 @@ public Object query() { } /** - * Set the query property: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or - * Expression with resultType string). + * Set the query property: Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression + * with resultType string). * * @param query the query value to set. * @return the AzureDataExplorerSource object itself. @@ -184,8 +202,8 @@ public AzureDataExplorerSource withDisableMetricsCollection(Object disableMetric public void validate() { super.validate(); if (query() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property query in model AzureDataExplorerSource")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property query in model AzureDataExplorerSource")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java index 93109a33512ce..e60bdb457bd49 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The Azure Data Explorer (Kusto) dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataExplorerTableDataset.class, + visible = true) @JsonTypeName("AzureDataExplorerTable") @Fluent public final class AzureDataExplorerTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataExplorerTable"; + /* * Azure Data Explorer (Kusto) dataset properties. */ @@ -32,6 +44,16 @@ public final class AzureDataExplorerTableDataset extends Dataset { public AzureDataExplorerTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Explorer (Kusto) dataset properties. 
* @@ -138,8 +160,9 @@ public AzureDataExplorerTableDataset withTable(Object table) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDataExplorerTableDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDataExplorerTableDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java index 9a3a3abd66d61..62494197e0968 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeAnalyticsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Data Lake Analytics linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeAnalyticsLinkedService.class, + visible = true) @JsonTypeName("AzureDataLakeAnalytics") @Fluent public final class AzureDataLakeAnalyticsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeAnalytics"; + /* * Azure Data Lake Analytics linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureDataLakeAnalyticsLinkedService extends LinkedService { public AzureDataLakeAnalyticsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Lake Analytics linked service properties. * @@ -129,8 +151,8 @@ public AzureDataLakeAnalyticsLinkedService withServicePrincipalId(Object service } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Analytics account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Analytics account. * * @return the servicePrincipalKey value. */ @@ -139,8 +161,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Analytics account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Analytics account. * * @param servicePrincipalKey the servicePrincipalKey value to set. 
* @return the AzureDataLakeAnalyticsLinkedService object itself. @@ -254,8 +276,8 @@ public AzureDataLakeAnalyticsLinkedService withDataLakeAnalyticsUri(Object dataL } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -264,8 +286,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDataLakeAnalyticsLinkedService object itself. @@ -287,8 +309,9 @@ public AzureDataLakeAnalyticsLinkedService withEncryptedCredential(String encryp public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDataLakeAnalyticsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDataLakeAnalyticsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java index 9cc70705ec634..4a16934b17cb5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure Data Lake Store dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreDataset.class, + visible = true) @JsonTypeName("AzureDataLakeStoreFile") @Fluent public final class AzureDataLakeStoreDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreFile"; + /* * Azure Data Lake Store dataset properties. */ @@ -31,6 +43,16 @@ public final class AzureDataLakeStoreDataset extends Dataset { public AzureDataLakeStoreDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Lake Store dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java index 2b3bf60de55e8..56d0fd60abd8e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Data Lake Store linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreLinkedService.class, + visible = true) @JsonTypeName("AzureDataLakeStore") @Fluent public final class AzureDataLakeStoreLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStore"; + /* * Azure Data Lake Store linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureDataLakeStoreLinkedService extends LinkedService { public AzureDataLakeStoreLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Data Lake Store linked service properties. * @@ -129,8 +151,8 @@ public AzureDataLakeStoreLinkedService withServicePrincipalId(Object servicePrin } /** - * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Store account. + * Get the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Store account. * * @return the servicePrincipalKey value. */ @@ -139,8 +161,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data - * Lake Store account. + * Set the servicePrincipalKey property: The Key of the application used to authenticate against the Azure Data Lake + * Store account. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureDataLakeStoreLinkedService object itself. @@ -179,9 +201,9 @@ public AzureDataLakeStoreLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). 
+ * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -190,9 +212,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureDataLakeStoreLinkedService object itself. @@ -279,8 +301,8 @@ public AzureDataLakeStoreLinkedService withResourceGroupName(Object resourceGrou } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -289,8 +311,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDataLakeStoreLinkedService object itself. 
@@ -335,8 +357,9 @@ public AzureDataLakeStoreLinkedService withCredential(CredentialReference creden public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDataLakeStoreLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDataLakeStoreLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java index a16c8045dcff3..b950b2deabc88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java @@ -5,22 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of azure data lake store dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreLocation.class, + visible = true) @JsonTypeName("AzureDataLakeStoreLocation") @Fluent public final class AzureDataLakeStoreLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreLocation"; + /** * Creates an instance of AzureDataLakeStoreLocation class. */ public AzureDataLakeStoreLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java index 1594b65c66179..893bf098550f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure data lake store read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreReadSettings.class, + visible = true) @JsonTypeName("AzureDataLakeStoreReadSettings") @Fluent public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,24 +47,19 @@ public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; /* - * Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the - * folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with - * resultType string). + * Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */ @JsonProperty(value = "listAfter") private Object listAfter; /* - * Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the - * folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with - * resultType string). + * Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */ @JsonProperty(value = "listBefore") private Object listBefore; @@ -65,15 +71,13 @@ public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
*/ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -97,8 +101,18 @@ public AzureDataLakeStoreReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -107,8 +121,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureDataLakeStoreReadSettings object itself. @@ -161,8 +175,8 @@ public AzureDataLakeStoreReadSettings withWildcardFileName(Object wildcardFileNa } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -171,8 +185,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AzureDataLakeStoreReadSettings object itself. @@ -207,9 +221,9 @@ public AzureDataLakeStoreReadSettings withListAfter(Object listAfter) { } /** - * Get the listBefore property: Lists files before the value (inclusive) based on file/folder names’ - * lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the - * folderPath. Type: string (or Expression with resultType string). + * Get the listBefore property: Lists files before the value (inclusive) based on file/folder names’ lexicographical + * order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string + * (or Expression with resultType string). * * @return the listBefore value. */ @@ -218,9 +232,9 @@ public Object listBefore() { } /** - * Set the listBefore property: Lists files before the value (inclusive) based on file/folder names’ - * lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the - * folderPath. Type: string (or Expression with resultType string). + * Set the listBefore property: Lists files before the value (inclusive) based on file/folder names’ lexicographical + * order. 
Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string + * (or Expression with resultType string). * * @param listBefore the listBefore value to set. * @return the AzureDataLakeStoreReadSettings object itself. @@ -253,8 +267,8 @@ public AzureDataLakeStoreReadSettings withEnablePartitionDiscovery(Object enable } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -263,8 +277,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AzureDataLakeStoreReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java index 0d7dc3b5a564a..515f1d8355e87 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Data Lake Store sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureDataLakeStoreSink.class, visible = true) @JsonTypeName("AzureDataLakeStoreSink") @Fluent public final class AzureDataLakeStoreSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreSink"; + /* * The type of copy behavior for copy sink. Type: string (or Expression with resultType string). */ @@ -34,6 +42,16 @@ public final class AzureDataLakeStoreSink extends CopySink { public AzureDataLakeStoreSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the copyBehavior property: The type of copy behavior for copy sink. Type: string (or Expression with * resultType string). 
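The validate() methods throughout this patch switch from LOGGER.logExceptionAsError(...) to the builder-style LOGGER.atError().log(...), which logs the event and hands the same exception back so it can be thrown in place. A small isolated sketch of that pattern, assuming an azure-core version whose LoggingEventBuilder.log(Throwable) returns the throwable (as the generated code relies on); the class name, field, and message are illustrative only.

import com.azure.core.util.logging.ClientLogger;

public final class ValidateLoggingSketch {
    private static final ClientLogger LOGGER = new ClientLogger(ValidateLoggingSketch.class);

    private Object innerTypeProperties; // stand-in for the generated innerTypeProperties field

    public void validate() {
        if (innerTypeProperties == null) {
            // Builder-style replacement for logExceptionAsError: the event is logged at ERROR
            // level and the same exception instance is returned, so it can be thrown directly.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property innerTypeProperties in model ValidateLoggingSketch"));
        }
    }

    public static void main(String[] args) {
        try {
            new ValidateLoggingSketch().validate();
        } catch (IllegalArgumentException e) {
            System.out.println("validation failed as expected: " + e.getMessage());
        }
    }
}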
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java index c95799e83429e..8db3d8d29c2bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Data Lake source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreSource.class, + visible = true) @JsonTypeName("AzureDataLakeStoreSource") @Fluent public final class AzureDataLakeStoreSource extends CopySource { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreSource"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -30,8 +41,18 @@ public AzureDataLakeStoreSource() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -40,8 +61,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureDataLakeStoreSource object itself. 
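One likely motivation for adding defaultImpl and visible = true is deserialization resilience: a recognized "type" value resolves to the registered subtype, an unrecognized one can fall back to the defaultImpl class rather than fail, and visible = true lets the raw discriminator string be bound onto the model. A small plain-Jackson sketch under those assumptions; StoreWriteSettingsBase and BlobFsWriteSettings are hypothetical stand-ins, and the fallback behavior shown is standard Jackson behavior, not something this patch asserts.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class DefaultImplFallbackSketch {

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StoreWriteSettingsBase.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(value = BlobFsWriteSettings.class, name = "AzureBlobFSWriteSettings") })
    public static class StoreWriteSettingsBase {
        // visible = true asks Jackson to also bind the raw discriminator value to this property.
        @JsonProperty("type")
        public String type;
    }

    @JsonTypeName("AzureBlobFSWriteSettings")
    public static final class BlobFsWriteSettings extends StoreWriteSettingsBase {
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // A registered discriminator resolves to the concrete subtype.
        StoreWriteSettingsBase known =
            mapper.readValue("{\"type\":\"AzureBlobFSWriteSettings\"}", StoreWriteSettingsBase.class);
        System.out.println(known.getClass().getSimpleName()); // BlobFsWriteSettings

        // An unrecognized discriminator falls back to defaultImpl instead of failing, so payloads
        // carrying newer service-side type values still deserialize; the raw string stays readable.
        StoreWriteSettingsBase unknown =
            mapper.readValue("{\"type\":\"SomeFutureWriteSettings\"}", StoreWriteSettingsBase.class);
        System.out.println(unknown.getClass().getSimpleName() + " / " + unknown.type);
    }
}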
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java index 7b4132e76734c..b4b10889c54b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,13 +14,23 @@ /** * Azure data lake store write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDataLakeStoreWriteSettings.class, + visible = true) @JsonTypeName("AzureDataLakeStoreWriteSettings") @Fluent public final class AzureDataLakeStoreWriteSettings extends StoreWriteSettings { /* - * Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of - * "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDataLakeStoreWriteSettings"; + + /* + * Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). */ @JsonProperty(value = "expiryDateTime") private Object expiryDateTime; @@ -30,6 +41,16 @@ public final class AzureDataLakeStoreWriteSettings extends StoreWriteSettings { public AzureDataLakeStoreWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the expiryDateTime property: Specifies the expiry time of the written files. The time is applied to the UTC * time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. 
Type: string (or Expression with diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java index d680dd5b87463..2e7fa0535bf6b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDeltaLakeDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure Databricks Delta Lake dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeDataset.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLakeDataset") @Fluent public final class AzureDatabricksDeltaLakeDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLakeDataset"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class AzureDatabricksDeltaLakeDataset extends Dataset { public AzureDatabricksDeltaLakeDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * @@ -127,8 +149,7 @@ public AzureDatabricksDeltaLakeDataset withTable(Object table) { } /** - * Get the database property: The database name of delta table. Type: string (or Expression with resultType - * string). + * Get the database property: The database name of delta table. Type: string (or Expression with resultType string). * * @return the database value. */ @@ -137,8 +158,7 @@ public Object database() { } /** - * Set the database property: The database name of delta table. Type: string (or Expression with resultType - * string). + * Set the database property: The database name of delta table. Type: string (or Expression with resultType string). * * @param database the database value to set. * @return the AzureDatabricksDeltaLakeDataset object itself. 
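The dataset model above delegates database and table to its inner type-properties bag behind fluent setters. A short sketch of how a caller might populate it, assuming the withDatabase(Object) setter implied by the Javadoc (withTable(Object) appears in the hunk itself); the database and table names are placeholders.

import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeDataset;

public final class DeltaLakeDatasetSketch {
    public static void main(String[] args) {
        // Both properties are Object-typed, so plain string literals work here;
        // a Data Factory expression object could be supplied instead.
        AzureDatabricksDeltaLakeDataset dataset = new AzureDatabricksDeltaLakeDataset()
            .withDatabase("sales_db")     // assumed fluent setter (see Javadoc above)
            .withTable("daily_orders");   // withTable(Object) is shown in the hunk above

        System.out.println(dataset.type());     // AzureDatabricksDeltaLakeDataset
        System.out.println(dataset.database()); // sales_db
    }
}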
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java index c1faa11cbea4c..ac1eb9ad6871a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java @@ -6,26 +6,36 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure Databricks Delta Lake export command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeExportCommand.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLakeExportCommand") @Fluent public final class AzureDatabricksDeltaLakeExportCommand extends ExportSettings { /* - * Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with - * resultType string). + * The export setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLakeExportCommand"; + + /* + * Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "dateFormat") private Object dateFormat; /* - * Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with - * resultType string). + * Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "timestampFormat") private Object timestampFormat; @@ -36,6 +46,16 @@ public final class AzureDatabricksDeltaLakeExportCommand extends ExportSettings public AzureDatabricksDeltaLakeExportCommand() { } + /** + * Get the type property: The export setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the dateFormat property: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: * string (or Expression with resultType string). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java index 410df15ff1c3f..17853e01a0e46 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java @@ -6,26 +6,36 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure Databricks Delta Lake import command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeImportCommand.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLakeImportCommand") @Fluent public final class AzureDatabricksDeltaLakeImportCommand extends ImportSettings { /* - * Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType - * string). + * The import setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLakeImportCommand"; + + /* + * Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "dateFormat") private Object dateFormat; /* - * Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with - * resultType string). + * Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "timestampFormat") private Object timestampFormat; @@ -36,6 +46,16 @@ public final class AzureDatabricksDeltaLakeImportCommand extends ImportSettings public AzureDatabricksDeltaLakeImportCommand() { } + /** + * Get the type property: The import setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the dateFormat property: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string * (or Expression with resultType string). @@ -59,8 +79,8 @@ public AzureDatabricksDeltaLakeImportCommand withDateFormat(Object dateFormat) { } /** - * Get the timestampFormat property: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. - * Type: string (or Expression with resultType string). + * Get the timestampFormat property: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: + * string (or Expression with resultType string). * * @return the timestampFormat value. */ @@ -69,8 +89,8 @@ public Object timestampFormat() { } /** - * Set the timestampFormat property: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. - * Type: string (or Expression with resultType string). + * Set the timestampFormat property: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. 
Type: + * string (or Expression with resultType string). * * @param timestampFormat the timestampFormat value to set. * @return the AzureDatabricksDeltaLakeImportCommand object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java index 235c30e30b0c5..f8e48dc081326 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDetltaLakeLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Databricks Delta Lake linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeLinkedService.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLake") @Fluent public final class AzureDatabricksDeltaLakeLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLake"; + /* * Azure Databricks Delta Lake linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureDatabricksDeltaLakeLinkedService extends LinkedService { public AzureDatabricksDeltaLakeLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Databricks Delta Lake linked service properties. * @@ -131,8 +153,8 @@ public AzureDatabricksDeltaLakeLinkedService withAccessToken(SecretBase accessTo } /** - * Get the clusterId property: The id of an existing interactive cluster that will be used for all runs of this - * job. Type: string (or Expression with resultType string). + * Get the clusterId property: The id of an existing interactive cluster that will be used for all runs of this job. + * Type: string (or Expression with resultType string). * * @return the clusterId value. */ @@ -141,8 +163,8 @@ public Object clusterId() { } /** - * Set the clusterId property: The id of an existing interactive cluster that will be used for all runs of this - * job. Type: string (or Expression with resultType string). + * Set the clusterId property: The id of an existing interactive cluster that will be used for all runs of this job. + * Type: string (or Expression with resultType string). * * @param clusterId the clusterId value to set. * @return the AzureDatabricksDeltaLakeLinkedService object itself. 
@@ -156,8 +178,8 @@ public AzureDatabricksDeltaLakeLinkedService withClusterId(Object clusterId) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -166,8 +188,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDatabricksDeltaLakeLinkedService object itself. @@ -237,8 +259,9 @@ public AzureDatabricksDeltaLakeLinkedService withWorkspaceResourceId(Object work public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDatabricksDeltaLakeLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDatabricksDeltaLakeLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java index 090f2e2b82fbe..9eb4985e5dea5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Databricks Delta Lake sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeSink.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLakeSink") @Fluent public final class AzureDatabricksDeltaLakeSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLakeSink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -34,6 +46,16 @@ public final class AzureDatabricksDeltaLakeSink extends CopySink { public AzureDatabricksDeltaLakeSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. 
Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java index 5a123fb757425..2444ddddd6238 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Databricks Delta Lake source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksDeltaLakeSource.class, + visible = true) @JsonTypeName("AzureDatabricksDeltaLakeSource") @Fluent public final class AzureDatabricksDeltaLakeSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricksDeltaLakeSource"; + /* * Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). */ @@ -34,6 +46,16 @@ public final class AzureDatabricksDeltaLakeSource extends CopySource { public AzureDatabricksDeltaLakeSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java index 1fb4760970015..a546fa8d59766 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Databricks linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureDatabricksLinkedService.class, + visible = true) @JsonTypeName("AzureDatabricks") @Fluent public final class AzureDatabricksLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureDatabricks"; + /* * Azure Databricks linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureDatabricksLinkedService extends LinkedService { public AzureDatabricksLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Databricks linked service properties. * @@ -131,8 +153,8 @@ public AzureDatabricksLinkedService withAccessToken(SecretBase accessToken) { } /** - * Get the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST - * API. Type: string (or Expression with resultType string). + * Get the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST API. + * Type: string (or Expression with resultType string). * * @return the authentication value. */ @@ -141,8 +163,8 @@ public Object authentication() { } /** - * Set the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST - * API. Type: string (or Expression with resultType string). + * Set the authentication property: Required to specify MSI, if using Workspace resource id for databricks REST API. + * Type: string (or Expression with resultType string). * * @param authentication the authentication value to set. * @return the AzureDatabricksLinkedService object itself. @@ -290,8 +312,8 @@ public AzureDatabricksLinkedService withNewClusterNumOfWorker(Object newClusterN /** * Get the newClusterNodeType property: The node type of the new job cluster. This property is required if - * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this - * property is ignored. Type: string (or Expression with resultType string). + * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property + * is ignored. Type: string (or Expression with resultType string). * * @return the newClusterNodeType value. */ @@ -301,8 +323,8 @@ public Object newClusterNodeType() { /** * Set the newClusterNodeType property: The node type of the new job cluster. This property is required if - * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this - * property is ignored. Type: string (or Expression with resultType string). + * newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property + * is ignored. Type: string (or Expression with resultType string). * * @param newClusterNodeType the newClusterNodeType value to set. * @return the AzureDatabricksLinkedService object itself. @@ -414,8 +436,8 @@ public AzureDatabricksLinkedService withNewClusterLogDestination(Object newClust } /** - * Get the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is - * ignored in instance pool configurations. 
Type: string (or Expression with resultType string). + * Get the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is ignored + * in instance pool configurations. Type: string (or Expression with resultType string). * * @return the newClusterDriverNodeType value. */ @@ -424,8 +446,8 @@ public Object newClusterDriverNodeType() { } /** - * Set the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is - * ignored in instance pool configurations. Type: string (or Expression with resultType string). + * Set the newClusterDriverNodeType property: The driver node type for the new job cluster. This property is ignored + * in instance pool configurations. Type: string (or Expression with resultType string). * * @param newClusterDriverNodeType the newClusterDriverNodeType value to set. * @return the AzureDatabricksLinkedService object itself. @@ -491,8 +513,8 @@ public AzureDatabricksLinkedService withNewClusterEnableElasticDisk(Object newCl } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -501,8 +523,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureDatabricksLinkedService object itself. 
@@ -572,8 +594,9 @@ public AzureDatabricksLinkedService withCredential(CredentialReference credentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureDatabricksLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureDatabricksLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java index 5569040856ec4..7b43cddb78498 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureFileStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure File Storage linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureFileStorageLinkedService.class, + visible = true) @JsonTypeName("AzureFileStorage") @Fluent public final class AzureFileStorageLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFileStorage"; + /* * Azure File Storage linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureFileStorageLinkedService extends LinkedService { public AzureFileStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure File Storage linked service properties. * @@ -294,8 +316,8 @@ public AzureFileStorageLinkedService withSnapshot(Object snapshot) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -304,8 +326,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the AzureFileStorageLinkedService object itself. @@ -327,8 +349,9 @@ public AzureFileStorageLinkedService withEncryptedCredential(String encryptedCre public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureFileStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureFileStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java index dc4081e83ec8b..7ef77ce121cc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java @@ -5,22 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of file server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureFileStorageLocation.class, + visible = true) @JsonTypeName("AzureFileStorageLocation") @Fluent public final class AzureFileStorageLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFileStorageLocation"; + /** * Creates an instance of AzureFileStorageLocation class. */ public AzureFileStorageLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java index bf28a65adc387..85fed97c1fb1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure File Storage read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureFileStorageReadSettings.class, + visible = true) @JsonTypeName("AzureFileStorageReadSettings") @Fluent public final class AzureFileStorageReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFileStorageReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,15 +47,13 @@ public final class AzureFileStorageReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType - * string). + * The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). */ @JsonProperty(value = "prefix") private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -56,15 +65,13 @@ public final class AzureFileStorageReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -88,8 +95,18 @@ public AzureFileStorageReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -98,8 +115,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. 
Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the AzureFileStorageReadSettings object itself. @@ -176,8 +193,8 @@ public AzureFileStorageReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -186,8 +203,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the AzureFileStorageReadSettings object itself. @@ -220,8 +237,8 @@ public AzureFileStorageReadSettings withEnablePartitionDiscovery(Object enablePa } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -230,8 +247,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the AzureFileStorageReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java index f92436d636ff7..f48365f10d93f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java @@ -5,6 +5,8 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -12,16 +14,37 @@ /** * Azure File Storage write settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureFileStorageWriteSettings.class, + visible = true) @JsonTypeName("AzureFileStorageWriteSettings") @Fluent public final class AzureFileStorageWriteSettings extends StoreWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFileStorageWriteSettings"; + /** * Creates an instance of AzureFileStorageWriteSettings class. */ public AzureFileStorageWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java index e87a644a9d81e..c7183528b2f21 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Azure Function activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureFunctionActivity.class, visible = true) @JsonTypeName("AzureFunctionActivity") @Fluent public final class AzureFunctionActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFunctionActivity"; + /* * Azure Function activity properties. */ @@ -32,6 +40,16 @@ public final class AzureFunctionActivity extends ExecutionActivity { public AzureFunctionActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Function activity properties. * @@ -168,7 +186,7 @@ public AzureFunctionActivity withFunctionName(Object functionName) { * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.innerTypeProperties() == null ? null : this.innerTypeProperties().headers(); } @@ -180,7 +198,7 @@ public Map headers() { * @param headers the headers value to set. * @return the AzureFunctionActivity object itself. 
*/ - public AzureFunctionActivity withHeaders(Map headers) { + public AzureFunctionActivity withHeaders(Map headers) { if (this.innerTypeProperties() == null) { this.innerTypeProperties = new AzureFunctionActivityTypeProperties(); } @@ -222,8 +240,9 @@ public AzureFunctionActivity withBody(Object body) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureFunctionActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureFunctionActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java index 3907c0fc7e324..a330e9c91254c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Function linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureFunctionLinkedService.class, + visible = true) @JsonTypeName("AzureFunction") @Fluent public final class AzureFunctionLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureFunction"; + /* * Azure Function linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureFunctionLinkedService extends LinkedService { public AzureFunctionLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Function linked service properties. * @@ -127,8 +149,8 @@ public AzureFunctionLinkedService withFunctionKey(SecretBase functionKey) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -137,8 +159,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureFunctionLinkedService object itself. @@ -233,8 +255,9 @@ public AzureFunctionLinkedService withAuthentication(Object authentication) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureFunctionLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureFunctionLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java index 60a4b500be73d..b52250db54347 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureKeyVaultLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Key Vault linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureKeyVaultLinkedService.class, + visible = true) @JsonTypeName("AzureKeyVault") @Fluent public final class AzureKeyVaultLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureKeyVault"; + /* * Azure Key Vault linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureKeyVaultLinkedService extends LinkedService { public AzureKeyVaultLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Key Vault linked service properties. 
* @@ -135,8 +157,9 @@ public AzureKeyVaultLinkedService withCredential(CredentialReference credential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureKeyVaultLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureKeyVaultLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java index a924d2acf6a7c..0630a795917fd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Azure Key Vault secret reference. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureKeyVaultSecretReference.class, + visible = true) @JsonTypeName("AzureKeyVaultSecret") @Fluent public final class AzureKeyVaultSecretReference extends SecretBase { + /* + * Type of the secret. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureKeyVaultSecret"; + /* * The Azure Key Vault linked service reference. */ @@ -30,8 +42,7 @@ public final class AzureKeyVaultSecretReference extends SecretBase { private Object secretName; /* - * The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: - * string (or Expression with resultType string). + * The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). */ @JsonProperty(value = "secretVersion") private Object secretVersion; @@ -42,6 +53,16 @@ public final class AzureKeyVaultSecretReference extends SecretBase { public AzureKeyVaultSecretReference() { } + /** + * Get the type property: Type of the secret. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the store property: The Azure Key Vault linked service reference. 
* @@ -115,14 +136,16 @@ public AzureKeyVaultSecretReference withSecretVersion(Object secretVersion) { public void validate() { super.validate(); if (store() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property store in model AzureKeyVaultSecretReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property store in model AzureKeyVaultSecretReference")); } else { store().validate(); } if (secretName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property secretName in model AzureKeyVaultSecretReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property secretName in model AzureKeyVaultSecretReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java index 00f81c2abbe63..392b2bd44da23 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLBatchExecutionActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure ML Batch Execution activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMLBatchExecutionActivity.class, + visible = true) @JsonTypeName("AzureMLBatchExecution") @Fluent public final class AzureMLBatchExecutionActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMLBatchExecution"; + /* * Azure ML Batch Execution activity properties. */ @@ -33,6 +45,16 @@ public final class AzureMLBatchExecutionActivity extends ExecutionActivity { public AzureMLBatchExecutionActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure ML Batch Execution activity properties. * @@ -115,9 +137,9 @@ public AzureMLBatchExecutionActivity withUserProperties(List userP } /** - * Get the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service - * endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. - * Values will be passed in the GlobalParameters property of the Azure ML batch execution request. + * Get the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. + * Keys must match the names of web service parameters defined in the published Azure ML web service. 
Values will be + * passed in the GlobalParameters property of the Azure ML batch execution request. * * @return the globalParameters value. */ @@ -126,9 +148,9 @@ public Map globalParameters() { } /** - * Set the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service - * endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. - * Values will be passed in the GlobalParameters property of the Azure ML batch execution request. + * Set the globalParameters property: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. + * Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be + * passed in the GlobalParameters property of the Azure ML batch execution request. * * @param globalParameters the globalParameters value to set. * @return the AzureMLBatchExecutionActivity object itself. @@ -142,9 +164,9 @@ public AzureMLBatchExecutionActivity withGlobalParameters(Map gl } /** - * Get the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service - * Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed - * in the WebServiceOutputs property of the Azure ML batch execution request. + * Get the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs + * to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the + * WebServiceOutputs property of the Azure ML batch execution request. * * @return the webServiceOutputs value. */ @@ -153,9 +175,9 @@ public Map webServiceOutputs() { } /** - * Set the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service - * Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed - * in the WebServiceOutputs property of the Azure ML batch execution request. + * Set the webServiceOutputs property: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs + * to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the + * WebServiceOutputs property of the Azure ML batch execution request. * * @param webServiceOutputs the webServiceOutputs value to set. * @return the AzureMLBatchExecutionActivity object itself. @@ -204,8 +226,9 @@ public AzureMLBatchExecutionActivity withWebServiceInputs(Map user } /** - * Get the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with - * resultType string). + * Get the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with resultType + * string). * * @return the mlPipelineId value. */ @@ -124,8 +146,8 @@ public Object mlPipelineId() { } /** - * Set the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with - * resultType string). + * Set the mlPipelineId property: ID of the published Azure ML pipeline. Type: string (or Expression with resultType + * string). * * @param mlPipelineId the mlPipelineId value to set. * @return the AzureMLExecutePipelineActivity object itself. @@ -189,9 +211,9 @@ public AzureMLExecutePipelineActivity withVersion(Object version) { } /** - * Get the experimentName property: Run history experiment name of the pipeline run. 
This information will be - * passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression - * with resultType string). + * Get the experimentName property: Run history experiment name of the pipeline run. This information will be passed + * in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with + * resultType string). * * @return the experimentName value. */ @@ -200,9 +222,9 @@ public Object experimentName() { } /** - * Set the experimentName property: Run history experiment name of the pipeline run. This information will be - * passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression - * with resultType string). + * Set the experimentName property: Run history experiment name of the pipeline run. This information will be passed + * in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with + * resultType string). * * @param experimentName the experimentName value to set. * @return the AzureMLExecutePipelineActivity object itself. @@ -273,8 +295,8 @@ public AzureMLExecutePipelineActivity withDataPathAssignments(Object dataPathAss /** * Get the mlParentRunId property: The parent Azure ML Service pipeline run id. This information will be passed in - * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with - * resultType string). + * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType + * string). * * @return the mlParentRunId value. */ @@ -284,8 +306,8 @@ public Object mlParentRunId() { /** * Set the mlParentRunId property: The parent Azure ML Service pipeline run id. This information will be passed in - * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with - * resultType string). + * the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType + * string). * * @param mlParentRunId the mlParentRunId value to set. * @return the AzureMLExecutePipelineActivity object itself. @@ -299,9 +321,9 @@ public AzureMLExecutePipelineActivity withMlParentRunId(Object mlParentRunId) { } /** - * Get the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a - * step fails. This information will be passed in the continueOnStepFailure property of the published pipeline - * execution request. Type: boolean (or Expression with resultType boolean). + * Get the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a step + * fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution + * request. Type: boolean (or Expression with resultType boolean). * * @return the continueOnStepFailure value. */ @@ -310,9 +332,9 @@ public Object continueOnStepFailure() { } /** - * Set the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a - * step fails. This information will be passed in the continueOnStepFailure property of the published pipeline - * execution request. Type: boolean (or Expression with resultType boolean). + * Set the continueOnStepFailure property: Whether to continue execution of other steps in the PipelineRun if a step + * fails. 
This information will be passed in the continueOnStepFailure property of the published pipeline execution + * request. Type: boolean (or Expression with resultType boolean). * * @param continueOnStepFailure the continueOnStepFailure value to set. * @return the AzureMLExecutePipelineActivity object itself. @@ -334,8 +356,9 @@ public AzureMLExecutePipelineActivity withContinueOnStepFailure(Object continueO public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMLExecutePipelineActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMLExecutePipelineActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java index 5ccb956e14a1e..f39516265f541 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Azure ML Studio Web Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMLLinkedService.class, visible = true) @JsonTypeName("AzureML") @Fluent public final class AzureMLLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureML"; + /* * Azure ML Studio Web Service linked service properties. */ @@ -32,6 +40,16 @@ public final class AzureMLLinkedService extends LinkedService { public AzureMLLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure ML Studio Web Service linked service properties. * @@ -176,8 +194,8 @@ public AzureMLLinkedService withServicePrincipalId(Object servicePrincipalId) { } /** - * Get the servicePrincipalKey property: The key of the service principal used to authenticate against the - * ARM-based updateResourceEndpoint of an Azure ML Studio web service. + * Get the servicePrincipalKey property: The key of the service principal used to authenticate against the ARM-based + * updateResourceEndpoint of an Azure ML Studio web service. * * @return the servicePrincipalKey value. 
*/ @@ -186,8 +204,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The key of the service principal used to authenticate against the - * ARM-based updateResourceEndpoint of an Azure ML Studio web service. + * Set the servicePrincipalKey property: The key of the service principal used to authenticate against the ARM-based + * updateResourceEndpoint of an Azure ML Studio web service. * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the AzureMLLinkedService object itself. @@ -226,8 +244,8 @@ public AzureMLLinkedService withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -236,8 +254,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMLLinkedService object itself. @@ -284,8 +302,9 @@ public AzureMLLinkedService withAuthentication(Object authentication) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMLLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMLLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java index 7908a0c525a08..78712e47d9e86 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLServiceLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure ML Service linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMLServiceLinkedService.class, + visible = true) @JsonTypeName("AzureMLService") @Fluent public final class AzureMLServiceLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMLService"; + /* * Azure ML Service linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureMLServiceLinkedService extends LinkedService { public AzureMLServiceLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure ML Service linked service properties. * @@ -179,8 +201,8 @@ public AzureMLServiceLinkedService withAuthentication(Object authentication) { } /** - * Get the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint - * of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + * Get the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint of + * a published Azure ML Service pipeline. Type: string (or Expression with resultType string). * * @return the servicePrincipalId value. */ @@ -189,8 +211,8 @@ public Object servicePrincipalId() { } /** - * Set the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint - * of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + * Set the servicePrincipalId property: The ID of the service principal used to authenticate against the endpoint of + * a published Azure ML Service pipeline. Type: string (or Expression with resultType string). * * @param servicePrincipalId the servicePrincipalId value to set. * @return the AzureMLServiceLinkedService object itself. @@ -254,8 +276,8 @@ public AzureMLServiceLinkedService withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -264,8 +286,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMLServiceLinkedService object itself. 
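The recurring change in the hunks above replaces the class-level @JsonTypeInfo with a visible "type" discriminator (plus defaultImpl), and adds a @JsonTypeId-annotated field holding the constant discriminator value together with a type() override that returns it. The following is a minimal, self-contained sketch of that Jackson pattern using hypothetical BaseModel/ChildModel names rather than the generated classes; it is an illustration of the annotation shape, not the SDK's own code.

// Illustrative sketch only; BaseModel/ChildModel are hypothetical stand-ins for the
// generated base types (LinkedService, Dataset, CopySink, ...) and their subtypes.
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class TypeIdSketch {

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BaseModel.class, visible = true)
    @JsonSubTypes(@JsonSubTypes.Type(value = ChildModel.class, name = "Child"))
    public static class BaseModel {
        public String type() {
            return "Base";
        }
    }

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ChildModel.class, visible = true)
    @JsonTypeName("Child")
    public static final class ChildModel extends BaseModel {
        // The discriminator is now an explicit field with a constant default...
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "Child";

        // ...surfaced through an overriding getter, as in the generated models.
        @Override
        public String type() {
            return this.type;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Writes {"type":"Child"}: the @JsonTypeId field supplies the type id.
        System.out.println(mapper.writeValueAsString(new ChildModel()));
        // visible = true passes the discriminator value into the field on deserialization.
        BaseModel roundTripped = mapper.readValue("{\"type\":\"Child\"}", BaseModel.class);
        System.out.println(roundTripped.type()); // Child
    }
}

With visible = true the "type" value is handed to the annotated field instead of being consumed by Jackson, so the getter reflects what was actually on the wire, and defaultImpl appears intended to let a payload without a recognized type name still bind to the class itself.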
@@ -287,8 +309,9 @@ public AzureMLServiceLinkedService withEncryptedCredential(String encryptedCrede public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMLServiceLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMLServiceLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java index 3fae5667e22bd..aaf808f076f00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLUpdateResourceActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure ML Update Resource management activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMLUpdateResourceActivity.class, + visible = true) @JsonTypeName("AzureMLUpdateResource") @Fluent public final class AzureMLUpdateResourceActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMLUpdateResource"; + /* * Azure ML Update Resource management activity properties. */ @@ -32,6 +44,16 @@ public final class AzureMLUpdateResourceActivity extends ExecutionActivity { public AzureMLUpdateResourceActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure ML Update Resource management activity properties. 
* @@ -200,8 +222,9 @@ public AzureMLUpdateResourceActivity withTrainedModelFilePath(Object trainedMode public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMLUpdateResourceActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMLUpdateResourceActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java index 20a6bd9fa7216..a2b3f83e241c9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java @@ -14,8 +14,7 @@ @Fluent public final class AzureMLWebServiceFile { /* - * The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. - * Type: string (or Expression with resultType string). + * The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). */ @JsonProperty(value = "filePath", required = true) private Object filePath; @@ -83,12 +82,13 @@ public AzureMLWebServiceFile withLinkedServiceName(LinkedServiceReference linked */ public void validate() { if (filePath() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property filePath in model AzureMLWebServiceFile")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property filePath in model AzureMLWebServiceFile")); } if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model AzureMLWebServiceFile")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model AzureMLWebServiceFile")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java index c05842e72b97c..affed074719d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMariaDBLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Database for MariaDB linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMariaDBLinkedService.class, + visible = true) @JsonTypeName("AzureMariaDB") @Fluent public final class AzureMariaDBLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMariaDB"; + /* * Azure Database for MariaDB linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureMariaDBLinkedService extends LinkedService { public AzureMariaDBLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Database for MariaDB linked service properties. * @@ -126,8 +148,8 @@ public AzureMariaDBLinkedService withPwd(AzureKeyVaultSecretReference pwd) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +158,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMariaDBLinkedService object itself. @@ -159,8 +181,9 @@ public AzureMariaDBLinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMariaDBLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMariaDBLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java index 87689ab7a3514..192d95856e91b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure MariaDB source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMariaDBSource.class, visible = true) @JsonTypeName("AzureMariaDBSource") @Fluent public final class AzureMariaDBSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMariaDBSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzureMariaDBSource extends TabularSource { public AzureMariaDBSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java index 82248746e45ad..85c756db716c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure Database for MariaDB dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMariaDBTableDataset.class, + visible = true) @JsonTypeName("AzureMariaDBTable") @Fluent public final class AzureMariaDBTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMariaDBTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class AzureMariaDBTableDataset extends Dataset { public AzureMariaDBTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java index 5bb2e8c589444..8601fb93e55cb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure MySQL database linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureMySqlLinkedService.class, + visible = true) @JsonTypeName("AzureMySql") @Fluent public final class AzureMySqlLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMySql"; + /* * Azure MySQL database linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureMySqlLinkedService extends LinkedService { public AzureMySqlLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure MySQL database linked service properties. * @@ -126,8 +148,8 @@ public AzureMySqlLinkedService withPassword(AzureKeyVaultSecretReference passwor } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +158,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureMySqlLinkedService object itself. 
@@ -159,8 +181,9 @@ public AzureMySqlLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMySqlLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMySqlLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java index b99ffdb22cbbb..9ea141ce4d8a6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure MySql sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlSink.class, visible = true) @JsonTypeName("AzureMySqlSink") @Fluent public final class AzureMySqlSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMySqlSink"; + /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzureMySqlSink extends CopySink { public AzureMySqlSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: A query to execute before starting the copy. Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java index 3c0ba7a8be913..6d9910fe891b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure MySQL source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlSource.class, visible = true) @JsonTypeName("AzureMySqlSource") @Fluent public final class AzureMySqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMySqlSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzureMySqlSource extends TabularSource { public AzureMySqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java index effd5c525fc99..ac4966e75bc44 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The Azure MySQL database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlTableDataset.class, visible = true) @JsonTypeName("AzureMySqlTable") @Fluent public final class AzureMySqlTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureMySqlTable"; + /* * Azure MySQL database dataset properties. */ @@ -32,6 +40,16 @@ public final class AzureMySqlTableDataset extends Dataset { public AzureMySqlTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure MySQL database dataset properties. 
* @@ -163,8 +181,9 @@ public AzureMySqlTableDataset withTable(Object table) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureMySqlTableDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureMySqlTableDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java index 6bde1f774464e..b73e7493523d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure PostgreSQL linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzurePostgreSqlLinkedService.class, + visible = true) @JsonTypeName("AzurePostgreSql") @Fluent public final class AzurePostgreSqlLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzurePostgreSql"; + /* * Azure PostgreSQL linked service properties. */ @@ -33,6 +45,16 @@ public final class AzurePostgreSqlLinkedService extends LinkedService { public AzurePostgreSqlLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure PostgreSQL linked service properties. * @@ -127,8 +149,8 @@ public AzurePostgreSqlLinkedService withPassword(AzureKeyVaultSecretReference pa } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -137,8 +159,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the AzurePostgreSqlLinkedService object itself. @@ -160,8 +182,9 @@ public AzurePostgreSqlLinkedService withEncryptedCredential(String encryptedCred public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzurePostgreSqlLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzurePostgreSqlLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java index 628c1cc3086ee..15d1ec8f392a6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure PostgreSQL sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzurePostgreSqlSink.class, visible = true) @JsonTypeName("AzurePostgreSqlSink") @Fluent public final class AzurePostgreSqlSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzurePostgreSqlSink"; + /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzurePostgreSqlSink extends CopySink { public AzurePostgreSqlSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: A query to execute before starting the copy. Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java index 9513bb494cd5c..1a29d7966809a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure PostgreSQL source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzurePostgreSqlSource.class, visible = true) @JsonTypeName("AzurePostgreSqlSource") @Fluent public final class AzurePostgreSqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzurePostgreSqlSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class AzurePostgreSqlSource extends TabularSource { public AzurePostgreSqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java index a556d8d50281a..0cf004aa8cb1a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Azure PostgreSQL dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzurePostgreSqlTableDataset.class, + visible = true) @JsonTypeName("AzurePostgreSqlTable") @Fluent public final class AzurePostgreSqlTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzurePostgreSqlTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class AzurePostgreSqlTableDataset extends Dataset { public AzurePostgreSqlTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * @@ -104,8 +126,8 @@ public AzurePostgreSqlTableDataset withFolder(DatasetFolder folder) { } /** - * Get the tableName property: The table name of the Azure PostgreSQL database which includes both schema and - * table. Type: string (or Expression with resultType string). + * Get the tableName property: The table name of the Azure PostgreSQL database which includes both schema and table. + * Type: string (or Expression with resultType string). * * @return the tableName value. 
*/ @@ -114,8 +136,8 @@ public Object tableName() { } /** - * Set the tableName property: The table name of the Azure PostgreSQL database which includes both schema and - * table. Type: string (or Expression with resultType string). + * Set the tableName property: The table name of the Azure PostgreSQL database which includes both schema and table. + * Type: string (or Expression with resultType string). * * @param tableName the tableName value to set. * @return the AzurePostgreSqlTableDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java index 523cf749e30c6..0555df47b823e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Queue sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureQueueSink.class, visible = true) @JsonTypeName("AzureQueueSink") @Fluent public final class AzureQueueSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureQueueSink"; + /** * Creates an instance of AzureQueueSink class. */ public AzureQueueSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java index cbb7c3f23136b..e60b95042bfe5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSearchIndexDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The Azure Search Index. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSearchIndexDataset.class, + visible = true) @JsonTypeName("AzureSearchIndex") @Fluent public final class AzureSearchIndexDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSearchIndex"; + /* * Properties specific to this dataset type. */ @@ -32,6 +44,16 @@ public final class AzureSearchIndexDataset extends Dataset { public AzureSearchIndexDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * @@ -138,8 +160,9 @@ public AzureSearchIndexDataset withIndexName(Object indexName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSearchIndexDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSearchIndexDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java index 5dbd35711b334..28db9d9c009dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Search Index sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSearchIndexSink.class, visible = true) @JsonTypeName("AzureSearchIndexSink") @Fluent public final class AzureSearchIndexSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSearchIndexSink"; + /* * Specify the write behavior when upserting documents into Azure Search Index. */ @@ -28,6 +36,16 @@ public final class AzureSearchIndexSink extends CopySink { public AzureSearchIndexSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: Specify the write behavior when upserting documents into Azure Search Index. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java index c99d55464a8d2..312aed59c4d96 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSearchLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Windows Azure Search Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSearchLinkedService.class, + visible = true) @JsonTypeName("AzureSearch") @Fluent public final class AzureSearchLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSearch"; + /* * Windows Azure Search Service linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureSearchLinkedService extends LinkedService { public AzureSearchLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Windows Azure Search Service linked service properties. * @@ -124,8 +146,8 @@ public AzureSearchLinkedService withKey(SecretBase key) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -134,8 +156,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSearchLinkedService object itself. 
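The other change repeated throughout these files swaps LOGGER.logExceptionAsError(...) for the fluent LOGGER.atError().log(...) builder when validate() finds a missing required property. Below is a short sketch of the resulting shape on a hypothetical ExampleModel; it assumes the same azure-core ClientLogger/LoggingEventBuilder calls that the generated validate() bodies in this patch use, and is not itself part of the generated code.

// Hedged sketch; ExampleModel and requiredProperty are hypothetical, and the
// throw LOGGER.atError().log(...) form mirrors the generated code in these hunks.
import com.azure.core.util.logging.ClientLogger;

public final class ExampleModel {
    private static final ClientLogger LOGGER = new ClientLogger(ExampleModel.class);

    private String requiredProperty;

    public String requiredProperty() {
        return this.requiredProperty;
    }

    public ExampleModel withRequiredProperty(String requiredProperty) {
        this.requiredProperty = requiredProperty;
        return this;
    }

    /**
     * Validates the instance, logging and throwing when a required property is missing,
     * in the same shape as the generated models in this patch.
     */
    public void validate() {
        if (requiredProperty() == null) {
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property requiredProperty in model ExampleModel"));
        }
    }
}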
@@ -157,8 +179,9 @@ public AzureSearchLinkedService withEncryptedCredential(String encryptedCredenti public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSearchLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSearchLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java index f9b7b9b6bf68c..008937aa4a2b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure SQL Data Warehouse linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSqlDWLinkedService.class, + visible = true) @JsonTypeName("AzureSqlDW") @Fluent public final class AzureSqlDWLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlDW"; + /* * Azure SQL Data Warehouse linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureSqlDWLinkedService extends LinkedService { public AzureSqlDWLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL Data Warehouse linked service properties. * @@ -201,9 +223,9 @@ public AzureSqlDWLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -212,9 +234,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. 
Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlDWLinkedService object itself. @@ -228,8 +250,8 @@ public AzureSqlDWLinkedService withAzureCloudType(Object azureCloudType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +260,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlDWLinkedService object itself. @@ -284,8 +306,9 @@ public AzureSqlDWLinkedService withCredential(CredentialReference credential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSqlDWLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSqlDWLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java index f81392d7d9c81..6e984315953c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Azure SQL Data Warehouse dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlDWTableDataset.class, visible = true) @JsonTypeName("AzureSqlDWTable") @Fluent public final class AzureSqlDWTableDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlDWTable"; + /* * Azure SQL Data Warehouse dataset properties. */ @@ -31,6 +39,16 @@ public final class AzureSqlDWTableDataset extends Dataset { public AzureSqlDWTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL Data Warehouse dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java index 19d6c8d0e868d..9c66b598e038b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDatabaseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Microsoft Azure SQL Database linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSqlDatabaseLinkedService.class, + visible = true) @JsonTypeName("AzureSqlDatabase") @Fluent public final class AzureSqlDatabaseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlDatabase"; + /* * Azure SQL Database linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureSqlDatabaseLinkedService extends LinkedService { public AzureSqlDatabaseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL Database linked service properties. * @@ -202,9 +224,9 @@ public AzureSqlDatabaseLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -213,9 +235,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. 
Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlDatabaseLinkedService object itself. @@ -229,8 +251,8 @@ public AzureSqlDatabaseLinkedService withAzureCloudType(Object azureCloudType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -239,8 +261,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlDatabaseLinkedService object itself. @@ -309,8 +331,9 @@ public AzureSqlDatabaseLinkedService withCredential(CredentialReference credenti public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSqlDatabaseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSqlDatabaseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java index 14c180b783d3a..b4f52d7d8ff81 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMILinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure SQL Managed Instance linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSqlMILinkedService.class, + visible = true) @JsonTypeName("AzureSqlMI") @Fluent public final class AzureSqlMILinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlMI"; + /* * Azure SQL Managed Instance linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureSqlMILinkedService extends LinkedService { public AzureSqlMILinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL Managed Instance linked service properties. * @@ -201,9 +223,9 @@ public AzureSqlMILinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -212,9 +234,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the AzureSqlMILinkedService object itself. @@ -228,8 +250,8 @@ public AzureSqlMILinkedService withAzureCloudType(Object azureCloudType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +260,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureSqlMILinkedService object itself. 
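[Editor's note, illustrative only and not part of the patch] Across these linked-service models the regenerated validate() now throws via LOGGER.atError().log(...) instead of LOGGER.logExceptionAsError(...); both forms are intended to log the IllegalArgumentException before it is thrown, so the observable behavior for callers stays the same. A minimal sketch of that behavior, using a model whose validate() hunk appears above:

    import com.azure.resourcemanager.datafactory.models.AzureSqlDatabaseLinkedService;

    public final class ValidateSketch {
        public static void main(String[] args) {
            AzureSqlDatabaseLinkedService linkedService = new AzureSqlDatabaseLinkedService();
            try {
                // innerTypeProperties was never set, so validate() logs and throws.
                linkedService.validate();
            } catch (IllegalArgumentException e) {
                // "Missing required property innerTypeProperties in model AzureSqlDatabaseLinkedService"
                System.out.println(e.getMessage());
            }
        }
    }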
@@ -307,8 +329,9 @@ public AzureSqlMILinkedService withCredential(CredentialReference credential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSqlMILinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSqlMILinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java index 17314e3841ee0..2a10b4be14db5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMITableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Azure SQL Managed Instance dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlMITableDataset.class, visible = true) @JsonTypeName("AzureSqlMITable") @Fluent public final class AzureSqlMITableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlMITable"; + /* * Azure SQL Managed Instance dataset properties. */ @@ -31,6 +39,16 @@ public final class AzureSqlMITableDataset extends Dataset { public AzureSqlMITableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL Managed Instance dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java index 3a0941a08e724..d2d45105ae7bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure SQL sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlSink.class, visible = true) @JsonTypeName("AzureSqlSink") @Fluent public final class AzureSqlSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlSink"; + /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ @@ -47,8 +55,7 @@ public final class AzureSqlSink extends CopySink { private Object storedProcedureTableTypeParameterName; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; @@ -60,8 +67,7 @@ public final class AzureSqlSink extends CopySink { private Object sqlWriterUseTableLock; /* - * Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType - * SqlWriteBehaviorEnum) + * Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum) */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -79,8 +85,18 @@ public AzureSqlSink() { } /** - * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @return the sqlWriterStoredProcedureName value. */ @@ -89,8 +105,8 @@ public Object sqlWriterStoredProcedureName() { } /** - * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @param sqlWriterStoredProcedureName the sqlWriterStoredProcedureName value to set. * @return the AzureSqlSink object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java index ef36d8e3483e2..57ee3181d42f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure SQL source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlSource.class, visible = true) @JsonTypeName("AzureSqlSource") @Fluent public final class AzureSqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlSource"; + /* * SQL reader query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class AzureSqlSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. - * Type: string (or Expression with resultType string). + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; @@ -36,9 +43,7 @@ public final class AzureSqlSource extends TabularSource { private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; @@ -50,8 +55,7 @@ public final class AzureSqlSource extends TabularSource { private Object produceAdditionalTypes; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -68,6 +72,16 @@ public final class AzureSqlSource extends TabularSource { public AzureSqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL reader query. Type: string (or Expression with resultType string). 
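[Editor's note, illustrative only and not part of the patch] The recurring pattern in these hunks is that each polymorphic subtype now declares a visible @JsonTypeId field initialized to its wire name and overrides type() to return it, while defaultImpl points at the class itself so, per Jackson's defaultImpl semantics, a missing or unrecognized discriminator still deserializes to that class when it is the declared target. A minimal sketch of the getter side:

    import com.azure.resourcemanager.datafactory.models.AzureSqlSource;

    public final class TypeDiscriminatorSketch {
        public static void main(String[] args) {
            // The regenerated subtype carries its discriminator as a field, so type()
            // returns the constant wire value even before serialization.
            AzureSqlSource source = new AzureSqlSource();
            System.out.println(source.type()); // "AzureSqlSource"
        }
    }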
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java index 559d99b20aa04..e391fef98d1c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Azure SQL Server database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlTableDataset.class, visible = true) @JsonTypeName("AzureSqlTable") @Fluent public final class AzureSqlTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSqlTable"; + /* * Azure SQL dataset properties. */ @@ -31,6 +39,16 @@ public final class AzureSqlTableDataset extends Dataset { public AzureSqlTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure SQL dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java index ec27bfe647737..cebec31513999 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The storage account linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureStorageLinkedService.class, + visible = true) @JsonTypeName("AzureStorage") @Fluent public final class AzureStorageLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureStorage"; + /* * Azure Storage linked service properties. 
*/ @@ -32,6 +44,16 @@ public final class AzureStorageLinkedService extends LinkedService { public AzureStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Storage linked service properties. * @@ -174,8 +196,8 @@ public AzureStorageLinkedService withSasToken(AzureKeyVaultSecretReference sasTo } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -184,8 +206,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureStorageLinkedService object itself. @@ -207,8 +229,9 @@ public AzureStorageLinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java index efe219e69842f..762faaa8604ad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureSynapseArtifactsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Azure Synapse Analytics (Artifacts) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureSynapseArtifactsLinkedService.class, + visible = true) @JsonTypeName("AzureSynapseArtifacts") @Fluent public final class AzureSynapseArtifactsLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureSynapseArtifacts"; + /* * Azure Synapse Analytics (Artifacts) linked service properties. */ @@ -33,6 +45,16 @@ public final class AzureSynapseArtifactsLinkedService extends LinkedService { public AzureSynapseArtifactsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Synapse Analytics (Artifacts) linked service properties. * @@ -164,8 +186,9 @@ public AzureSynapseArtifactsLinkedService withWorkspaceResourceId(Object workspa public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureSynapseArtifactsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSynapseArtifactsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java index 99a40eda21dd9..9a4775f77e439 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The Azure Table storage dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableDataset.class, visible = true) @JsonTypeName("AzureTable") @Fluent public final class AzureTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureTable"; + /* * Azure Table dataset properties. */ @@ -32,6 +40,16 @@ public final class AzureTableDataset extends Dataset { public AzureTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Table dataset properties. 
* @@ -138,8 +156,9 @@ public AzureTableDataset withTableName(Object tableName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureTableDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureTableDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java index 67d26d56e0b0a..26b64ea16a629 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Table sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableSink.class, visible = true) @JsonTypeName("AzureTableSink") @Fluent public final class AzureTableSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureTableSink"; + /* * Azure Table default partition key value. Type: string (or Expression with resultType string). */ @@ -46,6 +54,16 @@ public final class AzureTableSink extends CopySink { public AzureTableSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the azureTableDefaultPartitionKeyValue property: Azure Table default partition key value. Type: string (or * Expression with resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java index d5f2f54e46214..b09b39169365f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Table source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableSource.class, visible = true) @JsonTypeName("AzureTableSource") @Fluent public final class AzureTableSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureTableSource"; + /* * Azure Table source query. Type: string (or Expression with resultType string). */ @@ -34,6 +42,16 @@ public final class AzureTableSource extends TabularSource { public AzureTableSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the azureTableSourceQuery property: Azure Table source query. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java index 55b592bcf0529..e38e2a8995e3b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.AzureStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The azure table storage linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = AzureTableStorageLinkedService.class, + visible = true) @JsonTypeName("AzureTableStorage") @Fluent public final class AzureTableStorageLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "AzureTableStorage"; + /* * Azure Table Storage linked service properties. */ @@ -32,6 +44,16 @@ public final class AzureTableStorageLinkedService extends LinkedService { public AzureTableStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Azure Table Storage linked service properties. * @@ -174,8 +196,8 @@ public AzureTableStorageLinkedService withSasToken(AzureKeyVaultSecretReference } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -184,8 +206,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the AzureTableStorageLinkedService object itself. @@ -207,8 +229,9 @@ public AzureTableStorageLinkedService withEncryptedCredential(String encryptedCr public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model AzureTableStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureTableStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java index 62bd7f6cffa3d..f280dca1e8dbd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java @@ -80,12 +80,14 @@ public BigDataPoolParametrizationReference withReferenceName(Object referenceNam */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property type in model BigDataPoolParametrizationReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property type in model BigDataPoolParametrizationReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model BigDataPoolParametrizationReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model BigDataPoolParametrizationReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java index 68be5a1138642..2b01d5e2c5880 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.BinaryDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ 
/** * Binary dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinaryDataset.class, visible = true) @JsonTypeName("Binary") @Fluent public final class BinaryDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Binary"; + /* * Binary dataset properties. */ @@ -31,6 +39,16 @@ public final class BinaryDataset extends Dataset { public BinaryDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Binary dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java index a846e7e69a95a..c95d5798e90d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Binary read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinaryReadSettings.class, visible = true) @JsonTypeName("BinaryReadSettings") @Fluent public final class BinaryReadSettings extends FormatReadSettings { + /* + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BinaryReadSettings"; + /* * Compression settings. */ @@ -28,6 +36,16 @@ public final class BinaryReadSettings extends FormatReadSettings { public BinaryReadSettings() { } + /** + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the compressionProperties property: Compression settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java index 7ee5f787743da..f3a78d49b888a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Binary sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinarySink.class, visible = true) @JsonTypeName("BinarySink") @Fluent public final class BinarySink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BinarySink"; + /* * Binary store settings. */ @@ -28,6 +36,16 @@ public final class BinarySink extends CopySink { public BinarySink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Binary store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java index 0b0c3e05bf954..7b0a5b075698b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Binary source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinarySource.class, visible = true) @JsonTypeName("BinarySource") @Fluent public final class BinarySource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BinarySource"; + /* * Binary store settings. */ @@ -34,6 +42,16 @@ public final class BinarySource extends CopySource { public BinarySource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Binary store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java index acdd2882752be..8915d64259993 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.BlobEventsTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Trigger that runs every time a Blob event occurs. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobEventsTrigger.class, visible = true) @JsonTypeName("BlobEventsTrigger") @Fluent public final class BlobEventsTrigger extends MultiplePipelineTrigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BlobEventsTrigger"; + /* * Blob Events Trigger properties. */ @@ -31,6 +39,16 @@ public final class BlobEventsTrigger extends MultiplePipelineTrigger { public BlobEventsTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Blob Events Trigger properties. * @@ -69,8 +87,8 @@ public BlobEventsTrigger withAnnotations(List annotations) { /** * Get the blobPathBeginsWith property: The blob path must begin with the pattern provided for trigger to fire. For - * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the - * records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records + * container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @return the blobPathBeginsWith value. */ @@ -80,8 +98,8 @@ public String blobPathBeginsWith() { /** * Set the blobPathBeginsWith property: The blob path must begin with the pattern provided for trigger to fire. For - * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the - * records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records + * container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @param blobPathBeginsWith the blobPathBeginsWith value to set. * @return the BlobEventsTrigger object itself. @@ -96,8 +114,8 @@ public BlobEventsTrigger withBlobPathBeginsWith(String blobPathBeginsWith) { /** * Get the blobPathEndsWith property: The blob path must end with the pattern provided for trigger to fire. For - * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least - * one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one + * of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @return the blobPathEndsWith value. */ @@ -107,8 +125,8 @@ public String blobPathEndsWith() { /** * Set the blobPathEndsWith property: The blob path must end with the pattern provided for trigger to fire. For - * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least - * one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + * example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one + * of these must be provided: blobPathBeginsWith, blobPathEndsWith. * * @param blobPathEndsWith the blobPathEndsWith value to set. * @return the BlobEventsTrigger object itself. 
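[Editor's note, illustrative only and not part of the patch] The BlobEventsTrigger Javadoc in the hunk above states that at least one of blobPathBeginsWith or blobPathEndsWith must be provided. A minimal sketch using the fluent setters shown in that hunk, with the example values taken from the Javadoc:

    import com.azure.resourcemanager.datafactory.models.BlobEventsTrigger;

    public final class BlobEventsTriggerSketch {
        public static void main(String[] args) {
            // At least one of blobPathBeginsWith / blobPathEndsWith is required; this combination
            // fires only for .csv blobs in the december folder under the records container.
            BlobEventsTrigger trigger = new BlobEventsTrigger()
                .withBlobPathBeginsWith("/records/blobs/december/")
                .withBlobPathEndsWith(".csv");
            System.out.println(trigger.type()); // "BlobEventsTrigger"
        }
    }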
@@ -199,8 +217,9 @@ public BlobEventsTrigger withScope(String scope) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model BlobEventsTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model BlobEventsTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java index 4cffe711831ac..730681a51972c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,17 @@ /** * A copy activity Azure Blob sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobSink.class, visible = true) @JsonTypeName("BlobSink") @Fluent public final class BlobSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BlobSink"; + /* * Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). */ @@ -42,8 +50,7 @@ public final class BlobSink extends CopySink { private Object copyBehavior; /* - * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType - * array of objects). + * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). */ @JsonProperty(value = "metadata") private List metadata; @@ -54,6 +61,16 @@ public final class BlobSink extends CopySink { public BlobSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the blobWriterOverwriteFiles property: Blob writer overwrite files. Type: boolean (or Expression with * resultType boolean). 
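Note on the recurring Jackson change in the hunks above and below: include = JsonTypeInfo.As.PROPERTY is dropped (As.PROPERTY is already Jackson's default inclusion mechanism), visible = true is added so the discriminator is passed through to the deserialized object, and each model gains a @JsonTypeId-annotated "type" field holding its discriminator value plus a type() accessor. The following is a minimal standalone sketch of that annotation pattern using hypothetical Animal/Dog classes, not generated SDK models; it only illustrates the Jackson mechanics under those assumptions.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Base type: the "type" discriminator stays visible so subtypes can capture it in a field.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Animal.class, visible = true)
    @JsonTypeName("Animal")
    @JsonSubTypes({ @JsonSubTypes.Type(name = "Dog", value = Dog.class) })
    class Animal {
    }

    // Subtype: the discriminator is an ordinary field marked as the Jackson type id.
    class Dog extends Animal {
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "Dog";

        @JsonProperty(value = "name")
        private String name;

        public String type() {
            return this.type;
        }

        public String name() {
            return this.name;
        }
    }

    public final class VisibleTypeIdDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            Animal animal = mapper.readValue("{\"type\":\"Dog\",\"name\":\"Rex\"}", Animal.class);
            // "type" selects the Dog subtype and, with visible = true, is also copied
            // into the field, so the accessor can report it after deserialization.
            System.out.println(((Dog) animal).type() + " " + ((Dog) animal).name());
        }
    }
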
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java index 891acbf85efbc..69e3f6c433dd9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure Blob source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobSource.class, visible = true) @JsonTypeName("BlobSource") @Fluent public final class BlobSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BlobSource"; + /* * Treat empty as null. Type: boolean (or Expression with resultType boolean). */ @@ -29,8 +37,7 @@ public final class BlobSource extends CopySource { private Object skipHeaderLineCount; /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -41,6 +48,16 @@ public final class BlobSource extends CopySource { public BlobSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the treatEmptyAsNull property: Treat empty as null. Type: boolean (or Expression with resultType boolean). * @@ -62,8 +79,8 @@ public BlobSource withTreatEmptyAsNull(Object treatEmptyAsNull) { } /** - * Get the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or - * Expression with resultType integer). + * Get the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or Expression + * with resultType integer). * * @return the skipHeaderLineCount value. */ @@ -72,8 +89,8 @@ public Object skipHeaderLineCount() { } /** - * Set the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or - * Expression with resultType integer). + * Set the skipHeaderLineCount property: Number of header lines to skip from each blob. Type: integer (or Expression + * with resultType integer). * * @param skipHeaderLineCount the skipHeaderLineCount value to set. * @return the BlobSource object itself. @@ -84,8 +101,8 @@ public BlobSource withSkipHeaderLineCount(Object skipHeaderLineCount) { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. 
Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -94,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the BlobSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java index f2e48b9495e86..7ce43eeb279fa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.BlobTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Trigger that runs every time the selected Blob container changes. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobTrigger.class, visible = true) @JsonTypeName("BlobTrigger") @Fluent public final class BlobTrigger extends MultiplePipelineTrigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "BlobTrigger"; + /* * Blob Trigger properties. */ @@ -31,6 +39,16 @@ public final class BlobTrigger extends MultiplePipelineTrigger { public BlobTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Blob Trigger properties. 
* @@ -145,8 +163,9 @@ public BlobTrigger withLinkedService(LinkedServiceReference linkedService) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model BlobTrigger")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property innerTypeProperties in model BlobTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java index ebbb84f0b6240..d53990dfa1460 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CassandraLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Cassandra data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraLinkedService.class, visible = true) @JsonTypeName("Cassandra") @Fluent public final class CassandraLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Cassandra"; + /* * Cassandra linked service properties. */ @@ -32,6 +40,16 @@ public final class CassandraLinkedService extends LinkedService { public CassandraLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Cassandra linked service properties. * @@ -195,8 +213,8 @@ public CassandraLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -205,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CassandraLinkedService object itself. 
@@ -228,8 +246,9 @@ public CassandraLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CassandraLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CassandraLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java index 5fe7c1d359c64..1a5678221f19a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java @@ -6,28 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for a Cassandra database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraSource.class, visible = true) @JsonTypeName("CassandraSource") @Fluent public final class CassandraSource extends TabularSource { /* - * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or - * Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CassandraSource"; + + /* + * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). */ @JsonProperty(value = "query") private Object query; /* - * The consistency level specifies how many Cassandra servers must respond to a read request before returning data - * to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the - * read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - * case-insensitive. + * The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. */ @JsonProperty(value = "consistencyLevel") private CassandraSourceReadConsistencyLevels consistencyLevel; @@ -38,6 +42,16 @@ public final class CassandraSource extends TabularSource { public CassandraSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) * command. Type: string (or Expression with resultType string). 
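Note on the validate() hunks throughout this patch: LOGGER.logExceptionAsError(...) is replaced with the LOGGER.atError().log(...) builder; in both forms the exception is logged and handed back so it can be thrown in a single statement. A minimal sketch of the idiom follows, using a hypothetical ExampleResource class rather than a generated model, and assuming azure-core's ClientLogger is on the classpath.

    import com.azure.core.util.logging.ClientLogger;

    public final class ExampleResource {
        private static final ClientLogger LOGGER = new ClientLogger(ExampleResource.class);

        private Object innerTypeProperties;

        public void validate() {
            if (innerTypeProperties == null) {
                // Logs at error level and returns the exception, mirroring the old
                // logExceptionAsError behavior, so the throw stays a one-liner.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property innerTypeProperties in model ExampleResource"));
            }
        }
    }
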
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java index 6f90a17737fe8..9557011e2df8e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.CassandraTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Cassandra database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraTableDataset.class, visible = true) @JsonTypeName("CassandraTable") @Fluent public final class CassandraTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CassandraTable"; + /* * Cassandra dataset properties. */ @@ -31,6 +39,16 @@ public final class CassandraTableDataset extends Dataset { public CassandraTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Cassandra dataset properties. * @@ -104,8 +122,8 @@ public CassandraTableDataset withFolder(DatasetFolder folder) { } /** - * Get the tableName property: The table name of the Cassandra database. Type: string (or Expression with - * resultType string). + * Get the tableName property: The table name of the Cassandra database. Type: string (or Expression with resultType + * string). * * @return the tableName value. */ @@ -114,8 +132,8 @@ public Object tableName() { } /** - * Set the tableName property: The table name of the Cassandra database. Type: string (or Expression with - * resultType string). + * Set the tableName property: The table name of the Cassandra database. Type: string (or Expression with resultType + * string). * * @param tableName the tableName value to set. * @return the CassandraTableDataset object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java index c86238b7ba7f4..ccc70960c81ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ChainingTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -18,10 +19,17 @@ * runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all * upstream pipeline runs. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ChainingTrigger.class, visible = true) @JsonTypeName("ChainingTrigger") @Fluent public final class ChainingTrigger extends Trigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ChainingTrigger"; + /* * Pipeline for which runs are created when all upstream pipelines complete successfully. */ @@ -41,8 +49,17 @@ public ChainingTrigger() { } /** - * Get the pipeline property: Pipeline for which runs are created when all upstream pipelines complete - * successfully. + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the pipeline property: Pipeline for which runs are created when all upstream pipelines complete successfully. * * @return the pipeline value. */ @@ -51,8 +68,7 @@ public TriggerPipelineReference pipeline() { } /** - * Set the pipeline property: Pipeline for which runs are created when all upstream pipelines complete - * successfully. + * Set the pipeline property: Pipeline for which runs are created when all upstream pipelines complete successfully. * * @param pipeline the pipeline value to set. * @return the ChainingTrigger object itself. 
@@ -144,14 +160,15 @@ public ChainingTrigger withRunDimension(String runDimension) { public void validate() { super.validate(); if (pipeline() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property pipeline in model ChainingTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property pipeline in model ChainingTrigger")); } else { pipeline().validate(); } if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model ChainingTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ChainingTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java index 708375bfe9511..199ed7b0b6515 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java @@ -80,8 +80,9 @@ public ChangeDataCaptureListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model ChangeDataCaptureListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model ChangeDataCaptureListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java index b9e961ab5a107..5a0965381e5b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java @@ -8,16 +8,24 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CmdkeySetupTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The custom setup of running cmdkey commands. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CmdkeySetup.class, visible = true) @JsonTypeName("CmdkeySetup") @Fluent public final class CmdkeySetup extends CustomSetupBase { + /* + * The type of custom setup. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CmdkeySetup"; + /* * Cmdkey command custom setup type properties. 
*/ @@ -30,6 +38,16 @@ public final class CmdkeySetup extends CustomSetupBase { public CmdkeySetup() { } + /** + * Get the type property: The type of custom setup. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Cmdkey command custom setup type properties. * @@ -117,8 +135,9 @@ public CmdkeySetup withPassword(SecretBase password) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model CmdkeySetup")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property innerTypeProperties in model CmdkeySetup")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java index 6f479a09027cd..10e405b40a904 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsEntityDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Common Data Service for Apps entity dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CommonDataServiceForAppsEntityDataset.class, + visible = true) @JsonTypeName("CommonDataServiceForAppsEntity") @Fluent public final class CommonDataServiceForAppsEntityDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CommonDataServiceForAppsEntity"; + /* * Common Data Service for Apps entity dataset properties. */ @@ -31,6 +43,16 @@ public final class CommonDataServiceForAppsEntityDataset extends Dataset { public CommonDataServiceForAppsEntityDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Common Data Service for Apps entity dataset properties. * @@ -104,8 +126,7 @@ public CommonDataServiceForAppsEntityDataset withFolder(DatasetFolder folder) { } /** - * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @return the entityName value. */ @@ -114,8 +135,7 @@ public Object entityName() { } /** - * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). 
+ * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @param entityName the entityName value to set. * @return the CommonDataServiceForAppsEntityDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java index b4801ca5fbd04..20c3122cb5bbe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Common Data Service for Apps linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CommonDataServiceForAppsLinkedService.class, + visible = true) @JsonTypeName("CommonDataServiceForApps") @Fluent public final class CommonDataServiceForAppsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CommonDataServiceForApps"; + /* * Common Data Service for Apps linked service properties. */ @@ -33,6 +45,16 @@ public final class CommonDataServiceForAppsLinkedService extends LinkedService { public CommonDataServiceForAppsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Common Data Service for Apps linked service properties. * @@ -310,9 +332,9 @@ public CommonDataServiceForAppsLinkedService withServicePrincipalId(Object servi } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -321,9 +343,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). 
+ * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the CommonDataServiceForAppsLinkedService object itself. @@ -367,8 +389,8 @@ public CommonDataServiceForAppsLinkedService withServicePrincipalCredential(Secr } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -377,8 +399,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CommonDataServiceForAppsLinkedService object itself. @@ -400,8 +422,9 @@ public CommonDataServiceForAppsLinkedService withEncryptedCredential(String encr public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CommonDataServiceForAppsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CommonDataServiceForAppsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java index 33cf9a41f1b5b..1a3948b492cb6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Common Data Service for Apps sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CommonDataServiceForAppsSink.class, + visible = true) @JsonTypeName("CommonDataServiceForAppsSink") @Fluent public final class CommonDataServiceForAppsSink extends CopySink { + /* + * Copy sink type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CommonDataServiceForAppsSink"; + /* * The write behavior for the operation. */ @@ -24,15 +36,13 @@ public final class CommonDataServiceForAppsSink extends CopySink { private DynamicsSinkWriteBehavior writeBehavior; /* - * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. - * Default is false. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* - * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression - * with resultType string). + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */ @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; @@ -43,6 +53,16 @@ public final class CommonDataServiceForAppsSink extends CopySink { public CommonDataServiceForAppsSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. * @@ -170,8 +190,9 @@ public CommonDataServiceForAppsSink withDisableMetricsCollection(Object disableM public void validate() { super.validate(); if (writeBehavior() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property writeBehavior in model CommonDataServiceForAppsSink")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property writeBehavior in model CommonDataServiceForAppsSink")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java index 2800df1847414..0d83286d6563a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java @@ -6,26 +6,36 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Common Data Service for Apps source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CommonDataServiceForAppsSource.class, + visible = true) @JsonTypeName("CommonDataServiceForAppsSource") @Fluent public final class CommonDataServiceForAppsSource extends CopySource { /* - * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & - * on-premises). Type: string (or Expression with resultType string). + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CommonDataServiceForAppsSource"; + + /* + * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). */ @JsonProperty(value = "query") private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -36,6 +46,16 @@ public final class CommonDataServiceForAppsSource extends CopySource { public CommonDataServiceForAppsSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: FetchXML is a proprietary query language that is used in Microsoft Common Data Service * for Apps (online & on-premises). Type: string (or Expression with resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java index 431e0963042ef..0c37debc2832b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java @@ -8,16 +8,24 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.LicensedComponentSetupTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The custom setup of installing 3rd party components. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ComponentSetup.class, visible = true) @JsonTypeName("ComponentSetup") @Fluent public final class ComponentSetup extends CustomSetupBase { + /* + * The type of custom setup. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ComponentSetup"; + /* * Install 3rd party component type properties. */ @@ -30,6 +38,16 @@ public final class ComponentSetup extends CustomSetupBase { public ComponentSetup() { } + /** + * Get the type property: The type of custom setup. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Install 3rd party component type properties. 
* @@ -94,8 +112,9 @@ public ComponentSetup withLicenseKey(SecretBase licenseKey) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model ComponentSetup")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ComponentSetup")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java index b70a36bd436d0..a79ef2703b6a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -19,9 +21,9 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "type", - defaultImpl = CompressionReadSettings.class) + defaultImpl = CompressionReadSettings.class, + visible = true) @JsonTypeName("CompressionReadSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "ZipDeflateReadSettings", value = ZipDeflateReadSettings.class), @@ -29,6 +31,13 @@ @JsonSubTypes.Type(name = "TarGZipReadSettings", value = TarGZipReadSettings.class) }) @Fluent public class CompressionReadSettings { + /* + * The Compression setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Compression read settings. */ @@ -39,6 +48,16 @@ public class CompressionReadSettings { * Creates an instance of CompressionReadSettings class. */ public CompressionReadSettings() { + this.type = "CompressionReadSettings"; + } + + /** + * Get the type property: The Compression setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java index 7e834d81b5e54..d46ebf04d0b43 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ConcurLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Concur Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurLinkedService.class, visible = true) @JsonTypeName("Concur") @Fluent public final class ConcurLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Concur"; + /* * Concur Service linked service properties. */ @@ -32,6 +40,16 @@ public final class ConcurLinkedService extends LinkedService { public ConcurLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Concur Service linked service properties. * @@ -222,8 +240,8 @@ public ConcurLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -232,8 +250,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ConcurLinkedService object itself. @@ -247,8 +265,8 @@ public ConcurLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -257,8 +275,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ConcurLinkedService object itself. @@ -280,8 +298,9 @@ public ConcurLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ConcurLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ConcurLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java index 05913b8bd8664..2d02aa40d819a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Concur Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurObjectDataset.class, visible = true) @JsonTypeName("ConcurObject") @Fluent public final class ConcurObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ConcurObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class ConcurObjectDataset extends Dataset { public ConcurObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java index ae91d05a11619..c2b6d0de8c94d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Concur Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurSource.class, visible = true) @JsonTypeName("ConcurSource") @Fluent public final class ConcurSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ConcurSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ConcurSource extends TabularSource { public ConcurSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java index 835f051e6ad88..325e9f073665d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java @@ -5,7 +5,9 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,11 +15,7 @@ /** * Base class for all control activities like IfCondition, ForEach , Until. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = ControlActivity.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ControlActivity.class, visible = true) @JsonTypeName("Container") @JsonSubTypes({ @JsonSubTypes.Type(name = "ExecutePipeline", value = ExecutePipelineActivity.class), @@ -34,12 +32,29 @@ @JsonSubTypes.Type(name = "WebHook", value = WebhookActivity.class) }) @Fluent public class ControlActivity extends Activity { + /* + * Type of activity. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Container"; + /** * Creates an instance of ControlActivity class. */ public ControlActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java index 67b222214b2cb..3c02937339969 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CopyActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Copy activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopyActivity.class, visible = true) @JsonTypeName("Copy") @Fluent public final class CopyActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Copy"; + /* * Copy activity properties. */ @@ -43,6 +51,16 @@ public final class CopyActivity extends ExecutionActivity { public CopyActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Copy activity properties. 
* @@ -533,8 +551,9 @@ public CopyActivity withSkipErrorFile(SkipErrorFile skipErrorFile) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model CopyActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CopyActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java index 8b2feef66b6f3..07cd220cfbb29 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,11 +19,7 @@ /** * A copy activity sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = CopySink.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopySink.class, visible = true) @JsonTypeName("CopySink") @JsonSubTypes({ @JsonSubTypes.Type(name = "DelimitedTextSink", value = DelimitedTextSink.class), @@ -71,6 +68,13 @@ @JsonSubTypes.Type(name = "SalesforceServiceCloudV2Sink", value = SalesforceServiceCloudV2Sink.class) }) @Fluent public class CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. */ @@ -78,8 +82,7 @@ public class CopySink { private Object writeBatchSize; /* - * Write batch timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "writeBatchTimeout") private Object writeBatchTimeout; @@ -91,22 +94,19 @@ public class CopySink { private Object sinkRetryCount; /* - * Sink retry wait. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "sinkRetryWait") private Object sinkRetryWait; /* - * The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType - * integer). + * The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* - * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType - * boolean). 
+ * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; @@ -121,6 +121,16 @@ public class CopySink { * Creates an instance of CopySink class. */ public CopySink() { + this.type = "CopySink"; + } + + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + public String type() { + return this.type; } /** @@ -210,8 +220,8 @@ public CopySink withSinkRetryWait(Object sinkRetryWait) { } /** - * Get the maxConcurrentConnections property: The maximum concurrent connection count for the sink data store. - * Type: integer (or Expression with resultType integer). + * Get the maxConcurrentConnections property: The maximum concurrent connection count for the sink data store. Type: + * integer (or Expression with resultType integer). * * @return the maxConcurrentConnections value. */ @@ -220,8 +230,8 @@ public Object maxConcurrentConnections() { } /** - * Set the maxConcurrentConnections property: The maximum concurrent connection count for the sink data store. - * Type: integer (or Expression with resultType integer). + * Set the maxConcurrentConnections property: The maximum concurrent connection count for the sink data store. Type: + * integer (or Expression with resultType integer). * * @param maxConcurrentConnections the maxConcurrentConnections value to set. * @return the CopySink object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java index 5dc60066f6f35..e2a044f2ff4e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,11 +19,7 @@ /** * A copy activity source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = CopySource.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopySource.class, visible = true) @JsonTypeName("CopySource") @JsonSubTypes({ @JsonSubTypes.Type(name = "AvroSource", value = AvroSource.class), @@ -67,6 +64,13 @@ @JsonSubTypes.Type(name = "SalesforceServiceCloudV2Source", value = SalesforceServiceCloudV2Source.class) }) @Fluent public class CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Source retry count. Type: integer (or Expression with resultType integer). */ @@ -74,22 +78,19 @@ public class CopySource { private Object sourceRetryCount; /* - * Source retry wait. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "sourceRetryWait") private Object sourceRetryWait; /* - * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType - * integer). + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* - * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType - * boolean). + * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; @@ -104,6 +105,16 @@ public class CopySource { * Creates an instance of CopySource class. */ public CopySource() { + this.type = "CopySource"; + } + + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java index b2b1cce51f40f..51caa89702a0b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -17,15 +19,18 @@ /** * A copy activity translator. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = CopyTranslator.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopyTranslator.class, visible = true) @JsonTypeName("CopyTranslator") @JsonSubTypes({ @JsonSubTypes.Type(name = "TabularTranslator", value = TabularTranslator.class) }) @Fluent public class CopyTranslator { + /* + * Copy translator type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * A copy activity translator. */ @@ -36,6 +41,16 @@ public class CopyTranslator { * Creates an instance of CopyTranslator class. */ public CopyTranslator() { + this.type = "CopyTranslator"; + } + + /** + * Get the type property: Copy translator type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java index 29ba8014b7434..115c5d1d15a4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Microsoft Azure Cosmos Database (CosmosDB) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbLinkedService.class, visible = true) @JsonTypeName("CosmosDb") @Fluent public final class CosmosDbLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDb"; + /* * CosmosDB linked service properties. */ @@ -32,6 +40,16 @@ public final class CosmosDbLinkedService extends LinkedService { public CosmosDbLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: CosmosDB linked service properties. * @@ -201,9 +219,8 @@ public CosmosDbLinkedService withServicePrincipalId(Object servicePrincipalId) { } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string. + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string. * * @return the servicePrincipalCredentialType value. */ @@ -212,9 +229,8 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string. + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string. * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the CosmosDbLinkedService object itself. @@ -282,9 +298,9 @@ public CosmosDbLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. 
Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -293,9 +309,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the CosmosDbLinkedService object itself. @@ -332,8 +348,8 @@ public CosmosDbLinkedService withConnectionMode(CosmosDbConnectionMode connectio } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -342,8 +358,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the CosmosDbLinkedService object itself. 
@@ -388,8 +404,9 @@ public CosmosDbLinkedService withCredential(CredentialReference credential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CosmosDbLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CosmosDbLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java index 7b822b8f0fc70..d8103fcb4c501 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiCollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The CosmosDB (MongoDB API) database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CosmosDbMongoDbApiCollectionDataset.class, + visible = true) @JsonTypeName("CosmosDbMongoDbApiCollection") @Fluent public final class CosmosDbMongoDbApiCollectionDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbMongoDbApiCollection"; + /* * CosmosDB (MongoDB API) database dataset properties. */ @@ -33,6 +45,16 @@ public final class CosmosDbMongoDbApiCollectionDataset extends Dataset { public CosmosDbMongoDbApiCollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: CosmosDB (MongoDB API) database dataset properties. 
* @@ -139,8 +161,9 @@ public CosmosDbMongoDbApiCollectionDataset withCollection(Object collection) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CosmosDbMongoDbApiCollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CosmosDbMongoDbApiCollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java index 535c47dacb488..f2f1fd953192e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for CosmosDB (MongoDB API) data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CosmosDbMongoDbApiLinkedService.class, + visible = true) @JsonTypeName("CosmosDbMongoDbApi") @Fluent public final class CosmosDbMongoDbApiLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbMongoDbApi"; + /* * CosmosDB (MongoDB API) linked service properties. */ @@ -33,6 +45,16 @@ public final class CosmosDbMongoDbApiLinkedService extends LinkedService { public CosmosDbMongoDbApiLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: CosmosDB (MongoDB API) linked service properties. 
* @@ -162,8 +184,9 @@ public CosmosDbMongoDbApiLinkedService withDatabase(Object database) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CosmosDbMongoDbApiLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CosmosDbMongoDbApiLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java index 7633abe0b96e3..e3073dc0d62b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java @@ -6,20 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity sink for a CosmosDB (MongoDB API) database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbMongoDbApiSink.class, visible = true) @JsonTypeName("CosmosDbMongoDbApiSink") @Fluent public final class CosmosDbMongoDbApiSink extends CopySink { /* - * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). - * The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression - * with resultType string). + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbMongoDbApiSink"; + + /* + * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -30,6 +36,16 @@ public final class CosmosDbMongoDbApiSink extends CopySink { public CosmosDbMongoDbApiSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: Specifies whether the document with same key to be overwritten (upsert) rather * than throw exception (insert). The default value is "insert". 
Type: string (or Expression with resultType diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java index 6bfd427f675ca..9f0af4fd5159c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for a CosmosDB (MongoDB API) database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CosmosDbMongoDbApiSource.class, + visible = true) @JsonTypeName("CosmosDbMongoDbApiSource") @Fluent public final class CosmosDbMongoDbApiSource extends CopySource { /* - * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter - * or pass an empty document ({}). Type: string (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbMongoDbApiSource"; + + /* + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */ @JsonProperty(value = "filter") private Object filter; @@ -30,23 +41,19 @@ public final class CosmosDbMongoDbApiSource extends CopySource { private MongoDbCursorMethodsProperties cursorMethods; /* - * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, - * modifying the batch size will not affect the user or the application. This property's main purpose is to avoid - * hit the limitation of response size. Type: integer (or Expression with resultType integer). + * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "batchSize") private Object batchSize; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
*/ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -57,6 +64,16 @@ public final class CosmosDbMongoDbApiSource extends CopySource { public CosmosDbMongoDbApiSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the filter property: Specifies selection filter using query operators. To return all documents in a * collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java index a51e15c21b9d3..fdfc1a9fa8d6d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbSqlApiCollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Microsoft Azure CosmosDB (SQL API) Collection dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CosmosDbSqlApiCollectionDataset.class, + visible = true) @JsonTypeName("CosmosDbSqlApiCollection") @Fluent public final class CosmosDbSqlApiCollectionDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbSqlApiCollection"; + /* * CosmosDB (SQL API) Collection dataset properties. */ @@ -33,6 +45,16 @@ public final class CosmosDbSqlApiCollectionDataset extends Dataset { public CosmosDbSqlApiCollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: CosmosDB (SQL API) Collection dataset properties. 
* @@ -139,8 +161,9 @@ public CosmosDbSqlApiCollectionDataset withCollectionName(Object collectionName) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CosmosDbSqlApiCollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CosmosDbSqlApiCollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java index 754ee7f6dd846..d8c55b30b19f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure CosmosDB (SQL API) Collection sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbSqlApiSink.class, visible = true) @JsonTypeName("CosmosDbSqlApiSink") @Fluent public final class CosmosDbSqlApiSink extends CopySink { /* - * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed - * values: insert and upsert. + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbSqlApiSink"; + + /* + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -29,6 +36,16 @@ public final class CosmosDbSqlApiSink extends CopySink { public CosmosDbSqlApiSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with * resultType string). Allowed values: insert and upsert. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java index 60fe42a41da64..2ed76a45df4f2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure CosmosDB (SQL API) Collection source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbSqlApiSource.class, visible = true) @JsonTypeName("CosmosDbSqlApiSource") @Fluent public final class CosmosDbSqlApiSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CosmosDbSqlApiSource"; + /* * SQL API query. Type: string (or Expression with resultType string). */ @@ -41,8 +49,7 @@ public final class CosmosDbSqlApiSource extends CopySource { private Object detectDatetime; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -53,6 +60,16 @@ public final class CosmosDbSqlApiSource extends CopySource { public CosmosDbSqlApiSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: SQL API query. Type: string (or Expression with resultType string). * @@ -94,8 +111,8 @@ public CosmosDbSqlApiSource withPageSize(Object pageSize) { } /** - * Get the preferredRegions property: Preferred regions. Type: array of strings (or Expression with resultType - * array of strings). + * Get the preferredRegions property: Preferred regions. Type: array of strings (or Expression with resultType array + * of strings). * * @return the preferredRegions value. */ @@ -104,8 +121,8 @@ public Object preferredRegions() { } /** - * Set the preferredRegions property: Preferred regions. Type: array of strings (or Expression with resultType - * array of strings). + * Set the preferredRegions property: Preferred regions. Type: array of strings (or Expression with resultType array + * of strings). * * @param preferredRegions the preferredRegions value to set. * @return the CosmosDbSqlApiSource object itself. @@ -116,8 +133,8 @@ public CosmosDbSqlApiSource withPreferredRegions(Object preferredRegions) { } /** - * Get the detectDatetime property: Whether detect primitive values as datetime values. Type: boolean (or - * Expression with resultType boolean). + * Get the detectDatetime property: Whether detect primitive values as datetime values. 
Type: boolean (or Expression + * with resultType boolean). * * @return the detectDatetime value. */ @@ -126,8 +143,8 @@ public Object detectDatetime() { } /** - * Set the detectDatetime property: Whether detect primitive values as datetime values. Type: boolean (or - * Expression with resultType boolean). + * Set the detectDatetime property: Whether detect primitive values as datetime values. Type: boolean (or Expression + * with resultType boolean). * * @param detectDatetime the detectDatetime value to set. * @return the CosmosDbSqlApiSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java index 0ff477e545c6d..3397a42b5275a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CouchbaseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Couchbase server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseLinkedService.class, visible = true) @JsonTypeName("Couchbase") @Fluent public final class CouchbaseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Couchbase"; + /* * Couchbase server linked service properties. */ @@ -32,6 +40,16 @@ public final class CouchbaseLinkedService extends LinkedService { public CouchbaseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Couchbase server linked service properties. * @@ -126,8 +144,8 @@ public CouchbaseLinkedService withCredString(AzureKeyVaultSecretReference credSt } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. 
* @return the CouchbaseLinkedService object itself. @@ -159,8 +177,9 @@ public CouchbaseLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CouchbaseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CouchbaseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java index b1a737e9f74c6..5d75fb9b8beec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Couchbase server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseSource.class, visible = true) @JsonTypeName("CouchbaseSource") @Fluent public final class CouchbaseSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CouchbaseSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class CouchbaseSource extends TabularSource { public CouchbaseSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java index 703215a33cfa2..050f93080fd5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Couchbase server dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseTableDataset.class, visible = true) @JsonTypeName("CouchbaseTable") @Fluent public final class CouchbaseTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CouchbaseTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class CouchbaseTableDataset extends Dataset { public CouchbaseTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java index 32e8169a05067..b04d8cb8df368 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java @@ -13,8 +13,7 @@ @Fluent public final class CreateDataFlowDebugSessionRequest { /* - * Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if - * provided. + * Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */ @JsonProperty(value = "computeType") private String computeType; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java index 75d4633e9f8de..f2824773e79b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -20,17 +21,20 @@ * The Azure Data Factory nested object which contains the information and credential which can be used to connect with * related store or compute resource. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = Credential.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Credential.class, visible = true) @JsonTypeName("Credential") @JsonSubTypes({ @JsonSubTypes.Type(name = "ManagedIdentity", value = ManagedIdentityCredential.class), @JsonSubTypes.Type(name = "ServicePrincipal", value = ServicePrincipalCredential.class) }) @Fluent public class Credential { + /* + * Type of credential. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Credential description. */ @@ -44,8 +48,7 @@ public class Credential { private List annotations; /* - * The Azure Data Factory nested object which contains the information and credential which can be used to connect - * with related store or compute resource. + * The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. */ @JsonIgnore private Map additionalProperties; @@ -54,6 +57,16 @@ public class Credential { * Creates an instance of Credential class. */ public Credential() { + this.type = "Credential"; + } + + /** + * Get the type property: Type of credential. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java index 9915730261fb4..5371cf089a063 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java @@ -6,7 +6,7 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -19,7 +19,7 @@ public final class CredentialListResponse { * List of credentials. */ @JsonProperty(value = "value", required = true) - private List value; + private List value; /* * The link to the next page of results, if any remaining results exist. @@ -38,7 +38,7 @@ public CredentialListResponse() { * * @return the value value. */ - public List value() { + public List value() { return this.value; } @@ -48,7 +48,7 @@ public List value() { * @param value the value value to set. * @return the CredentialListResponse object itself. 
*/ - public CredentialListResponse withValue(List value) { + public CredentialListResponse withValue(List value) { this.value = value; return this; } @@ -80,8 +80,8 @@ public CredentialListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model CredentialListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model CredentialListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialOperations.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialOperations.java index 4b681a7685cf8..ce6805a7a6c10 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialOperations.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialOperations.java @@ -7,6 +7,7 @@ import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; /** * Resource collection API of CredentialOperations. @@ -22,7 +23,7 @@ public interface CredentialOperations { * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of credential resources as paginated response with {@link PagedIterable}. */ - PagedIterable listByFactory(String resourceGroupName, String factoryName); + PagedIterable listByFactory(String resourceGroupName, String factoryName); /** * List credentials. @@ -35,56 +36,60 @@ public interface CredentialOperations { * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of credential resources as paginated response with {@link PagedIterable}. */ - PagedIterable listByFactory(String resourceGroupName, String factoryName, - Context context); + PagedIterable listByFactory(String resourceGroupName, String factoryName, Context context); /** - * Gets a credential. + * Creates or updates a credential. * * @param resourceGroupName The resource group name. * @param factoryName The factory name. * @param credentialName Credential name. - * @param ifNoneMatch ETag of the credential entity. Should only be specified for get. If the ETag matches the - * existing entity tag, or if * was provided, then no content will be returned. + * @param credential Credential resource definition. + * @param ifMatch ETag of the credential entity. Should only be specified for update, for which it should match + * existing entity or can be * for unconditional update. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return a credential along with {@link Response}. + * @return credential resource type along with {@link Response}. 
*/ - Response getWithResponse(String resourceGroupName, String factoryName, - String credentialName, String ifNoneMatch, Context context); + Response createOrUpdateWithResponse(String resourceGroupName, String factoryName, + String credentialName, CredentialResourceInner credential, String ifMatch, Context context); /** - * Gets a credential. + * Creates or updates a credential. * * @param resourceGroupName The resource group name. * @param factoryName The factory name. * @param credentialName Credential name. + * @param credential Credential resource definition. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return a credential. + * @return credential resource type. */ - ManagedIdentityCredentialResource get(String resourceGroupName, String factoryName, String credentialName); + CredentialResource createOrUpdate(String resourceGroupName, String factoryName, String credentialName, + CredentialResourceInner credential); /** - * Deletes a credential. + * Gets a credential. * * @param resourceGroupName The resource group name. * @param factoryName The factory name. * @param credentialName Credential name. + * @param ifNoneMatch ETag of the credential entity. Should only be specified for get. If the ETag matches the + * existing entity tag, or if * was provided, then no content will be returned. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return the {@link Response}. + * @return a credential along with {@link Response}. */ - Response deleteWithResponse(String resourceGroupName, String factoryName, String credentialName, - Context context); + Response getWithResponse(String resourceGroupName, String factoryName, String credentialName, + String ifNoneMatch, Context context); /** - * Deletes a credential. + * Gets a credential. * * @param resourceGroupName The resource group name. * @param factoryName The factory name. @@ -92,61 +97,34 @@ Response deleteWithResponse(String resourceGroupName, String factoryName, * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return a credential. */ - void delete(String resourceGroupName, String factoryName, String credentialName); - - /** - * Gets a credential. - * - * @param id the resource ID. - * @throws IllegalArgumentException thrown if parameters fail the validation. - * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. - * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return a credential along with {@link Response}. - */ - ManagedIdentityCredentialResource getById(String id); - - /** - * Gets a credential. - * - * @param id the resource ID. - * @param ifNoneMatch ETag of the credential entity. Should only be specified for get. 
If the ETag matches the - * existing entity tag, or if * was provided, then no content will be returned. - * @param context The context to associate with this operation. - * @throws IllegalArgumentException thrown if parameters fail the validation. - * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. - * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return a credential along with {@link Response}. - */ - Response getByIdWithResponse(String id, String ifNoneMatch, Context context); + CredentialResource get(String resourceGroupName, String factoryName, String credentialName); /** * Deletes a credential. * - * @param id the resource ID. + * @param resourceGroupName The resource group name. + * @param factoryName The factory name. + * @param credentialName Credential name. + * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response}. */ - void deleteById(String id); + Response deleteWithResponse(String resourceGroupName, String factoryName, String credentialName, + Context context); /** * Deletes a credential. * - * @param id the resource ID. - * @param context The context to associate with this operation. + * @param resourceGroupName The resource group name. + * @param factoryName The factory name. + * @param credentialName Credential name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. - * @return the {@link Response}. */ - Response deleteByIdWithResponse(String id, Context context); - - /** - * Begins definition for a new ManagedIdentityCredentialResource resource. - * - * @param name resource name. - * @return the first stage of the new ManagedIdentityCredentialResource definition. 
- */ - ManagedIdentityCredentialResource.DefinitionStages.Blank define(String name); + void delete(String resourceGroupName, String factoryName, String credentialName); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java index 571ad2e32783b..d22b9e799e9d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java @@ -118,12 +118,13 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model CredentialReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model CredentialReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property referenceName in model CredentialReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model CredentialReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialResource.java new file mode 100644 index 0000000000000..ea90cc5b1b5d2 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialResource.java @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; + +/** + * An immutable client-side representation of CredentialResource. + */ +public interface CredentialResource { + /** + * Gets the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + String id(); + + /** + * Gets the properties property: Properties of credentials. + * + * @return the properties value. + */ + Credential properties(); + + /** + * Gets the name property: The resource name. + * + * @return the name value. + */ + String name(); + + /** + * Gets the type property: The resource type. + * + * @return the type value. + */ + String type(); + + /** + * Gets the etag property: Etag identifies change in the resource. + * + * @return the etag value. + */ + String etag(); + + /** + * Gets the inner com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner object. + * + * @return the inner object. 
+ */ + CredentialResourceInner innerModel(); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java index 7b94f47c3560b..83282aead30de 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CustomActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Custom activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomActivity.class, visible = true) @JsonTypeName("Custom") @Fluent public final class CustomActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Custom"; + /* * Custom activity properties. */ @@ -32,6 +40,16 @@ public final class CustomActivity extends ExecutionActivity { public CustomActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Custom activity properties. * @@ -291,8 +309,9 @@ public CustomActivity withAutoUserSpecification(Object autoUserSpecification) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model CustomActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CustomActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java index 2c9e1a1112c4d..2cfe9ea593516 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Custom linked service. 
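One practical effect of the @JsonTypeInfo change above (dropping include = As.PROPERTY in favour of a visible type id plus a @JsonTypeId field on each concrete model): the discriminator is now an ordinary readable property on the model itself, initialised to the subtype's wire name. A minimal sketch using the CustomActivity model from this hunk:

import com.azure.resourcemanager.datafactory.models.CustomActivity;

class TypeDiscriminatorSketch {
    public static void main(String[] args) {
        // The type field is pre-populated with the subtype's discriminator value.
        CustomActivity activity = new CustomActivity();
        System.out.println(activity.type()); // prints "Custom"
    }
}

Because the field is annotated with @JsonTypeId, Jackson treats it as the type identifier rather than a separate property, so the discriminator should not be duplicated on the wire.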
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = CustomDataSourceLinkedService.class, + visible = true) @JsonTypeName("CustomDataSource") @Fluent public final class CustomDataSourceLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CustomDataSource"; + /* * Custom linked service properties. */ @@ -31,6 +43,16 @@ public final class CustomDataSourceLinkedService extends LinkedService { public CustomDataSourceLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the typeProperties property: Custom linked service properties. * @@ -96,8 +118,9 @@ public CustomDataSourceLinkedService withAnnotations(List annotations) { public void validate() { super.validate(); if (typeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property typeProperties in model CustomDataSourceLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property typeProperties in model CustomDataSourceLinkedService")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java index 2ae946536d1b7..4128e7775c57e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,10 +15,17 @@ /** * The custom dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomDataset.class, visible = true) @JsonTypeName("CustomDataset") @Fluent public final class CustomDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CustomDataset"; + /* * Custom dataset properties. */ @@ -30,6 +38,16 @@ public final class CustomDataset extends Dataset { public CustomDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the typeProperties property: Custom dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java index c8a4eb9c6bfd1..788f397dd1470 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.CustomEventsTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Trigger that runs every time a custom event is received. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomEventsTrigger.class, visible = true) @JsonTypeName("CustomEventsTrigger") @Fluent public final class CustomEventsTrigger extends MultiplePipelineTrigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "CustomEventsTrigger"; + /* * Custom Events Trigger properties. */ @@ -31,6 +39,16 @@ public final class CustomEventsTrigger extends MultiplePipelineTrigger { public CustomEventsTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Custom Events Trigger properties. * @@ -172,8 +190,9 @@ public CustomEventsTrigger withScope(String scope) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model CustomEventsTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model CustomEventsTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java index e8e0c270901c3..565a6fed84d4e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java @@ -5,18 +5,16 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The base definition of the custom setup. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = CustomSetupBase.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomSetupBase.class, visible = true) @JsonTypeName("CustomSetupBase") @JsonSubTypes({ @JsonSubTypes.Type(name = "CmdkeySetup", value = CmdkeySetup.class), @@ -25,10 +23,27 @@ @JsonSubTypes.Type(name = "AzPowerShellSetup", value = AzPowerShellSetup.class) }) @Immutable public class CustomSetupBase { + /* + * The type of custom setup. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /** * Creates an instance of CustomSetupBase class. */ public CustomSetupBase() { + this.type = "CustomSetupBase"; + } + + /** + * Get the type property: The type of custom setup. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java index 5df1b8d4b2c22..aabec09545fb0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java @@ -16,17 +16,13 @@ @Fluent public final class DWCopyCommandSettings { /* - * Specifies the default values for each target column in SQL DW. The default values in the property overwrite the - * DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or - * Expression with resultType array of objects). + * Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). */ @JsonProperty(value = "defaultValues") private List defaultValues; /* - * Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string - * type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": - * "'ymd'" } + * Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" } */ @JsonProperty(value = "additionalOptions") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) @@ -39,9 +35,9 @@ public DWCopyCommandSettings() { } /** - * Get the defaultValues property: Specifies the default values for each target column in SQL DW. The default - * values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default - * value. Type: array of objects (or Expression with resultType array of objects). + * Get the defaultValues property: Specifies the default values for each target column in SQL DW. The default values + * in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. + * Type: array of objects (or Expression with resultType array of objects). 
* * @return the defaultValues value. */ @@ -50,9 +46,9 @@ public List defaultValues() { } /** - * Set the defaultValues property: Specifies the default values for each target column in SQL DW. The default - * values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default - * value. Type: array of objects (or Expression with resultType array of objects). + * Set the defaultValues property: Specifies the default values for each target column in SQL DW. The default values + * in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. + * Type: array of objects (or Expression with resultType array of objects). * * @param defaultValues the defaultValues value to set. * @return the DWCopyCommandSettings object itself. @@ -63,9 +59,9 @@ public DWCopyCommandSettings withDefaultValues(List d } /** - * Get the additionalOptions property: Additional options directly passed to SQL DW in Copy Command. Type: key - * value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": - * { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + * Get the additionalOptions property: Additional options directly passed to SQL DW in Copy Command. Type: key value + * pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { + * "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. * * @return the additionalOptions value. */ @@ -74,9 +70,9 @@ public Map additionalOptions() { } /** - * Set the additionalOptions property: Additional options directly passed to SQL DW in Copy Command. Type: key - * value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": - * { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + * Set the additionalOptions property: Additional options directly passed to SQL DW in Copy Command. Type: key value + * pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { + * "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. * * @param additionalOptions the additionalOptions value to set. * @return the DWCopyCommandSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java index 2bcf2e63f9f35..ff4c7bde54563 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,11 +15,7 @@ /** * Azure Data Factory nested object which contains a flow with data movements and transformations. 
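A quick usage illustration of the DWCopyCommandSettings options reflowed above. The generic parameters are stripped in this patch view, so the sketch assumes additionalOptions is a Map<String, Object> keyed by option name (matching the example in the Javadoc); the option values shown are placeholders.

import com.azure.resourcemanager.datafactory.models.DWCopyCommandSettings;
import java.util.HashMap;
import java.util.Map;

class CopyCommandOptionsSketch {
    public static void main(String[] args) {
        // Options passed straight through to the SQL DW COPY command.
        Map<String, Object> options = new HashMap<>();
        options.put("MAXERRORS", "1000");
        options.put("DATEFORMAT", "'ymd'");

        DWCopyCommandSettings settings = new DWCopyCommandSettings().withAdditionalOptions(options);
        System.out.println(settings.additionalOptions());
    }
}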
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = DataFlow.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DataFlow.class, visible = true) @JsonTypeName("DataFlow") @JsonSubTypes({ @JsonSubTypes.Type(name = "MappingDataFlow", value = MappingDataFlow.class), @@ -26,6 +23,13 @@ @JsonSubTypes.Type(name = "WranglingDataFlow", value = WranglingDataFlow.class) }) @Fluent public class DataFlow { + /* + * Type of data flow. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * The description of the data flow. */ @@ -48,6 +52,16 @@ public class DataFlow { * Creates an instance of DataFlow class. */ public DataFlow() { + this.type = "DataFlow"; + } + + /** + * Get the type property: Type of data flow. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java index 4e60ea1ca0129..03bcffda40bd4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java @@ -131,8 +131,9 @@ public DataFlowDebugCommandPayload withExpression(String expression) { */ public void validate() { if (streamName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property streamName in model DataFlowDebugCommandPayload")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property streamName in model DataFlowDebugCommandPayload")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java index 75ac4e485aab5..213e2a3adbfd4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java @@ -63,8 +63,9 @@ public DataFlowDebugResource withName(String name) { public void validate() { super.validate(); if (properties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property properties in model DataFlowDebugResource")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model DataFlowDebugResource")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java index 1ace6606c160b..f36032916a3bb 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java @@ -80,8 +80,8 @@ public DataFlowListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model DataFlowListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model DataFlowListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java index acdf60b34a0d7..d97b7c5a8230b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java @@ -172,12 +172,13 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model DataFlowReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model DataFlowReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property referenceName in model DataFlowReference")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property referenceName in model DataFlowReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java index 6b89844ed4aa8..b1b9b30014088 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DataLakeAnalyticsUsqlActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Data Lake Analytics U-SQL activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DataLakeAnalyticsUsqlActivity.class, + visible = true) @JsonTypeName("DataLakeAnalyticsU-SQL") @Fluent public final class DataLakeAnalyticsUsqlActivity extends ExecutionActivity { + /* + * Type of activity. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DataLakeAnalyticsU-SQL"; + /* * Data Lake Analytics U-SQL activity properties. */ @@ -33,6 +45,16 @@ public final class DataLakeAnalyticsUsqlActivity extends ExecutionActivity { public DataLakeAnalyticsUsqlActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Data Lake Analytics U-SQL activity properties. * @@ -296,8 +318,9 @@ public DataLakeAnalyticsUsqlActivity withCompilationMode(Object compilationMode) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DataLakeAnalyticsUsqlActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DataLakeAnalyticsUsqlActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java index 973968a50939f..f446b726159af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DatabricksNotebookActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * DatabricksNotebook activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DatabricksNotebookActivity.class, + visible = true) @JsonTypeName("DatabricksNotebook") @Fluent public final class DatabricksNotebookActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DatabricksNotebook"; + /* * Databricks Notebook activity properties. */ @@ -33,6 +45,16 @@ public final class DatabricksNotebookActivity extends ExecutionActivity { public DatabricksNotebookActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Databricks Notebook activity properties. * @@ -115,8 +137,8 @@ public DatabricksNotebookActivity withUserProperties(List userProp } /** - * Get the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This - * path must begin with a slash. Type: string (or Expression with resultType string). + * Get the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This path + * must begin with a slash. 
Type: string (or Expression with resultType string). * * @return the notebookPath value. */ @@ -125,8 +147,8 @@ public Object notebookPath() { } /** - * Set the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This - * path must begin with a slash. Type: string (or Expression with resultType string). + * Set the notebookPath property: The absolute path of the notebook to be run in the Databricks Workspace. This path + * must begin with a slash. Type: string (or Expression with resultType string). * * @param notebookPath the notebookPath value to set. * @return the DatabricksNotebookActivity object itself. @@ -196,8 +218,9 @@ public DatabricksNotebookActivity withLibraries(List> librar public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DatabricksNotebookActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DatabricksNotebookActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java index cd6ff39562fdf..2bc960e8e7078 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkJarActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * DatabricksSparkJar activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DatabricksSparkJarActivity.class, + visible = true) @JsonTypeName("DatabricksSparkJar") @Fluent public final class DatabricksSparkJarActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DatabricksSparkJar"; + /* * Databricks SparkJar activity properties. */ @@ -33,6 +45,16 @@ public final class DatabricksSparkJarActivity extends ExecutionActivity { public DatabricksSparkJarActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Databricks SparkJar activity properties. 
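For the notebookPath property documented above, a small construction sketch. Only setters that appear in this hunk are used, and the path value is a placeholder; per the Javadoc it must begin with a slash.

import com.azure.resourcemanager.datafactory.models.DatabricksNotebookActivity;

class NotebookActivitySketch {
    public static void main(String[] args) {
        DatabricksNotebookActivity activity = new DatabricksNotebookActivity()
            // Absolute workspace path of the notebook; Type: string (or Expression with resultType string).
            .withNotebookPath("/Shared/nightly/refresh-sales");
        System.out.println(activity.notebookPath());
    }
}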
* @@ -194,8 +216,9 @@ public DatabricksSparkJarActivity withLibraries(List> librar public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DatabricksSparkJarActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DatabricksSparkJarActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java index 1c13348bbb5bc..fd2ddf33ba75b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkPythonActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * DatabricksSparkPython activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DatabricksSparkPythonActivity.class, + visible = true) @JsonTypeName("DatabricksSparkPython") @Fluent public final class DatabricksSparkPythonActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DatabricksSparkPython"; + /* * Databricks SparkPython activity properties. */ @@ -33,6 +45,16 @@ public final class DatabricksSparkPythonActivity extends ExecutionActivity { public DatabricksSparkPythonActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Databricks SparkPython activity properties. 
* @@ -194,8 +216,9 @@ public DatabricksSparkPythonActivity withLibraries(List> lib public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DatabricksSparkPythonActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DatabricksSparkPythonActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java index 7b9a281eac408..4bed438a7ca77 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java @@ -12,6 +12,7 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -22,11 +23,7 @@ * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, * folders, and documents. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = Dataset.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Dataset.class, visible = true) @JsonTypeName("Dataset") @JsonSubTypes({ @JsonSubTypes.Type(name = "AmazonS3Object", value = AmazonS3Dataset.class), @@ -135,6 +132,13 @@ @JsonSubTypes.Type(name = "ServiceNowV2Object", value = ServiceNowV2ObjectDataset.class) }) @Fluent public class Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Dataset description. */ @@ -142,15 +146,13 @@ public class Dataset { private String description; /* - * Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: - * DatasetDataElement. + * Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. */ @JsonProperty(value = "structure") private Object structure; /* - * Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), - * itemType: DatasetSchemaDataElement. + * Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. */ @JsonProperty(value = "schema") private Object schema; @@ -181,8 +183,7 @@ public class Dataset { private DatasetFolder folder; /* - * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, - * folders, and documents. + * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. */ @JsonIgnore private Map additionalProperties; @@ -191,6 +192,16 @@ public class Dataset { * Creates an instance of Dataset class. 
*/ public Dataset() { + this.type = "Dataset"; + } + + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + public String type() { + return this.type; } /** @@ -377,8 +388,8 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property linkedServiceName in model Dataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property linkedServiceName in model Dataset")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java index e10beac399d4c..345aa0f2935d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java @@ -118,8 +118,8 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model DatasetCompression")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model DatasetCompression")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java index 245073d9d767f..c8cabc573c4ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java @@ -63,8 +63,9 @@ public DatasetDebugResource withName(String name) { public void validate() { super.validate(); if (properties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property properties in model DatasetDebugResource")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property properties in model DatasetDebugResource")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java index 7b484cdf7fb8c..abbf13e5673f0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java @@ -80,8 +80,8 @@ public DatasetListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model DatasetListResponse")); + 
throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model DatasetListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java index 182488da4dffe..b3593e9b82382 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,11 +19,7 @@ /** * Dataset location. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = DatasetLocation.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DatasetLocation.class, visible = true) @JsonTypeName("DatasetLocation") @JsonSubTypes({ @JsonSubTypes.Type(name = "AzureBlobStorageLocation", value = AzureBlobStorageLocation.class), @@ -41,6 +38,13 @@ @JsonSubTypes.Type(name = "LakeHouseLocation", value = LakeHouseLocation.class) }) @Fluent public class DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Specify the folder path of dataset. Type: string (or Expression with resultType string) */ @@ -63,6 +67,16 @@ public class DatasetLocation { * Creates an instance of DatasetLocation class. */ public DatasetLocation() { + this.type = "DatasetLocation"; + } + + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + public String type() { + return this.type; } /** @@ -88,8 +102,7 @@ public DatasetLocation withFolderPath(Object folderPath) { } /** - * Get the fileName property: Specify the file name of dataset. Type: string (or Expression with resultType - * string). + * Get the fileName property: Specify the file name of dataset. Type: string (or Expression with resultType string). * * @return the fileName value. */ @@ -98,8 +111,7 @@ public Object fileName() { } /** - * Set the fileName property: Specify the file name of dataset. Type: string (or Expression with resultType - * string). + * Set the fileName property: Specify the file name of dataset. Type: string (or Expression with resultType string). * * @param fileName the fileName value to set. * @return the DatasetLocation object itself. 
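As an illustration of the folderPath/fileName pair documented above, a sketch using one of the concrete location subtypes registered in the @JsonSubTypes list. AzureBlobStorageLocation is assumed here purely as an example; its container-specific setters are not part of this hunk, so only the inherited properties are set, with placeholder values.

import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLocation;

class DatasetLocationSketch {
    public static void main(String[] args) {
        AzureBlobStorageLocation location = new AzureBlobStorageLocation();
        // Both properties accept a literal or an Expression with resultType string.
        location.withFolderPath("raw/sales/2024");
        location.withFileName("orders.csv");
        System.out.println(location.folderPath() + "/" + location.fileName());
    }
}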
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java index 36ec83cabeb5f..d5780bf1dbc2a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java @@ -107,8 +107,8 @@ public DatasetReference withParameters(Map parameters) { */ public void validate() { if (referenceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property referenceName in model DatasetReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property referenceName in model DatasetReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java index f7573ff5ac413..aa83944bc55ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,11 +19,7 @@ /** * The format definition of a storage. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = DatasetStorageFormat.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DatasetStorageFormat.class, visible = true) @JsonTypeName("DatasetStorageFormat") @JsonSubTypes({ @JsonSubTypes.Type(name = "TextFormat", value = TextFormat.class), @@ -32,6 +29,13 @@ @JsonSubTypes.Type(name = "ParquetFormat", value = ParquetFormat.class) }) @Fluent public class DatasetStorageFormat { + /* + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Serializer. Type: string (or Expression with resultType string). */ @@ -54,6 +58,16 @@ public class DatasetStorageFormat { * Creates an instance of DatasetStorageFormat class. */ public DatasetStorageFormat() { + this.type = "DatasetStorageFormat"; + } + + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java index fe656d5764398..300d71ba6f03a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DataworldLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Dataworld. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DataworldLinkedService.class, visible = true) @JsonTypeName("Dataworld") @Fluent public final class DataworldLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Dataworld"; + /* * Dataworld linked service properties. */ @@ -32,6 +40,16 @@ public final class DataworldLinkedService extends LinkedService { public DataworldLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dataworld linked service properties. * @@ -101,8 +119,8 @@ public DataworldLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -111,8 +129,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the DataworldLinkedService object itself. 
@@ -134,8 +152,9 @@ public DataworldLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DataworldLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DataworldLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java index dcaec9cd018b3..cd8e3bc176076 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.Db2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for DB2 data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2LinkedService.class, visible = true) @JsonTypeName("Db2") @Fluent public final class Db2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Db2"; + /* * DB2 linked service properties. */ @@ -32,6 +40,16 @@ public final class Db2LinkedService extends LinkedService { public Db2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: DB2 linked service properties. * @@ -79,8 +97,8 @@ public Db2LinkedService withAnnotations(List annotations) { /** * Get the connectionString property: The connection string. It is mutually exclusive with server, database, - * authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString - * or AzureKeyVaultSecretReference. + * authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or + * AzureKeyVaultSecretReference. * * @return the connectionString value. */ @@ -90,8 +108,8 @@ public Object connectionString() { /** * Set the connectionString property: The connection string. It is mutually exclusive with server, database, - * authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString - * or AzureKeyVaultSecretReference. + * authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or + * AzureKeyVaultSecretReference. * * @param connectionString the connectionString value to set. * @return the Db2LinkedService object itself. 
@@ -130,8 +148,8 @@ public Db2LinkedService withServer(Object server) { } /** - * Get the database property: Database name for connection. It is mutually exclusive with connectionString - * property. Type: string (or Expression with resultType string). + * Get the database property: Database name for connection. It is mutually exclusive with connectionString property. + * Type: string (or Expression with resultType string). * * @return the database value. */ @@ -140,8 +158,8 @@ public Object database() { } /** - * Set the database property: Database name for connection. It is mutually exclusive with connectionString - * property. Type: string (or Expression with resultType string). + * Set the database property: Database name for connection. It is mutually exclusive with connectionString property. + * Type: string (or Expression with resultType string). * * @param database the database value to set. * @return the Db2LinkedService object itself. @@ -278,9 +296,9 @@ public Db2LinkedService withCertificateCommonName(Object certificateCommonName) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString - * property. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: + * string. * * @return the encryptedCredential value. */ @@ -289,9 +307,9 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString - * property. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: + * string. * * @param encryptedCredential the encryptedCredential value to set. * @return the Db2LinkedService object itself. 
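To make the mutual-exclusivity notes above concrete: a Db2 linked service is configured either with a single connection string or with the discrete server/database style properties, not both. A sketch with placeholder values, using only setters that appear in this hunk:

import com.azure.resourcemanager.datafactory.models.Db2LinkedService;

class Db2LinkedServiceSketch {
    public static void main(String[] args) {
        // Option 1: one connection string (mutually exclusive with server, database, etc.).
        Db2LinkedService viaConnectionString = new Db2LinkedService()
            .withConnectionString("Server=db2host;Database=salesdb;AuthenticationType=Basic;UserID=demo");

        // Option 2: discrete properties instead of a connection string.
        Db2LinkedService viaProperties = new Db2LinkedService()
            .withServer("db2host")
            .withDatabase("salesdb");

        System.out.println(viaConnectionString.connectionString());
        System.out.println(viaProperties.database());
    }
}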
@@ -313,8 +331,9 @@ public Db2LinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model Db2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model Db2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java index 3512ea960c520..e2bf5f5c6fd7f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Db2 databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2Source.class, visible = true) @JsonTypeName("Db2Source") @Fluent public final class Db2Source extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Db2Source"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class Db2Source extends TabularSource { public Db2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java index 9641824309501..9b67b97d816e9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.Db2TableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Db2 table dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2TableDataset.class, visible = true) @JsonTypeName("Db2Table") @Fluent public final class Db2TableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Db2Table"; + /* * Db2 table dataset properties. */ @@ -31,6 +39,16 @@ public final class Db2TableDataset extends Dataset { public Db2TableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Db2 table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java index 9cb3a34301dd0..25ebfa5d3ed46 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DeleteActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Delete activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DeleteActivity.class, visible = true) @JsonTypeName("Delete") @Fluent public final class DeleteActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Delete"; + /* * Delete activity properties. */ @@ -31,6 +39,16 @@ public final class DeleteActivity extends ExecutionActivity { public DeleteActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Delete activity properties. 
* @@ -265,8 +283,9 @@ public DeleteActivity withStoreSettings(StoreReadSettings storeSettings) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model DeleteActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DeleteActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java index 7aa90de44de45..eb9da833cb750 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.DelimitedTextDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Delimited text dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextDataset.class, visible = true) @JsonTypeName("DelimitedText") @Fluent public final class DelimitedTextDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DelimitedText"; + /* * Delimited text dataset properties. */ @@ -31,6 +39,16 @@ public final class DelimitedTextDataset extends Dataset { public DelimitedTextDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Delimited text dataset properties. * @@ -173,10 +191,10 @@ public DelimitedTextDataset withRowDelimiter(Object rowDelimiter) { } /** - * Get the encodingName property: The code page name of the preferred encoding. If miss, the default value is - * UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link - * to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or - * Expression with resultType string). + * Get the encodingName property: The code page name of the preferred encoding. If miss, the default value is UTF-8, + * unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set + * supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + * resultType string). * * @return the encodingName value. */ @@ -185,10 +203,10 @@ public Object encodingName() { } /** - * Set the encodingName property: The code page name of the preferred encoding. If miss, the default value is - * UTF-8, unless BOM denotes another Unicode encoding. 
Refer to the name column of the table in the following link - * to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or - * Expression with resultType string). + * Set the encodingName property: The code page name of the preferred encoding. If miss, the default value is UTF-8, + * unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set + * supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + * resultType string). * * @param encodingName the encodingName value to set. * @return the DelimitedTextDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java index aefaf3a2a0be7..d49bb1280136c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Delimited text read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DelimitedTextReadSettings.class, + visible = true) @JsonTypeName("DelimitedTextReadSettings") @Fluent public final class DelimitedTextReadSettings extends FormatReadSettings { /* - * Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression - * with resultType integer). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DelimitedTextReadSettings"; + + /* + * Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "skipLineCount") private Object skipLineCount; @@ -35,6 +46,16 @@ public final class DelimitedTextReadSettings extends FormatReadSettings { public DelimitedTextReadSettings() { } + /** + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the skipLineCount property: Indicates the number of non-empty rows to skip when reading data from input * files. Type: integer (or Expression with resultType integer). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java index 8c32492c5102b..73d6c1d6a3b69 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity DelimitedText sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextSink.class, visible = true) @JsonTypeName("DelimitedTextSink") @Fluent public final class DelimitedTextSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DelimitedTextSink"; + /* * DelimitedText store settings. */ @@ -34,6 +42,16 @@ public final class DelimitedTextSink extends CopySink { public DelimitedTextSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: DelimitedText store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java index 7f38a82a86057..0e4e1856d4fed 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity DelimitedText source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextSource.class, visible = true) @JsonTypeName("DelimitedTextSource") @Fluent public final class DelimitedTextSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DelimitedTextSource"; + /* * DelimitedText store settings. */ @@ -29,8 +37,7 @@ public final class DelimitedTextSource extends CopySource { private DelimitedTextReadSettings formatSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -41,6 +48,16 @@ public final class DelimitedTextSource extends CopySource { public DelimitedTextSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: DelimitedText store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java index d3516f4d88af3..a1c604e39387e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java @@ -7,19 +7,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Delimited text write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DelimitedTextWriteSettings.class, + visible = true) @JsonTypeName("DelimitedTextWriteSettings") @Fluent public final class DelimitedTextWriteSettings extends FormatWriteSettings { /* - * Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with - * resultType boolean). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DelimitedTextWriteSettings"; + + /* + * Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "quoteAllText") private Object quoteAllText; @@ -31,15 +42,13 @@ public final class DelimitedTextWriteSettings extends FormatWriteSettings { private Object fileExtension; /* - * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or - * Expression with resultType integer). + * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* - * Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store - without partitionOptions. Type: string (or Expression with resultType string). + * Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; @@ -50,6 +59,16 @@ public final class DelimitedTextWriteSettings extends FormatWriteSettings { public DelimitedTextWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the quoteAllText property: Indicates whether string values should always be enclosed with quotes. Type: * boolean (or Expression with resultType boolean). @@ -149,8 +168,9 @@ public DelimitedTextWriteSettings withFileNamePrefix(Object fileNamePrefix) { public void validate() { super.validate(); if (fileExtension() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property fileExtension in model DelimitedTextWriteSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property fileExtension in model DelimitedTextWriteSettings")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java index 6a3f833036e6f..e31f32ad134af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java @@ -5,18 +5,16 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Referenced dependency. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = DependencyReference.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DependencyReference.class, visible = true) @JsonTypeName("DependencyReference") @JsonSubTypes({ @JsonSubTypes.Type(name = "TriggerDependencyReference", value = TriggerDependencyReference.class), @@ -25,10 +23,27 @@ value = SelfDependencyTumblingWindowTriggerReference.class) }) @Immutable public class DependencyReference { + /* + * The type of dependency reference. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /** * Creates an instance of DependencyReference class. */ public DependencyReference() { + this.type = "DependencyReference"; + } + + /** + * Get the type property: The type of dependency reference. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java index 207c4dd0db237..accf46ea507ba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java @@ -20,9 +20,7 @@ public final class DistcpSettings { private Object resourceManagerEndpoint; /* - * Specifies an existing folder path which will be used to store temp Distcp command script. 
The script file is - * generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType - * string). + * Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tempScriptPath", required = true) private Object tempScriptPath; @@ -63,8 +61,8 @@ public DistcpSettings withResourceManagerEndpoint(Object resourceManagerEndpoint /** * Get the tempScriptPath property: Specifies an existing folder path which will be used to store temp Distcp - * command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string - * (or Expression with resultType string). + * command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or + * Expression with resultType string). * * @return the tempScriptPath value. */ @@ -74,8 +72,8 @@ public Object tempScriptPath() { /** * Set the tempScriptPath property: Specifies an existing folder path which will be used to store temp Distcp - * command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string - * (or Expression with resultType string). + * command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or + * Expression with resultType string). * * @param tempScriptPath the tempScriptPath value to set. * @return the DistcpSettings object itself. @@ -114,12 +112,13 @@ public DistcpSettings withDistcpOptions(Object distcpOptions) { */ public void validate() { if (resourceManagerEndpoint() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property resourceManagerEndpoint in model DistcpSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property resourceManagerEndpoint in model DistcpSettings")); } if (tempScriptPath() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property tempScriptPath in model DistcpSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property tempScriptPath in model DistcpSettings")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java index df5155293ed33..df2eec8aae323 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DocumentDbCollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Microsoft Azure Document Database Collection dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DocumentDbCollectionDataset.class, + visible = true) @JsonTypeName("DocumentDbCollection") @Fluent public final class DocumentDbCollectionDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DocumentDbCollection"; + /* * DocumentDB Collection dataset properties. */ @@ -33,6 +45,16 @@ public final class DocumentDbCollectionDataset extends Dataset { public DocumentDbCollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: DocumentDB Collection dataset properties. * @@ -139,8 +161,9 @@ public DocumentDbCollectionDataset withCollectionName(Object collectionName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DocumentDbCollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DocumentDbCollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java index bb3dabfd22f2e..1bacb0cb6ea2d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Document Database Collection sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DocumentDbCollectionSink.class, + visible = true) @JsonTypeName("DocumentDbCollectionSink") @Fluent public final class DocumentDbCollectionSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DocumentDbCollectionSink"; + /* * Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). */ @@ -23,8 +35,7 @@ public final class DocumentDbCollectionSink extends CopySink { private Object nestingSeparator; /* - * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed - * values: insert and upsert. + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. 
*/ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -35,6 +46,16 @@ public final class DocumentDbCollectionSink extends CopySink { public DocumentDbCollectionSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the nestingSeparator property: Nested properties separator. Default is . (dot). Type: string (or Expression * with resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java index da5827fd8bf87..83358570d3d2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Document Database Collection source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DocumentDbCollectionSource.class, + visible = true) @JsonTypeName("DocumentDbCollectionSource") @Fluent public final class DocumentDbCollectionSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DocumentDbCollectionSource"; + /* * Documents query. Type: string (or Expression with resultType string). */ @@ -29,15 +41,13 @@ public final class DocumentDbCollectionSource extends CopySource { private Object nestingSeparator; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -48,6 +58,16 @@ public final class DocumentDbCollectionSource extends CopySource { public DocumentDbCollectionSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Documents query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java index 5d38dee160b08..e235ebcce3993 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DrillLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Drill server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillLinkedService.class, visible = true) @JsonTypeName("Drill") @Fluent public final class DrillLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Drill"; + /* * Drill server linked service properties. */ @@ -32,6 +40,16 @@ public final class DrillLinkedService extends LinkedService { public DrillLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Drill server linked service properties. * @@ -126,8 +144,8 @@ public DrillLinkedService withPwd(AzureKeyVaultSecretReference pwd) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the DrillLinkedService object itself. 
@@ -159,8 +177,9 @@ public DrillLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DrillLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DrillLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java index 5ae785f7d1c8e..c26935daed638 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Drill server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillSource.class, visible = true) @JsonTypeName("DrillSource") @Fluent public final class DrillSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DrillSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class DrillSource extends TabularSource { public DrillSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java index 45f0b2538fd87..a779f73af710e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.DrillDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Drill server dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillTableDataset.class, visible = true) @JsonTypeName("DrillTable") @Fluent public final class DrillTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DrillTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class DrillTableDataset extends Dataset { public DrillTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java index 094c6e5a0df76..b71b66e3ae7c5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Dynamics AX linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DynamicsAXLinkedService.class, + visible = true) @JsonTypeName("DynamicsAX") @Fluent public final class DynamicsAXLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsAX"; + /* * Dynamics AX linked service properties. */ @@ -32,6 +44,16 @@ public final class DynamicsAXLinkedService extends LinkedService { public DynamicsAXLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics AX linked service properties. * @@ -205,8 +227,8 @@ public DynamicsAXLinkedService withAadResourceId(Object aadResourceId) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -215,8 +237,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. 
Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the DynamicsAXLinkedService object itself. @@ -238,8 +260,9 @@ public DynamicsAXLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DynamicsAXLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DynamicsAXLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java index 5e6a7162c0d67..6b6d440fe4e22 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The path of the Dynamics AX OData entity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DynamicsAXResourceDataset.class, + visible = true) @JsonTypeName("DynamicsAXResource") @Fluent public final class DynamicsAXResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsAXResource"; + /* * Dynamics AX OData resource dataset properties. */ @@ -32,6 +44,16 @@ public final class DynamicsAXResourceDataset extends Dataset { public DynamicsAXResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics AX OData resource dataset properties. 
* @@ -138,8 +160,9 @@ public DynamicsAXResourceDataset withPath(Object path) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DynamicsAXResourceDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DynamicsAXResourceDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java index 7e1d46926b09d..6bfae080dba48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Dynamics AX source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsAXSource.class, visible = true) @JsonTypeName("DynamicsAXSource") @Fluent public final class DynamicsAXSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsAXSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -23,9 +31,7 @@ public final class DynamicsAXSource extends TabularSource { private Object query; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -36,6 +42,16 @@ public final class DynamicsAXSource extends TabularSource { public DynamicsAXSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java index b111e67de0c09..529cde23fc77d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmEntityDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Dynamics CRM entity dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DynamicsCrmEntityDataset.class, + visible = true) @JsonTypeName("DynamicsCrmEntity") @Fluent public final class DynamicsCrmEntityDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsCrmEntity"; + /* * Dynamics CRM entity dataset properties. */ @@ -31,6 +43,16 @@ public final class DynamicsCrmEntityDataset extends Dataset { public DynamicsCrmEntityDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics CRM entity dataset properties. * @@ -104,8 +126,7 @@ public DynamicsCrmEntityDataset withFolder(DatasetFolder folder) { } /** - * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @return the entityName value. */ @@ -114,8 +135,7 @@ public Object entityName() { } /** - * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @param entityName the entityName value to set. * @return the DynamicsCrmEntityDataset object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java index 4f9679ff1fcb2..77761f313a7f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Dynamics CRM linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = DynamicsCrmLinkedService.class, + visible = true) @JsonTypeName("DynamicsCrm") @Fluent public final class DynamicsCrmLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsCrm"; + /* * Dynamics CRM linked service properties. */ @@ -32,6 +44,16 @@ public final class DynamicsCrmLinkedService extends LinkedService { public DynamicsCrmLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics CRM linked service properties. * @@ -79,8 +101,8 @@ public DynamicsCrmLinkedService withAnnotations(List annotations) { /** * Get the deploymentType property: The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM - * Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with - * resultType string). + * Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType + * string). * * @return the deploymentType value. */ @@ -90,8 +112,8 @@ public Object deploymentType() { /** * Set the deploymentType property: The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM - * Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with - * resultType string). + * Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType + * string). * * @param deploymentType the deploymentType value to set. * @return the DynamicsCrmLinkedService object itself. @@ -181,8 +203,8 @@ public DynamicsCrmLinkedService withServiceUri(Object serviceUri) { /** * Get the organizationName property: The organization name of the Dynamics CRM instance. The property is required - * for on-prem and required for online when there are more than one Dynamics CRM instances associated with the - * user. Type: string (or Expression with resultType string). + * for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. 
+ * Type: string (or Expression with resultType string). * * @return the organizationName value. */ @@ -192,8 +214,8 @@ public Object organizationName() { /** * Set the organizationName property: The organization name of the Dynamics CRM instance. The property is required - * for on-prem and required for online when there are more than one Dynamics CRM instances associated with the - * user. Type: string (or Expression with resultType string). + * for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. + * Type: string (or Expression with resultType string). * * @param organizationName the organizationName value to set. * @return the DynamicsCrmLinkedService object itself. @@ -307,9 +329,9 @@ public DynamicsCrmLinkedService withServicePrincipalId(Object servicePrincipalId } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -318,9 +340,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the DynamicsCrmLinkedService object itself. @@ -363,8 +385,8 @@ public DynamicsCrmLinkedService withServicePrincipalCredential(SecretBase servic } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -373,8 +395,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the DynamicsCrmLinkedService object itself. 
@@ -396,8 +418,9 @@ public DynamicsCrmLinkedService withEncryptedCredential(String encryptedCredenti public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DynamicsCrmLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DynamicsCrmLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java index e346384bc320b..f1770e7005049 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java @@ -7,16 +7,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Dynamics CRM sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsCrmSink.class, visible = true) @JsonTypeName("DynamicsCrmSink") @Fluent public final class DynamicsCrmSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsCrmSink"; + /* * The write behavior for the operation. */ @@ -24,15 +32,13 @@ public final class DynamicsCrmSink extends CopySink { private DynamicsSinkWriteBehavior writeBehavior; /* - * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. - * Default is false. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* - * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression - * with resultType string). + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */ @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; @@ -43,6 +49,16 @@ public final class DynamicsCrmSink extends CopySink { public DynamicsCrmSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. 
* @@ -170,8 +186,8 @@ public DynamicsCrmSink withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); if (writeBehavior() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property writeBehavior in model DynamicsCrmSink")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property writeBehavior in model DynamicsCrmSink")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java index 9ee6826307969..bd176718ee441 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java @@ -6,26 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Dynamics CRM source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsCrmSource.class, visible = true) @JsonTypeName("DynamicsCrmSource") @Fluent public final class DynamicsCrmSource extends CopySource { /* - * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: - * string (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsCrmSource"; + + /* + * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). */ @JsonProperty(value = "query") private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -36,6 +42,16 @@ public final class DynamicsCrmSource extends CopySource { public DynamicsCrmSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online * & on-premises). Type: string (or Expression with resultType string). 
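[Editor's note] The DynamicsCrmSink and DynamicsCrmSource hunks above replace the implicit include = JsonTypeInfo.As.PROPERTY discriminator with a visible "type" property backed by a @JsonTypeId field, with defaultImpl pointing at the class itself. The following is a minimal, self-contained sketch of that Jackson pattern; BaseSink and ExampleSink are invented stand-ins (not SDK types), intended only to show how the discriminator round-trips once visible = true is set.

```java
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class VisibleTypeIdSketch {
    /** Base type: the "type" discriminator is written as a JSON property and kept visible to the POJO. */
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BaseSink.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "ExampleSink", value = ExampleSink.class) })
    public static class BaseSink {
        public String type() {
            return "BaseSink";
        }
    }

    /** Subtype: the @JsonTypeId field both supplies the discriminator on write and receives it on read. */
    @JsonTypeName("ExampleSink")
    public static final class ExampleSink extends BaseSink {
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "ExampleSink";

        @JsonProperty("writeBehavior")
        private String writeBehavior;

        @Override
        public String type() {
            return this.type;
        }

        public ExampleSink withWriteBehavior(String writeBehavior) {
            this.writeBehavior = writeBehavior;
            return this;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        String json = mapper.writeValueAsString(new ExampleSink().withWriteBehavior("Upsert"));
        System.out.println(json); // {"type":"ExampleSink","writeBehavior":"Upsert"} - "type" is written exactly once

        BaseSink roundTrip = mapper.readValue(json, BaseSink.class);
        System.out.println(roundTrip.type()); // ExampleSink - the visible discriminator resolves the subtype
    }
}
```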
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java index cebd51e792dcf..b538774f1c17f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsEntityDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Dynamics entity dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsEntityDataset.class, visible = true) @JsonTypeName("DynamicsEntity") @Fluent public final class DynamicsEntityDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsEntity"; + /* * Dynamics entity dataset properties. */ @@ -31,6 +39,16 @@ public final class DynamicsEntityDataset extends Dataset { public DynamicsEntityDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics entity dataset properties. * @@ -104,8 +122,7 @@ public DynamicsEntityDataset withFolder(DatasetFolder folder) { } /** - * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Get the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @return the entityName value. */ @@ -114,8 +131,7 @@ public Object entityName() { } /** - * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType - * string). + * Set the entityName property: The logical name of the entity. Type: string (or Expression with resultType string). * * @param entityName the entityName value to set. * @return the DynamicsEntityDataset object itself. 
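[Editor's note] Across these files the generated validate() methods move from LOGGER.logExceptionAsError(...) to the builder-style LOGGER.atError().log(...), still throwing the exception that gets logged. Below is a hedged sketch of that idiom outside the generated code, assuming com.azure:azure-core is on the classpath; the Widget model and its single property are invented for illustration, and only the logging/throw pattern mirrors the hunks above.

```java
import com.azure.core.util.logging.ClientLogger;

public final class Widget {
    private static final ClientLogger LOGGER = new ClientLogger(Widget.class);

    private String name;

    public Widget withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Mirrors the generated pattern: log the problem at error level and throw the same exception.
     */
    public void validate() {
        if (name == null) {
            throw LOGGER.atError()
                .log(new IllegalArgumentException("Missing required property name in model Widget"));
        }
    }

    public static void main(String[] args) {
        new Widget().withName("example").validate(); // passes
        new Widget().validate();                     // logs at error level and throws IllegalArgumentException
    }
}
```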
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java index d6a8fff5b6ea7..90ddde44eabfa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Dynamics linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsLinkedService.class, visible = true) @JsonTypeName("Dynamics") @Fluent public final class DynamicsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Dynamics"; + /* * Dynamics linked service properties. */ @@ -32,6 +40,16 @@ public final class DynamicsLinkedService extends LinkedService { public DynamicsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Dynamics linked service properties. * @@ -103,8 +121,8 @@ public DynamicsLinkedService withDeploymentType(Object deploymentType) { } /** - * Get the hostname property: The host name of the on-premises Dynamics server. The property is required for - * on-prem and not allowed for online. Type: string (or Expression with resultType string). + * Get the hostname property: The host name of the on-premises Dynamics server. The property is required for on-prem + * and not allowed for online. Type: string (or Expression with resultType string). * * @return the hostname value. */ @@ -113,8 +131,8 @@ public Object hostname() { } /** - * Set the hostname property: The host name of the on-premises Dynamics server. The property is required for - * on-prem and not allowed for online. Type: string (or Expression with resultType string). + * Set the hostname property: The host name of the on-premises Dynamics server. The property is required for on-prem + * and not allowed for online. Type: string (or Expression with resultType string). * * @param hostname the hostname value to set. * @return the DynamicsLinkedService object itself. @@ -232,8 +250,8 @@ public DynamicsLinkedService withAuthenticationType(Object authenticationType) { } /** - * Get the username property: User name to access the Dynamics instance. Type: string (or Expression with - * resultType string). + * Get the username property: User name to access the Dynamics instance. Type: string (or Expression with resultType + * string). * * @return the username value. */ @@ -242,8 +260,8 @@ public Object username() { } /** - * Set the username property: User name to access the Dynamics instance. 
Type: string (or Expression with - * resultType string). + * Set the username property: User name to access the Dynamics instance. Type: string (or Expression with resultType + * string). * * @param username the username value to set. * @return the DynamicsLinkedService object itself. @@ -305,9 +323,9 @@ public DynamicsLinkedService withServicePrincipalId(Object servicePrincipalId) { } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -316,9 +334,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the DynamicsLinkedService object itself. @@ -361,8 +379,8 @@ public DynamicsLinkedService withServicePrincipalCredential(SecretBase servicePr } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -371,8 +389,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the DynamicsLinkedService object itself. 
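[Editor's note] For orientation, the fluent with* setters whose javadoc is re-wrapped above chain in the usual generated style. The construction below is a speculative usage sketch: the literal values are placeholders, and withServicePrincipalCredentialType is inferred from the documented getter/setter pair rather than shown verbatim in these hunks.

```java
import com.azure.resourcemanager.datafactory.models.DynamicsLinkedService;

public final class DynamicsLinkedServiceSketch {
    public static void main(String[] args) {
        // Placeholder values; the generated setters take Object, so either literal strings
        // or Data Factory expressions can be supplied for these properties.
        DynamicsLinkedService linkedService = new DynamicsLinkedService()
            .withDeploymentType("Online")
            .withAuthenticationType("AADServicePrincipal")
            .withServicePrincipalId("<service-principal-client-id>")
            .withServicePrincipalCredentialType("ServicePrincipalKey");

        System.out.println(linkedService.type()); // "Dynamics" - the new @JsonTypeId-backed discriminator
    }
}
```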
@@ -417,8 +435,9 @@ public DynamicsLinkedService withCredential(CredentialReference credential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model DynamicsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model DynamicsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java index 29f1fe818e069..cb9c68352dbf2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java @@ -7,16 +7,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Dynamics sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsSink.class, visible = true) @JsonTypeName("DynamicsSink") @Fluent public final class DynamicsSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsSink"; + /* * The write behavior for the operation. */ @@ -24,15 +32,13 @@ public final class DynamicsSink extends CopySink { private DynamicsSinkWriteBehavior writeBehavior; /* - * The flag indicating whether ignore null values from input dataset (except key fields) during write operation. - * Default is false. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* - * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression - * with resultType string). + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */ @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; @@ -43,6 +49,16 @@ public final class DynamicsSink extends CopySink { public DynamicsSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. 
* @@ -170,8 +186,8 @@ public DynamicsSink withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); if (writeBehavior() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property writeBehavior in model DynamicsSink")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property writeBehavior in model DynamicsSink")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java index 3e0fe70872fc2..0fd68abd15fca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java @@ -6,26 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Dynamics source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsSource.class, visible = true) @JsonTypeName("DynamicsSource") @Fluent public final class DynamicsSource extends CopySource { /* - * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string - * (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "DynamicsSource"; + + /* + * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). */ @JsonProperty(value = "query") private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -37,8 +43,18 @@ public DynamicsSource() { } /** - * Get the query property: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online - * & on-premises). Type: string (or Expression with resultType string). + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the query property: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & + * on-premises). Type: string (or Expression with resultType string). * * @return the query value. */ @@ -47,8 +63,8 @@ public Object query() { } /** - * Set the query property: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online - * & on-premises). Type: string (or Expression with resultType string). + * Set the query property: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & + * on-premises). 
Type: string (or Expression with resultType string). * * @param query the query value to set. * @return the DynamicsSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java index e4f6f37c158b3..58136fb9e0a5e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.EloquaLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Eloqua server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaLinkedService.class, visible = true) @JsonTypeName("Eloqua") @Fluent public final class EloquaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Eloqua"; + /* * Eloqua server linked service properties. */ @@ -32,6 +40,16 @@ public final class EloquaLinkedService extends LinkedService { public EloquaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Eloqua server linked service properties. * @@ -199,8 +217,8 @@ public EloquaLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -209,8 +227,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the EloquaLinkedService object itself. @@ -224,8 +242,8 @@ public EloquaLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -234,8 +252,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the EloquaLinkedService object itself. @@ -257,8 +275,9 @@ public EloquaLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model EloquaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model EloquaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java index 46730008bd39b..58054d6fb2527 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Eloqua server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaObjectDataset.class, visible = true) @JsonTypeName("EloquaObject") @Fluent public final class EloquaObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "EloquaObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class EloquaObjectDataset extends Dataset { public EloquaObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java index 4a368f2f4169f..d911ff07ddec8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Eloqua server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaSource.class, visible = true) @JsonTypeName("EloquaSource") @Fluent public final class EloquaSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "EloquaSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class EloquaSource extends TabularSource { public EloquaSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java index 6eedcc9e596bc..783e9f9dc42ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java @@ -32,8 +32,7 @@ public final class EncryptionConfiguration { private String keyVersion; /* - * User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity - * will be used. + * User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used. */ @JsonProperty(value = "identity") private CmkIdentityDefinition identity; @@ -105,8 +104,8 @@ public EncryptionConfiguration withKeyVersion(String keyVersion) { } /** - * Get the identity property: User assigned identity to use to authenticate to customer's key vault. If not - * provided Managed Service Identity will be used. + * Get the identity property: User assigned identity to use to authenticate to customer's key vault. If not provided + * Managed Service Identity will be used. * * @return the identity value. */ @@ -115,8 +114,8 @@ public CmkIdentityDefinition identity() { } /** - * Set the identity property: User assigned identity to use to authenticate to customer's key vault. If not - * provided Managed Service Identity will be used. 
+ * Set the identity property: User assigned identity to use to authenticate to customer's key vault. If not provided + * Managed Service Identity will be used. * * @param identity the identity value to set. * @return the EncryptionConfiguration object itself. @@ -133,12 +132,14 @@ public EncryptionConfiguration withIdentity(CmkIdentityDefinition identity) { */ public void validate() { if (keyName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property keyName in model EncryptionConfiguration")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property keyName in model EncryptionConfiguration")); } if (vaultBaseUrl() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property vaultBaseUrl in model EncryptionConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property vaultBaseUrl in model EncryptionConfiguration")); } if (identity() != null) { identity().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java index 9fb475e6a1df5..c061df854c4f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java @@ -8,16 +8,28 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.EnvironmentVariableSetupTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The custom setup of setting environment variable. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = EnvironmentVariableSetup.class, + visible = true) @JsonTypeName("EnvironmentVariableSetup") @Fluent public final class EnvironmentVariableSetup extends CustomSetupBase { + /* + * The type of custom setup. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "EnvironmentVariableSetup"; + /* * Add environment variable type properties. */ @@ -30,6 +42,16 @@ public final class EnvironmentVariableSetup extends CustomSetupBase { public EnvironmentVariableSetup() { } + /** + * Get the type property: The type of custom setup. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Add environment variable type properties. 
* @@ -94,8 +116,9 @@ public EnvironmentVariableSetup withVariableValue(String variableValue) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model EnvironmentVariableSetup")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model EnvironmentVariableSetup")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java index f2992d63bd1ae..ee5eb4a35f983 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.ExcelDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Excel dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExcelDataset.class, visible = true) @JsonTypeName("Excel") @Fluent public final class ExcelDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Excel"; + /* * Excel dataset properties. */ @@ -31,6 +39,16 @@ public final class ExcelDataset extends Dataset { public ExcelDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Excel dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java index beefc71b5a2fe..f810ac63ca9be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity excel source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExcelSource.class, visible = true) @JsonTypeName("ExcelSource") @Fluent public final class ExcelSource extends CopySource { + /* + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ExcelSource"; + /* * Excel store settings. */ @@ -23,8 +31,7 @@ public final class ExcelSource extends CopySource { private StoreReadSettings storeSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class ExcelSource extends CopySource { public ExcelSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Excel store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java index c05d673fc01d3..8fcf24b204e94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ExecuteDataFlowActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Execute data flow activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ExecuteDataFlowActivity.class, + visible = true) @JsonTypeName("ExecuteDataFlow") @Fluent public final class ExecuteDataFlowActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ExecuteDataFlow"; + /* * Execute data flow activity properties. */ @@ -31,6 +43,16 @@ public final class ExecuteDataFlowActivity extends ExecutionActivity { public ExecuteDataFlowActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Execute data flow activity properties. * @@ -255,8 +277,8 @@ public ExecuteDataFlowActivity withContinueOnError(Object continueOnError) { } /** - * Get the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the - * same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). + * Get the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the same + * save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). * * @return the runConcurrently value. 
*/ @@ -265,8 +287,8 @@ public Object runConcurrently() { } /** - * Set the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the - * same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). + * Set the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the same + * save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). * * @param runConcurrently the runConcurrently value to set. * @return the ExecuteDataFlowActivity object itself. @@ -280,8 +302,8 @@ public ExecuteDataFlowActivity withRunConcurrently(Object runConcurrently) { } /** - * Get the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the - * sink. Type: integer (or Expression with resultType integer). + * Get the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the sink. + * Type: integer (or Expression with resultType integer). * * @return the sourceStagingConcurrency value. */ @@ -290,8 +312,8 @@ public Object sourceStagingConcurrency() { } /** - * Set the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the - * sink. Type: integer (or Expression with resultType integer). + * Set the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the sink. + * Type: integer (or Expression with resultType integer). * * @param sourceStagingConcurrency the sourceStagingConcurrency value to set. * @return the ExecuteDataFlowActivity object itself. @@ -313,8 +335,9 @@ public ExecuteDataFlowActivity withSourceStagingConcurrency(Object sourceStaging public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ExecuteDataFlowActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ExecuteDataFlowActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java index 2a72b40c93fc6..e16002ac00889 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java @@ -13,15 +13,13 @@ @Fluent public final class ExecuteDataFlowActivityTypePropertiesCompute { /* - * Compute type of the cluster which will execute data flow job. Possible values include: 'General', - * 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string) + * Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string) */ @JsonProperty(value = "computeType") private Object computeType; /* - * Core count of the cluster which will execute data flow job. 
Supported values are: 8, 16, 32, 48, 80, 144 and - * 272. Type: integer (or Expression with resultType integer) + * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer) */ @JsonProperty(value = "coreCount") private Object coreCount; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java index 5becefd302432..4d93abec05343 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ExecutePipelineActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Execute pipeline activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ExecutePipelineActivity.class, + visible = true) @JsonTypeName("ExecutePipeline") @Fluent public final class ExecutePipelineActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ExecutePipeline"; + /* * Execute pipeline activity policy. */ @@ -38,6 +50,16 @@ public final class ExecutePipelineActivity extends ControlActivity { public ExecutePipelineActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the policy property: Execute pipeline activity policy. 
* @@ -204,8 +226,9 @@ public void validate() { policy().validate(); } if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ExecutePipelineActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ExecutePipelineActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java index 1535cb0e2055b..0136489a22175 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ExecuteSsisPackageActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Execute SSIS package activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ExecuteSsisPackageActivity.class, + visible = true) @JsonTypeName("ExecuteSSISPackage") @Fluent public final class ExecuteSsisPackageActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ExecuteSSISPackage"; + /* * Execute SSIS package activity properties. */ @@ -33,6 +45,16 @@ public final class ExecuteSsisPackageActivity extends ExecutionActivity { public ExecuteSsisPackageActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Execute SSIS package activity properties. 
* @@ -407,8 +429,9 @@ public ExecuteSsisPackageActivity withLogLocation(SsisLogLocation logLocation) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ExecuteSsisPackageActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ExecuteSsisPackageActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java index 24446e757bd9a..6baf85db56851 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ExecutePowerQueryActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Execute power query activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ExecuteWranglingDataflowActivity.class, + visible = true) @JsonTypeName("ExecuteWranglingDataflow") @Fluent public final class ExecuteWranglingDataflowActivity extends Activity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ExecuteWranglingDataflow"; + /* * Execute power query activity properties. */ @@ -38,6 +50,16 @@ public final class ExecuteWranglingDataflowActivity extends Activity { public ExecuteWranglingDataflowActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Execute power query activity properties. * @@ -312,8 +334,8 @@ public ExecuteWranglingDataflowActivity withContinueOnError(Object continueOnErr } /** - * Get the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the - * same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). + * Get the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the same + * save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). * * @return the runConcurrently value. */ @@ -322,8 +344,8 @@ public Object runConcurrently() { } /** - * Set the runConcurrently property: Concurrent run setting used for data flow execution. Allows sinks with the - * same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). + * Set the runConcurrently property: Concurrent run setting used for data flow execution. 
Allows sinks with the same + * save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). * * @param runConcurrently the runConcurrently value to set. * @return the ExecuteWranglingDataflowActivity object itself. @@ -337,8 +359,8 @@ public ExecuteWranglingDataflowActivity withRunConcurrently(Object runConcurrent } /** - * Get the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the - * sink. Type: integer (or Expression with resultType integer). + * Get the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the sink. + * Type: integer (or Expression with resultType integer). * * @return the sourceStagingConcurrency value. */ @@ -347,8 +369,8 @@ public Object sourceStagingConcurrency() { } /** - * Set the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the - * sink. Type: integer (or Expression with resultType integer). + * Set the sourceStagingConcurrency property: Specify number of parallel staging for sources applicable to the sink. + * Type: integer (or Expression with resultType integer). * * @param sourceStagingConcurrency the sourceStagingConcurrency value to set. * @return the ExecuteWranglingDataflowActivity object itself. @@ -370,8 +392,9 @@ public ExecuteWranglingDataflowActivity withSourceStagingConcurrency(Object sour public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ExecuteWranglingDataflowActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ExecuteWranglingDataflowActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java index 0398a6a76bb88..82e839e1947f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,11 +15,7 @@ /** * Base class for all execution activities. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = ExecutionActivity.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExecutionActivity.class, visible = true) @JsonTypeName("Execution") @JsonSubTypes({ @JsonSubTypes.Type(name = "Copy", value = CopyActivity.class), @@ -49,6 +46,13 @@ @JsonSubTypes.Type(name = "SparkJob", value = SynapseSparkJobDefinitionActivity.class) }) @Fluent public class ExecutionActivity extends Activity { + /* + * Type of activity. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Execution"; + /* * Linked service reference. */ @@ -67,6 +71,16 @@ public class ExecutionActivity extends Activity { public ExecutionActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the linkedServiceName property: Linked service reference. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java index 8f1a61f3dfc92..3e0abc822f319 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -17,11 +19,7 @@ /** * Export command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = ExportSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExportSettings.class, visible = true) @JsonTypeName("ExportSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "SnowflakeExportCopyCommand", value = SnowflakeExportCopyCommand.class), @@ -30,6 +28,13 @@ value = AzureDatabricksDeltaLakeExportCommand.class) }) @Fluent public class ExportSettings { + /* + * The export setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Export command settings. */ @@ -40,6 +45,16 @@ public class ExportSettings { * Creates an instance of ExportSettings class. */ public ExportSettings() { + this.type = "ExportSettings"; + } + + /** + * Get the type property: The export setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java index 7f240b0614ea0..8e3ebedced8b1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java @@ -54,8 +54,9 @@ public List exposureControlRequests() { */ public void validate() { if (exposureControlRequests() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property exposureControlRequests in model ExposureControlBatchRequest")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property exposureControlRequests in model ExposureControlBatchRequest")); } else { exposureControlRequests().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java index 6a55f18014d02..47c0c2f6ed726 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java @@ -78,8 +78,8 @@ public Expression withValue(String value) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model Expression")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model Expression")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java index cc8a732e18a2e..bcf60bf662149 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Factory's GitHub repo information. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = FactoryGitHubConfiguration.class, + visible = true) @JsonTypeName("FactoryGitHubConfiguration") @Fluent public final class FactoryGitHubConfiguration extends FactoryRepoConfiguration { + /* + * Type of repo configuration. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FactoryGitHubConfiguration"; + /* * GitHub Enterprise host name. For example: `https://github.mydomain.com` */ @@ -40,6 +52,16 @@ public final class FactoryGitHubConfiguration extends FactoryRepoConfiguration { public FactoryGitHubConfiguration() { } + /** + * Get the type property: Type of repo configuration. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the hostname property: GitHub Enterprise host name. For example: `https://github.mydomain.com`. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java index 6bceb6c1d4d35..669dea6c1f331 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java @@ -112,8 +112,8 @@ public FactoryIdentity withUserAssignedIdentities(Map userAssign */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model FactoryIdentity")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model FactoryIdentity")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java index e2a11533954fe..ef3f83299f6de 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java @@ -80,8 +80,8 @@ public FactoryListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model FactoryListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model FactoryListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java index db5581946d27d..3bdcc7a5136b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import 
com.fasterxml.jackson.annotation.JsonTypeName; @@ -16,15 +17,22 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "type", - defaultImpl = FactoryRepoConfiguration.class) + defaultImpl = FactoryRepoConfiguration.class, + visible = true) @JsonTypeName("FactoryRepoConfiguration") @JsonSubTypes({ @JsonSubTypes.Type(name = "FactoryVSTSConfiguration", value = FactoryVstsConfiguration.class), @JsonSubTypes.Type(name = "FactoryGitHubConfiguration", value = FactoryGitHubConfiguration.class) }) @Fluent public class FactoryRepoConfiguration { + /* + * Type of repo configuration. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Account name. */ @@ -65,6 +73,16 @@ public class FactoryRepoConfiguration { * Creates an instance of FactoryRepoConfiguration class. */ public FactoryRepoConfiguration() { + this.type = "FactoryRepoConfiguration"; + } + + /** + * Get the type property: Type of repo configuration. + * + * @return the type value. + */ + public String type() { + return this.type; } /** @@ -194,20 +212,24 @@ public FactoryRepoConfiguration withDisablePublish(Boolean disablePublish) { */ public void validate() { if (accountName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property accountName in model FactoryRepoConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property accountName in model FactoryRepoConfiguration")); } if (repositoryName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property repositoryName in model FactoryRepoConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property repositoryName in model FactoryRepoConfiguration")); } if (collaborationBranch() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property collaborationBranch in model FactoryRepoConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property collaborationBranch in model FactoryRepoConfiguration")); } if (rootFolder() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property rootFolder in model FactoryRepoConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property rootFolder in model FactoryRepoConfiguration")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java index 9ca1faf68ee01..44148236a548a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Factory's VSTS repo information. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = FactoryVstsConfiguration.class, + visible = true) @JsonTypeName("FactoryVSTSConfiguration") @Fluent public final class FactoryVstsConfiguration extends FactoryRepoConfiguration { + /* + * Type of repo configuration. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FactoryVSTSConfiguration"; + /* * VSTS project name. */ @@ -35,6 +47,16 @@ public final class FactoryVstsConfiguration extends FactoryRepoConfiguration { public FactoryVstsConfiguration() { } + /** + * Get the type property: Type of repo configuration. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the projectName property: VSTS project name. * @@ -138,8 +160,9 @@ public FactoryVstsConfiguration withDisablePublish(Boolean disablePublish) { public void validate() { super.validate(); if (projectName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property projectName in model FactoryVstsConfiguration")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property projectName in model FactoryVstsConfiguration")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java index debf8031fe32c..22bfd038a0c60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.FailActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,13 +16,20 @@ /** * This activity will fail within its own scope and output a custom error message and error code. The error message and * code can provided either as a string literal or as an expression that can be evaluated to a string at runtime. The - * activity scope can be the whole pipeline or a control activity (e.g. foreach, switch, until), if the fail activity - * is contained in it. + * activity scope can be the whole pipeline or a control activity (e.g. foreach, switch, until), if the fail activity is + * contained in it. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FailActivity.class, visible = true) @JsonTypeName("Fail") @Fluent public final class FailActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Fail"; + /* * Fail activity properties. */ @@ -34,6 +42,16 @@ public final class FailActivity extends ControlActivity { public FailActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Fail activity properties. * @@ -158,8 +176,9 @@ public FailActivity withErrorCode(Object errorCode) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model FailActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model FailActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java index 6c31c163163d9..51547fb2944ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.FileServerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * File system linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = FileServerLinkedService.class, + visible = true) @JsonTypeName("FileServer") @Fluent public final class FileServerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileServer"; + /* * File system linked service properties. */ @@ -32,6 +44,16 @@ public final class FileServerLinkedService extends LinkedService { public FileServerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: File system linked service properties. * @@ -147,8 +169,8 @@ public FileServerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -157,8 +179,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the FileServerLinkedService object itself. @@ -180,8 +202,9 @@ public FileServerLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model FileServerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model FileServerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java index ab01680a52919..ae9953722d076 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of file server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileServerLocation.class, visible = true) @JsonTypeName("FileServerLocation") @Fluent public final class FileServerLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileServerLocation"; + /** * Creates an instance of FileServerLocation class. */ public FileServerLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java index 6fa7003a55512..12459f8326edb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * File server read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileServerReadSettings.class, visible = true) @JsonTypeName("FileServerReadSettings") @Fluent public final class FileServerReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileServerReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,8 +43,7 @@ public final class FileServerReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -49,15 +55,13 @@ public final class FileServerReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -75,8 +79,7 @@ public final class FileServerReadSettings extends StoreReadSettings { private Object modifiedDatetimeEnd; /* - * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string - * (or Expression with resultType string). + * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileFilter") private Object fileFilter; @@ -88,8 +91,18 @@ public FileServerReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -98,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. 
- * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the FileServerReadSettings object itself. @@ -154,8 +167,8 @@ public FileServerReadSettings withWildcardFileName(Object wildcardFileName) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -164,8 +177,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the FileServerReadSettings object itself. @@ -198,8 +211,8 @@ public FileServerReadSettings withEnablePartitionDiscovery(Object enablePartitio } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -208,8 +221,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the FileServerReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java index fba9d9071a0c4..db64c1e4a0c28 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java @@ -5,6 +5,8 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -12,16 +14,37 @@ /** * File server write settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = FileServerWriteSettings.class, + visible = true) @JsonTypeName("FileServerWriteSettings") @Fluent public final class FileServerWriteSettings extends StoreWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileServerWriteSettings"; + /** * Creates an instance of FileServerWriteSettings class. */ public FileServerWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java index 4dafbda7437d7..494a4429a9f71 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.FileShareDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * An on-premises file system dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileShareDataset.class, visible = true) @JsonTypeName("FileShare") @Fluent public final class FileShareDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileShare"; + /* * On-premises file system dataset properties. */ @@ -31,6 +39,16 @@ public final class FileShareDataset extends Dataset { public FileShareDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: On-premises file system dataset properties. * @@ -104,8 +122,8 @@ public FileShareDataset withFolder(DatasetFolder folder) { } /** - * Get the folderPath property: The path of the on-premises file system. Type: string (or Expression with - * resultType string). + * Get the folderPath property: The path of the on-premises file system. Type: string (or Expression with resultType + * string). * * @return the folderPath value. */ @@ -114,8 +132,8 @@ public Object folderPath() { } /** - * Set the folderPath property: The path of the on-premises file system. Type: string (or Expression with - * resultType string). + * Set the folderPath property: The path of the on-premises file system. Type: string (or Expression with resultType + * string). * * @param folderPath the folderPath value to set. * @return the FileShareDataset object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java index e6c8f094d47ee..e6ad3cb20dc6f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity file system sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileSystemSink.class, visible = true) @JsonTypeName("FileSystemSink") @Fluent public final class FileSystemSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileSystemSink"; + /* * The type of copy behavior for copy sink. */ @@ -28,6 +36,16 @@ public final class FileSystemSink extends CopySink { public FileSystemSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the copyBehavior property: The type of copy behavior for copy sink. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java index 37c6ec59069b8..bbd4a481b67ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java @@ -6,26 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity file system source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileSystemSource.class, visible = true) @JsonTypeName("FileSystemSource") @Fluent public final class FileSystemSource extends CopySource { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FileSystemSource"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; /* - * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -37,8 +43,18 @@ public FileSystemSource() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -47,8 +63,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the FileSystemSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java index db9a2f5cd1855..837601e810da5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.FilterActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Filter and return results from input array based on the conditions. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FilterActivity.class, visible = true) @JsonTypeName("Filter") @Fluent public final class FilterActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Filter"; + /* * Filter activity properties. */ @@ -31,6 +39,16 @@ public final class FilterActivity extends ControlActivity { public FilterActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Filter activity properties. 
* @@ -149,8 +167,9 @@ public FilterActivity withCondition(Expression condition) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model FilterActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model FilterActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java index a055fa5c66503..cafa494131df0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.FlowletTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,10 +15,17 @@ /** * Data flow flowlet. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Flowlet.class, visible = true) @JsonTypeName("Flowlet") @Fluent public final class Flowlet extends DataFlow { + /* + * Type of data flow. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Flowlet"; + /* * Flowlet type properties. */ @@ -30,6 +38,16 @@ public final class Flowlet extends DataFlow { public Flowlet() { } + /** + * Get the type property: Type of data flow. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Flowlet type properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java index 9aa9f3c13e4ab..30242c36b49b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ForEachActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * This activity is used for iterating over a collection and execute given activities. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ForEachActivity.class, visible = true) @JsonTypeName("ForEach") @Fluent public final class ForEachActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ForEach"; + /* * ForEach activity properties. */ @@ -31,6 +39,16 @@ public final class ForEachActivity extends ControlActivity { public ForEachActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ForEach activity properties. * @@ -197,8 +215,9 @@ public ForEachActivity withActivities(List activities) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model ForEachActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ForEachActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java index 0ca218510f949..1bbfecd157c77 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -17,11 +19,7 @@ /** * Format read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = FormatReadSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FormatReadSettings.class, visible = true) @JsonTypeName("FormatReadSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "ParquetReadSettings", value = ParquetReadSettings.class), @@ -31,6 +29,13 @@ @JsonSubTypes.Type(name = "BinaryReadSettings", value = BinaryReadSettings.class) }) @Fluent public class FormatReadSettings { + /* + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Format read settings. */ @@ -41,6 +46,16 @@ public class FormatReadSettings { * Creates an instance of FormatReadSettings class. */ public FormatReadSettings() { + this.type = "FormatReadSettings"; + } + + /** + * Get the type property: The read setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java index 7b7cb57561a7c..a5c6354a4e734 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -17,11 +19,7 @@ /** * Format write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = FormatWriteSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FormatWriteSettings.class, visible = true) @JsonTypeName("FormatWriteSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "AvroWriteSettings", value = AvroWriteSettings.class), @@ -31,6 +29,13 @@ @JsonSubTypes.Type(name = "JsonWriteSettings", value = JsonWriteSettings.class) }) @Fluent public class FormatWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Format write settings. */ @@ -41,6 +46,16 @@ public class FormatWriteSettings { * Creates an instance of FormatWriteSettings class. */ public FormatWriteSettings() { + this.type = "FormatWriteSettings"; + } + + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java index d4b9d14a52b3f..1db514ee6f72c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Ftp read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpReadSettings.class, visible = true) @JsonTypeName("FtpReadSettings") @Fluent public final class FtpReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FtpReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,36 +49,31 @@ public final class FtpReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; /* - * Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType - * boolean). + * Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useBinaryTransfer") private Object useBinaryTransfer; /* - * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with - * resultType boolean). + * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableChunking") private Object disableChunking; @@ -83,8 +85,18 @@ public FtpReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -93,8 +105,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the FtpReadSettings object itself. 
@@ -105,8 +117,7 @@ public FtpReadSettings withRecursive(Object recursive) { } /** - * Get the wildcardFolderPath property: Ftp wildcardFolderPath. Type: string (or Expression with resultType - * string). + * Get the wildcardFolderPath property: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). * * @return the wildcardFolderPath value. */ @@ -115,8 +126,7 @@ public Object wildcardFolderPath() { } /** - * Set the wildcardFolderPath property: Ftp wildcardFolderPath. Type: string (or Expression with resultType - * string). + * Set the wildcardFolderPath property: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). * * @param wildcardFolderPath the wildcardFolderPath value to set. * @return the FtpReadSettings object itself. @@ -169,8 +179,8 @@ public FtpReadSettings withEnablePartitionDiscovery(Object enablePartitionDiscov } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -179,8 +189,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the FtpReadSettings object itself. @@ -213,8 +223,8 @@ public FtpReadSettings withDeleteFilesAfterCompletion(Object deleteFilesAfterCom } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -223,8 +233,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the FtpReadSettings object itself. @@ -235,8 +245,8 @@ public FtpReadSettings withFileListPath(Object fileListPath) { } /** - * Get the useBinaryTransfer property: Specify whether to use binary transfer mode for FTP stores. Type: boolean - * (or Expression with resultType boolean). + * Get the useBinaryTransfer property: Specify whether to use binary transfer mode for FTP stores. Type: boolean (or + * Expression with resultType boolean). * * @return the useBinaryTransfer value. */ @@ -245,8 +255,8 @@ public Object useBinaryTransfer() { } /** - * Set the useBinaryTransfer property: Specify whether to use binary transfer mode for FTP stores. 
Type: boolean - * (or Expression with resultType boolean). + * Set the useBinaryTransfer property: Specify whether to use binary transfer mode for FTP stores. Type: boolean (or + * Expression with resultType boolean). * * @param useBinaryTransfer the useBinaryTransfer value to set. * @return the FtpReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java index f15395596cf31..9f7d3dc3fe853 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.FtpServerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * A FTP server Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpServerLinkedService.class, visible = true) @JsonTypeName("FtpServer") @Fluent public final class FtpServerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FtpServer"; + /* * Properties specific to this linked service type. */ @@ -32,6 +40,16 @@ public final class FtpServerLinkedService extends LinkedService { public FtpServerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -149,8 +167,7 @@ public FtpServerLinkedService withAuthenticationType(FtpAuthenticationType authe } /** - * Get the username property: Username to logon the FTP server. Type: string (or Expression with resultType - * string). + * Get the username property: Username to logon the FTP server. Type: string (or Expression with resultType string). * * @return the username value. */ @@ -159,8 +176,7 @@ public Object username() { } /** - * Set the username property: Username to logon the FTP server. Type: string (or Expression with resultType - * string). + * Set the username property: Username to logon the FTP server. Type: string (or Expression with resultType string). * * @param username the username value to set. * @return the FtpServerLinkedService object itself. @@ -197,8 +213,8 @@ public FtpServerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @return the encryptedCredential value. */ @@ -207,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the FtpServerLinkedService object itself. @@ -222,8 +238,8 @@ public FtpServerLinkedService withEncryptedCredential(String encryptedCredential } /** - * Get the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. - * Type: boolean (or Expression with resultType boolean). + * Get the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: + * boolean (or Expression with resultType boolean). * * @return the enableSsl value. */ @@ -232,8 +248,8 @@ public Object enableSsl() { } /** - * Set the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. - * Type: boolean (or Expression with resultType boolean). + * Set the enableSsl property: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: + * boolean (or Expression with resultType boolean). * * @param enableSsl the enableSsl value to set. * @return the FtpServerLinkedService object itself. @@ -247,19 +263,20 @@ public FtpServerLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when - * connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + * Get the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when connect + * over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). * * @return the enableServerCertificateValidation value. */ public Object enableServerCertificateValidation() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? null : this.innerTypeProperties().enableServerCertificateValidation(); } /** - * Set the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when - * connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + * Set the enableServerCertificateValidation property: If true, validate the FTP server SSL certificate when connect + * over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). * * @param enableServerCertificateValidation the enableServerCertificateValidation value to set. * @return the FtpServerLinkedService object itself. 
@@ -281,8 +298,9 @@ public FtpServerLinkedService withEnableServerCertificateValidation(Object enabl public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model FtpServerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model FtpServerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java index 589cc1c979d2c..49d946e697b56 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of ftp server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpServerLocation.class, visible = true) @JsonTypeName("FtpServerLocation") @Fluent public final class FtpServerLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "FtpServerLocation"; + /** * Creates an instance of FtpServerLocation class. */ public FtpServerLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java index 7022dbd67a5d9..9eb686446fbd8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GetMetadataActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Activity to get metadata of dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GetMetadataActivity.class, visible = true) @JsonTypeName("GetMetadata") @Fluent public final class GetMetadataActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GetMetadata"; + /* * GetMetadata activity properties. */ @@ -31,6 +39,16 @@ public final class GetMetadataActivity extends ExecutionActivity { public GetMetadataActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: GetMetadata activity properties. * @@ -213,8 +231,9 @@ public GetMetadataActivity withFormatSettings(FormatReadSettings formatSettings) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GetMetadataActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GetMetadataActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java index 66acdc1ff28ce..61ce7d9c9ddc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java @@ -130,15 +130,17 @@ public GitHubAccessTokenRequest withGitHubAccessTokenBaseUrl(String gitHubAccess */ public void validate() { if (gitHubAccessCode() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property gitHubAccessCode in model GitHubAccessTokenRequest")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property gitHubAccessCode in model GitHubAccessTokenRequest")); } if (gitHubClientSecret() != null) { gitHubClientSecret().validate(); } if (gitHubAccessTokenBaseUrl() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property gitHubAccessTokenBaseUrl in model GitHubAccessTokenRequest")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property gitHubAccessTokenBaseUrl in model GitHubAccessTokenRequest")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java index bf3c48ce0bfc7..76ed6216cfbc1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java @@ -80,8 +80,9 @@ public 
GlobalParameterListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model GlobalParameterListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model GlobalParameterListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java index 224d91540a1b0..9af2187fa49ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java @@ -78,12 +78,14 @@ public GlobalParameterSpecification withValue(Object value) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model GlobalParameterSpecification")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property type in model GlobalParameterSpecification")); } if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model GlobalParameterSpecification")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model GlobalParameterSpecification")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java index 9304f17c287aa..b3cdb3e774c00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GoogleAdWordsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Google AdWords service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleAdWordsLinkedService.class, + visible = true) @JsonTypeName("GoogleAdWords") @Fluent public final class GoogleAdWordsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleAdWords"; + /* * Google AdWords service linked service properties. 
*/ @@ -33,6 +45,16 @@ public final class GoogleAdWordsLinkedService extends LinkedService { public GoogleAdWordsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Google AdWords service linked service properties. * @@ -278,8 +300,8 @@ public GoogleAdWordsLinkedService withEmail(Object email) { /** * Get the keyFilePath property: (Deprecated) The full path to the .p12 key file that is used to authenticate the - * service account email address and can only be used on self-hosted IR. Type: string (or Expression with - * resultType string). + * service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType + * string). * * @return the keyFilePath value. */ @@ -289,8 +311,8 @@ public Object keyFilePath() { /** * Set the keyFilePath property: (Deprecated) The full path to the .p12 key file that is used to authenticate the - * service account email address and can only be used on self-hosted IR. Type: string (or Expression with - * resultType string). + * service account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType + * string). * * @param keyFilePath the keyFilePath value to set. * @return the GoogleAdWordsLinkedService object itself. @@ -464,8 +486,8 @@ public GoogleAdWordsLinkedService withSupportLegacyDataTypes(Object supportLegac } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -474,8 +496,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleAdWordsLinkedService object itself. 
@@ -497,8 +519,9 @@ public GoogleAdWordsLinkedService withEncryptedCredential(String encryptedCreden public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GoogleAdWordsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GoogleAdWordsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java index e74a3e40f993b..d05bc19136a85 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Google AdWords service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleAdWordsObjectDataset.class, + visible = true) @JsonTypeName("GoogleAdWordsObject") @Fluent public final class GoogleAdWordsObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleAdWordsObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class GoogleAdWordsObjectDataset extends Dataset { public GoogleAdWordsObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java index 54f6b61a4f868..c5af09052511a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Google AdWords service source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleAdWordsSource.class, visible = true) @JsonTypeName("GoogleAdWordsSource") @Fluent public final class GoogleAdWordsSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleAdWordsSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class GoogleAdWordsSource extends TabularSource { public GoogleAdWordsSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java index 6831a7b87be8d..96bf6ecf8d6d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Google BigQuery service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleBigQueryLinkedService.class, + visible = true) @JsonTypeName("GoogleBigQuery") @Fluent public final class GoogleBigQueryLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQuery"; + /* * Google BigQuery service linked service properties. */ @@ -33,6 +45,16 @@ public final class GoogleBigQueryLinkedService extends LinkedService { public GoogleBigQueryLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Google BigQuery service linked service properties. * @@ -129,9 +151,9 @@ public GoogleBigQueryLinkedService withAdditionalProjects(Object additionalProje } /** - * Get the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive - * access enables support for federated tables that combine BigQuery data with data from Google Drive. The default - * value is false. Type: string (or Expression with resultType string). + * Get the requestGoogleDriveScope property: Whether to request access to Google Drive. 
Allowing Google Drive access + * enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is + * false. Type: string (or Expression with resultType string). * * @return the requestGoogleDriveScope value. */ @@ -140,9 +162,9 @@ public Object requestGoogleDriveScope() { } /** - * Set the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive - * access enables support for federated tables that combine BigQuery data with data from Google Drive. The default - * value is false. Type: string (or Expression with resultType string). + * Set the requestGoogleDriveScope property: Whether to request access to Google Drive. Allowing Google Drive access + * enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is + * false. Type: string (or Expression with resultType string). * * @param requestGoogleDriveScope the requestGoogleDriveScope value to set. * @return the GoogleBigQueryLinkedService object itself. @@ -279,9 +301,8 @@ public GoogleBigQueryLinkedService withEmail(Object email) { } /** - * Get the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service - * account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType - * string). + * Get the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service account + * email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). * * @return the keyFilePath value. */ @@ -290,9 +311,8 @@ public Object keyFilePath() { } /** - * Set the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service - * account email address and can only be used on self-hosted IR. Type: string (or Expression with resultType - * string). + * Set the keyFilePath property: The full path to the .p12 key file that is used to authenticate the service account + * email address and can only be used on self-hosted IR. Type: string (or Expression with resultType string). * * @param keyFilePath the keyFilePath value to set. * @return the GoogleBigQueryLinkedService object itself. @@ -306,10 +326,9 @@ public GoogleBigQueryLinkedService withKeyFilePath(Object keyFilePath) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType - * string). + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). * * @return the trustedCertPath value. */ @@ -318,10 +337,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType - * string). 
+ * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). * * @param trustedCertPath the trustedCertPath value to set. * @return the GoogleBigQueryLinkedService object itself. @@ -360,8 +378,8 @@ public GoogleBigQueryLinkedService withUseSystemTrustStore(Object useSystemTrust } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -370,8 +388,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleBigQueryLinkedService object itself. @@ -393,8 +411,9 @@ public GoogleBigQueryLinkedService withEncryptedCredential(String encryptedCrede public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GoogleBigQueryLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GoogleBigQueryLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java index dd592406acbe9..e5e76b17a3dcc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Google BigQuery service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleBigQueryObjectDataset.class, + visible = true) @JsonTypeName("GoogleBigQueryObject") @Fluent public final class GoogleBigQueryObjectDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQueryObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class GoogleBigQueryObjectDataset extends Dataset { public GoogleBigQueryObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java index 6f9d94aaf6a9a..5adec7c3e1d32 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Google BigQuery service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleBigQuerySource.class, visible = true) @JsonTypeName("GoogleBigQuerySource") @Fluent public final class GoogleBigQuerySource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQuerySource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class GoogleBigQuerySource extends TabularSource { public GoogleBigQuerySource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java index 57ca16792de8d..57e2335264622 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Google BigQuery service linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleBigQueryV2LinkedService.class, + visible = true) @JsonTypeName("GoogleBigQueryV2") @Fluent public final class GoogleBigQueryV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQueryV2"; + /* * Google BigQuery service linked service properties. */ @@ -33,6 +45,16 @@ public final class GoogleBigQueryV2LinkedService extends LinkedService { public GoogleBigQueryV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Google BigQuery service linked service properties. * @@ -225,8 +247,8 @@ public GoogleBigQueryV2LinkedService withKeyFileContent(SecretBase keyFileConten } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -235,8 +257,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleBigQueryV2LinkedService object itself. 
@@ -258,8 +280,9 @@ public GoogleBigQueryV2LinkedService withEncryptedCredential(String encryptedCre public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GoogleBigQueryV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GoogleBigQueryV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java index e5cff01f36924..a330f1fe846c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2DatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Google BigQuery service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleBigQueryV2ObjectDataset.class, + visible = true) @JsonTypeName("GoogleBigQueryV2Object") @Fluent public final class GoogleBigQueryV2ObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQueryV2Object"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class GoogleBigQueryV2ObjectDataset extends Dataset { public GoogleBigQueryV2ObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java index 404a8aacbe6e6..8e211f9afb140 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Google BigQuery service source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleBigQueryV2Source.class, visible = true) @JsonTypeName("GoogleBigQueryV2Source") @Fluent public final class GoogleBigQueryV2Source extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleBigQueryV2Source"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class GoogleBigQueryV2Source extends TabularSource { public GoogleBigQueryV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java index 65b7c73db8dcf..e8c13791f584b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GoogleCloudStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Google Cloud Storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleCloudStorageLinkedService.class, + visible = true) @JsonTypeName("GoogleCloudStorage") @Fluent public final class GoogleCloudStorageLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleCloudStorage"; + /* * Google Cloud Storage linked service properties. */ @@ -33,6 +45,16 @@ public final class GoogleCloudStorageLinkedService extends LinkedService { public GoogleCloudStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Google Cloud Storage linked service properties. * @@ -129,9 +151,9 @@ public GoogleCloudStorageLinkedService withSecretAccessKey(SecretBase secretAcce } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). 
+ * Get the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -140,9 +162,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Google Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the GoogleCloudStorageLinkedService object itself. @@ -156,8 +178,8 @@ public GoogleCloudStorageLinkedService withServiceUrl(Object serviceUrl) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -166,8 +188,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleCloudStorageLinkedService object itself. 
@@ -189,8 +211,9 @@ public GoogleCloudStorageLinkedService withEncryptedCredential(String encryptedC public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GoogleCloudStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GoogleCloudStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java index 040946ccb4b3e..3be7b45ae0263 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of Google Cloud Storage dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleCloudStorageLocation.class, + visible = true) @JsonTypeName("GoogleCloudStorageLocation") @Fluent public final class GoogleCloudStorageLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleCloudStorageLocation"; + /* * Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) */ @@ -34,6 +46,16 @@ public final class GoogleCloudStorageLocation extends DatasetLocation { public GoogleCloudStorageLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the bucketName property: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java index 46f922d6665f2..9f41b62c6fdf7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Google Cloud Storage read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleCloudStorageReadSettings.class, + visible = true) @JsonTypeName("GoogleCloudStorageReadSettings") @Fluent public final class GoogleCloudStorageReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleCloudStorageReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,8 +53,7 @@ public final class GoogleCloudStorageReadSettings extends StoreReadSettings { private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -55,15 +65,13 @@ public final class GoogleCloudStorageReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +95,18 @@ public GoogleCloudStorageReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +115,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the GoogleCloudStorageReadSettings object itself. 
@@ -175,8 +193,8 @@ public GoogleCloudStorageReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -185,8 +203,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the GoogleCloudStorageReadSettings object itself. @@ -219,8 +237,8 @@ public GoogleCloudStorageReadSettings withEnablePartitionDiscovery(Object enable } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -229,8 +247,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the GoogleCloudStorageReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java index b8bcf1dc192f5..4c897c140b415 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GoogleSheetsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for GoogleSheets. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = GoogleSheetsLinkedService.class, + visible = true) @JsonTypeName("GoogleSheets") @Fluent public final class GoogleSheetsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GoogleSheets"; + /* * GoogleSheets linked service properties. */ @@ -32,6 +44,16 @@ public final class GoogleSheetsLinkedService extends LinkedService { public GoogleSheetsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: GoogleSheets linked service properties. * @@ -101,8 +123,8 @@ public GoogleSheetsLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -111,8 +133,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the GoogleSheetsLinkedService object itself. 
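[Editor's note, not part of the patch] The validate() rewrites repeated throughout this patch replace ClientLogger.logExceptionAsError with the fluent atError().log(...) builder, as shown in the hunks above. Below is a minimal sketch of the two forms side by side; it assumes a recent com.azure:azure-core on the classpath (the exact builder overloads depend on the azure-core version), and the class and property names are hypothetical.

import com.azure.core.util.logging.ClientLogger;

public final class ValidationSketch {
    private static final ClientLogger LOGGER = new ClientLogger(ValidationSketch.class);

    private Object innerTypeProperties;

    public void validateOldStyle() {
        if (innerTypeProperties == null) {
            // Older generated code: log at ERROR level and return the exception so it can be thrown.
            throw LOGGER.logExceptionAsError(
                new IllegalArgumentException("Missing required property innerTypeProperties"));
        }
    }

    public void validateNewStyle() {
        if (innerTypeProperties == null) {
            // Newer generated code: same effect through the fluent logging builder,
            // mirroring the pattern used in this patch.
            throw LOGGER.atError()
                .log(new IllegalArgumentException("Missing required property innerTypeProperties"));
        }
    }
}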
@@ -134,8 +156,9 @@ public GoogleSheetsLinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GoogleSheetsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GoogleSheetsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java index 5d393abeeac8c..90130d74a9531 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.GreenplumLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Greenplum Database linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumLinkedService.class, visible = true) @JsonTypeName("Greenplum") @Fluent public final class GreenplumLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Greenplum"; + /* * Greenplum Database linked service properties. */ @@ -32,6 +40,16 @@ public final class GreenplumLinkedService extends LinkedService { public GreenplumLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Greenplum Database linked service properties. * @@ -126,8 +144,8 @@ public GreenplumLinkedService withPwd(AzureKeyVaultSecretReference pwd) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. 
* @return the GreenplumLinkedService object itself. @@ -159,8 +177,9 @@ public GreenplumLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model GreenplumLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model GreenplumLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java index b43eb6fe6e0b0..40b4ad95ce5c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Greenplum Database source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumSource.class, visible = true) @JsonTypeName("GreenplumSource") @Fluent public final class GreenplumSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GreenplumSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class GreenplumSource extends TabularSource { public GreenplumSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java index 4adbf28308b0f..7130b4907542a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GreenplumDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Greenplum Database dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumTableDataset.class, visible = true) @JsonTypeName("GreenplumTable") @Fluent public final class GreenplumTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "GreenplumTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class GreenplumTableDataset extends Dataset { public GreenplumTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java index bf748ced31217..b4ac3a167daa9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HBaseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * HBase server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseLinkedService.class, visible = true) @JsonTypeName("HBase") @Fluent public final class HBaseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HBase"; + /* * HBase server linked service properties. */ @@ -32,6 +40,16 @@ public final class HBaseLinkedService extends LinkedService { public HBaseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HBase server linked service properties. * @@ -245,9 +263,9 @@ public HBaseLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. 
*/ @@ -256,9 +274,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the HBaseLinkedService object itself. @@ -322,8 +340,8 @@ public HBaseLinkedService withAllowSelfSignedServerCert(Object allowSelfSignedSe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -332,8 +350,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HBaseLinkedService object itself. @@ -355,8 +373,9 @@ public HBaseLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HBaseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HBaseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java index 723796f284db6..1afb99c79cab3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * HBase server dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseObjectDataset.class, visible = true) @JsonTypeName("HBaseObject") @Fluent public final class HBaseObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HBaseObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class HBaseObjectDataset extends Dataset { public HBaseObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java index 382e32bb2fbdb..44616b29f0ec9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity HBase server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseSource.class, visible = true) @JsonTypeName("HBaseSource") @Fluent public final class HBaseSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HBaseSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class HBaseSource extends TabularSource { public HBaseSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
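All of the regenerated models in this patch apply the same two mechanical changes. First, the @JsonTypeInfo annotation drops the explicit include = JsonTypeInfo.As.PROPERTY (the Jackson default) and adds defaultImpl and visible = true, while each concrete class now carries its own @JsonTypeId-annotated "type" field with a matching type() override, so the discriminator value round-trips through serialization. Second, validate() now throws through LOGGER.atError().log(...) instead of LOGGER.logExceptionAsError(...). A minimal sketch of the Jackson pattern follows; it is illustrative only and not part of the patch, the Animal/Dog/PolymorphicTypeIdSketch names are hypothetical, and it is simplified to register the subtype with @JsonSubTypes on an abstract base instead of repeating @JsonTypeInfo with defaultImpl on the subclass as the generated code does:

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class PolymorphicTypeIdSketch {
        // Base type: the "type" property selects the subtype and stays visible to it.
        @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", visible = true)
        @JsonSubTypes(@JsonSubTypes.Type(value = Dog.class, name = "Dog"))
        public abstract static class Animal {
            public abstract String type();
        }

        @JsonTypeName("Dog")
        public static class Dog extends Animal {
            // Mirrors the generated models: the subclass pins its discriminator value
            // and exposes it through an overriding type() accessor.
            @JsonTypeId
            @JsonProperty(value = "type", required = true)
            private String type = "Dog";

            @Override
            public String type() {
                return this.type;
            }
        }

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            Animal parsed = mapper.readValue("{\"type\":\"Dog\"}", Animal.class);
            System.out.println(parsed.getClass().getSimpleName() + " -> " + parsed.type()); // Dog -> Dog
            System.out.println(mapper.writeValueAsString(new Dog()));                       // {"type":"Dog"}
        }
    }

Because the field carries @JsonTypeId, its value is written once as the type discriminator rather than as an ordinary property, and visible = true feeds the incoming discriminator back into that field during deserialization.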
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java index a81aa742665dc..0b1db815e4d59 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightHiveActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * HDInsight Hive activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightHiveActivity.class, visible = true) @JsonTypeName("HDInsightHive") @Fluent public final class HDInsightHiveActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightHive"; + /* * HDInsight Hive activity properties. */ @@ -32,6 +40,16 @@ public final class HDInsightHiveActivity extends ExecutionActivity { public HDInsightHiveActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight Hive activity properties. * @@ -275,8 +293,8 @@ public HDInsightHiveActivity withVariables(Map variables) { } /** - * Get the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with - * ESP (Enterprise Security Package). + * Get the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP + * (Enterprise Security Package). * * @return the queryTimeout value. */ @@ -285,8 +303,8 @@ public Integer queryTimeout() { } /** - * Set the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with - * ESP (Enterprise Security Package). + * Set the queryTimeout property: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP + * (Enterprise Security Package). * * @param queryTimeout the queryTimeout value to set. * @return the HDInsightHiveActivity object itself. 
@@ -308,8 +326,9 @@ public HDInsightHiveActivity withQueryTimeout(Integer queryTimeout) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightHiveActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightHiveActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java index 17dca500e2b73..a8b83b9658e1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * HDInsight linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightLinkedService.class, visible = true) @JsonTypeName("HDInsight") @Fluent public final class HDInsightLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsight"; + /* * HDInsight linked service properties. */ @@ -32,6 +40,16 @@ public final class HDInsightLinkedService extends LinkedService { public HDInsightLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight linked service properties. * @@ -195,8 +213,8 @@ public HDInsightLinkedService withHcatalogLinkedServiceName(LinkedServiceReferen } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -205,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HDInsightLinkedService object itself. 
@@ -278,8 +296,9 @@ public HDInsightLinkedService withFileSystem(Object fileSystem) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java index ec68f2a6353da..5fe8d402a8dac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightMapReduceActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * HDInsight MapReduce activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = HDInsightMapReduceActivity.class, + visible = true) @JsonTypeName("HDInsightMapReduce") @Fluent public final class HDInsightMapReduceActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightMapReduce"; + /* * HDInsight MapReduce activity properties. */ @@ -33,6 +45,16 @@ public final class HDInsightMapReduceActivity extends ExecutionActivity { public HDInsightMapReduceActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight MapReduce activity properties. 
* @@ -307,8 +329,9 @@ public HDInsightMapReduceActivity withDefines(Map defines) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightMapReduceActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightMapReduceActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java index 8e85f78681415..d2f71e13bd7e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightOnDemandLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * HDInsight ondemand linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = HDInsightOnDemandLinkedService.class, + visible = true) @JsonTypeName("HDInsightOnDemand") @Fluent public final class HDInsightOnDemandLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightOnDemand"; + /* * HDInsight ondemand linked service properties. */ @@ -33,6 +45,16 @@ public final class HDInsightOnDemandLinkedService extends LinkedService { public HDInsightOnDemandLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight ondemand linked service properties. * @@ -131,8 +153,7 @@ public HDInsightOnDemandLinkedService withTimeToLive(Object timeToLive) { } /** - * Get the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType - * string). + * Get the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType string). * * @return the version value. */ @@ -141,8 +162,7 @@ public Object version() { } /** - * Set the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType - * string). + * Set the version property: Version of the HDInsight cluster.  Type: string (or Expression with resultType string). * * @param version the version value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -181,8 +201,8 @@ public HDInsightOnDemandLinkedService withLinkedServiceName(LinkedServiceReferen } /** - * Get the hostSubscriptionId property: The customer’s subscription to host the cluster. 
Type: string (or - * Expression with resultType string). + * Get the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or Expression + * with resultType string). * * @return the hostSubscriptionId value. */ @@ -191,8 +211,8 @@ public Object hostSubscriptionId() { } /** - * Set the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or - * Expression with resultType string). + * Set the hostSubscriptionId property: The customer’s subscription to host the cluster. Type: string (or Expression + * with resultType string). * * @param hostSubscriptionId the hostSubscriptionId value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -329,8 +349,8 @@ public HDInsightOnDemandLinkedService withClusterNamePrefix(Object clusterNamePr } /** - * Get the clusterUsername property: The username to access the cluster. Type: string (or Expression with - * resultType string). + * Get the clusterUsername property: The username to access the cluster. Type: string (or Expression with resultType + * string). * * @return the clusterUsername value. */ @@ -339,8 +359,8 @@ public Object clusterUsername() { } /** - * Set the clusterUsername property: The username to access the cluster. Type: string (or Expression with - * resultType string). + * Set the clusterUsername property: The username to access the cluster. Type: string (or Expression with resultType + * string). * * @param clusterUsername the clusterUsername value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -575,8 +595,8 @@ public HDInsightOnDemandLinkedService withHBaseConfiguration(Object hBaseConfigu } /** - * Get the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - * HDInsight cluster. + * Get the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight + * cluster. * * @return the hdfsConfiguration value. */ @@ -585,8 +605,8 @@ public Object hdfsConfiguration() { } /** - * Set the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - * HDInsight cluster. + * Set the hdfsConfiguration property: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight + * cluster. * * @param hdfsConfiguration the hdfsConfiguration value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -600,8 +620,8 @@ public HDInsightOnDemandLinkedService withHdfsConfiguration(Object hdfsConfigura } /** - * Get the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the - * HDInsight cluster. + * Get the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight + * cluster. * * @return the hiveConfiguration value. */ @@ -610,8 +630,8 @@ public Object hiveConfiguration() { } /** - * Set the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the - * HDInsight cluster. + * Set the hiveConfiguration property: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight + * cluster. * * @param hiveConfiguration the hiveConfiguration value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -700,8 +720,8 @@ public HDInsightOnDemandLinkedService withStormConfiguration(Object stormConfigu } /** - * Get the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the - * HDInsight cluster. 
+ * Get the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight + * cluster. * * @return the yarnConfiguration value. */ @@ -710,8 +730,8 @@ public Object yarnConfiguration() { } /** - * Set the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the - * HDInsight cluster. + * Set the yarnConfiguration property: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight + * cluster. * * @param yarnConfiguration the yarnConfiguration value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -725,8 +745,8 @@ public HDInsightOnDemandLinkedService withYarnConfiguration(Object yarnConfigura } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -735,8 +755,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HDInsightOnDemandLinkedService object itself. @@ -927,8 +947,9 @@ public HDInsightOnDemandLinkedService withCredential(CredentialReference credent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightOnDemandLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightOnDemandLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java index 7043337f373e4..987bb38d8a2e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightPigActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * HDInsight Pig activity type. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightPigActivity.class, visible = true) @JsonTypeName("HDInsightPig") @Fluent public final class HDInsightPigActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightPig"; + /* * HDInsight Pig activity properties. */ @@ -32,6 +40,16 @@ public final class HDInsightPigActivity extends ExecutionActivity { public HDInsightPigActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight Pig activity properties. * @@ -262,8 +280,9 @@ public HDInsightPigActivity withDefines(Map defines) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightPigActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightPigActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java index 9454460f1f19b..205dd4775e7c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightSparkActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * HDInsight Spark activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightSparkActivity.class, visible = true) @JsonTypeName("HDInsightSpark") @Fluent public final class HDInsightSparkActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightSpark"; + /* * HDInsight spark activity properties. */ @@ -32,6 +40,16 @@ public final class HDInsightSparkActivity extends ExecutionActivity { public HDInsightSparkActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight spark activity properties. * @@ -210,8 +228,8 @@ public HDInsightSparkActivity withGetDebugInfo(HDInsightActivityDebugInfoOption } /** - * Get the sparkJobLinkedService property: The storage linked service for uploading the entry file and - * dependencies, and for receiving logs. 
+ * Get the sparkJobLinkedService property: The storage linked service for uploading the entry file and dependencies, + * and for receiving logs. * * @return the sparkJobLinkedService value. */ @@ -220,8 +238,8 @@ public LinkedServiceReference sparkJobLinkedService() { } /** - * Set the sparkJobLinkedService property: The storage linked service for uploading the entry file and - * dependencies, and for receiving logs. + * Set the sparkJobLinkedService property: The storage linked service for uploading the entry file and dependencies, + * and for receiving logs. * * @param sparkJobLinkedService the sparkJobLinkedService value to set. * @return the HDInsightSparkActivity object itself. @@ -314,8 +332,9 @@ public HDInsightSparkActivity withSparkConfig(Map sparkConfig) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightSparkActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightSparkActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java index f88f6b366a9ef..3b3d062b36944 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightStreamingActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * HDInsight streaming activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = HDInsightStreamingActivity.class, + visible = true) @JsonTypeName("HDInsightStreaming") @Fluent public final class HDInsightStreamingActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HDInsightStreaming"; + /* * HDInsight streaming activity properties. */ @@ -33,6 +45,16 @@ public final class HDInsightStreamingActivity extends ExecutionActivity { public HDInsightStreamingActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDInsight streaming activity properties. 
* @@ -399,8 +421,9 @@ public HDInsightStreamingActivity withDefines(Map defines) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HDInsightStreamingActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HDInsightStreamingActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java index a44dbc4e1491f..8d1dd4fbc6ae5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HdfsLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Hadoop Distributed File System (HDFS) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsLinkedService.class, visible = true) @JsonTypeName("Hdfs") @Fluent public final class HdfsLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Hdfs"; + /* * HDFS linked service properties. */ @@ -32,6 +40,16 @@ public final class HdfsLinkedService extends LinkedService { public HdfsLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: HDFS linked service properties. * @@ -128,8 +146,8 @@ public HdfsLinkedService withAuthenticationType(Object authenticationType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -138,8 +156,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HdfsLinkedService object itself. 
@@ -209,8 +227,9 @@ public HdfsLinkedService withPassword(SecretBase password) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HdfsLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HdfsLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java index 60790ea8eb7db..508003d8f009e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of HDFS. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsLocation.class, visible = true) @JsonTypeName("HdfsLocation") @Fluent public final class HdfsLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HdfsLocation"; + /** * Creates an instance of HdfsLocation class. */ public HdfsLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java index a958256a6899c..d8bd494827db2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * HDFS read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsReadSettings.class, visible = true) @JsonTypeName("HdfsReadSettings") @Fluent public final class HdfsReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HdfsReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,8 +43,7 @@ public final class HdfsReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -49,8 +55,7 @@ public final class HdfsReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; @@ -74,8 +79,7 @@ public final class HdfsReadSettings extends StoreReadSettings { private DistcpSettings distcpSettings; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +91,18 @@ public HdfsReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the HdfsReadSettings object itself. @@ -151,8 +165,8 @@ public HdfsReadSettings withWildcardFileName(Object wildcardFileName) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. 
*/ @@ -161,8 +175,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the HdfsReadSettings object itself. @@ -195,8 +209,8 @@ public HdfsReadSettings withEnablePartitionDiscovery(Object enablePartitionDisco } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -205,8 +219,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the HdfsReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java index 1231a349f4d1f..7cf8eb84e7f1c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity HDFS source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsSource.class, visible = true) @JsonTypeName("HdfsSource") @Fluent public final class HdfsSource extends CopySource { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HdfsSource"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,8 +43,18 @@ public HdfsSource() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: Copy source type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -46,8 +63,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the HdfsSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java index 5744b65f3677f..38f16d66bc583 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HiveLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Hive Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveLinkedService.class, visible = true) @JsonTypeName("Hive") @Fluent public final class HiveLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Hive"; + /* * Hive Server linked service properties. */ @@ -32,6 +40,16 @@ public final class HiveLinkedService extends LinkedService { public HiveLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Hive Server linked service properties. * @@ -78,8 +96,8 @@ public HiveLinkedService withAnnotations(List annotations) { } /** - * Get the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only - * when serviceDiscoveryMode is enable). + * Get the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only when + * serviceDiscoveryMode is enable). * * @return the host value. */ @@ -88,8 +106,8 @@ public Object host() { } /** - * Set the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only - * when serviceDiscoveryMode is enable). + * Set the host property: IP address or host name of the Hive server, separated by ';' for multiple hosts (only when + * serviceDiscoveryMode is enable). * * @param host the host value to set. * @return the HiveLinkedService object itself. 
@@ -241,8 +259,8 @@ public HiveLinkedService withZooKeeperNameSpace(Object zooKeeperNameSpace) { } /** - * Get the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into - * an equivalent form in HiveQL. + * Get the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into an + * equivalent form in HiveQL. * * @return the useNativeQuery value. */ @@ -251,8 +269,8 @@ public Object useNativeQuery() { } /** - * Set the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into - * an equivalent form in HiveQL. + * Set the useNativeQuery property: Specifies whether the driver uses native HiveQL queries,or converts them into an + * equivalent form in HiveQL. * * @param useNativeQuery the useNativeQuery value to set. * @return the HiveLinkedService object itself. @@ -360,9 +378,9 @@ public HiveLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -371,9 +389,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the HiveLinkedService object itself. @@ -462,8 +480,8 @@ public HiveLinkedService withAllowSelfSignedServerCert(Object allowSelfSignedSer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -472,8 +490,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HiveLinkedService object itself. 
@@ -495,8 +513,9 @@ public HiveLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HiveLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HiveLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java index f3a02d71cc689..603a9c43528f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.HiveDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Hive Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveObjectDataset.class, visible = true) @JsonTypeName("HiveObject") @Fluent public final class HiveObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HiveObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class HiveObjectDataset extends Dataset { public HiveObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java index 7fce73939aff5..a16b33b7ebd72 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Hive Server source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveSource.class, visible = true) @JsonTypeName("HiveSource") @Fluent public final class HiveSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HiveSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class HiveSource extends TabularSource { public HiveSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java index 5372185d77449..67279aeb56712 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.HttpDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * A file in an HTTP web server. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpDataset.class, visible = true) @JsonTypeName("HttpFile") @Fluent public final class HttpDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HttpFile"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class HttpDataset extends Dataset { public HttpDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * @@ -129,8 +147,8 @@ public HttpDataset withRelativeUrl(Object relativeUrl) { } /** - * Get the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with - * resultType string). + * Get the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with resultType + * string). * * @return the requestMethod value. */ @@ -139,8 +157,8 @@ public Object requestMethod() { } /** - * Set the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with - * resultType string). + * Set the requestMethod property: The HTTP method for the HTTP request. Type: string (or Expression with resultType + * string). * * @param requestMethod the requestMethod value to set. * @return the HttpDataset object itself. 
@@ -154,8 +172,7 @@ public HttpDataset withRequestMethod(Object requestMethod) { } /** - * Get the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType - * string). + * Get the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType string). * * @return the requestBody value. */ @@ -164,8 +181,7 @@ public Object requestBody() { } /** - * Set the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType - * string). + * Set the requestBody property: The body for the HTTP request. Type: string (or Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the HttpDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java index bd8b8851b1b2e..3e0cdb30a5eaa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HttpLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for an HTTP source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpLinkedService.class, visible = true) @JsonTypeName("HttpServer") @Fluent public final class HttpLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HttpServer"; + /* * Properties specific to this linked service type. */ @@ -32,6 +40,16 @@ public final class HttpLinkedService extends LinkedService { public HttpLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -202,8 +220,8 @@ public HttpLinkedService withAuthHeaders(Object authHeaders) { /** * Get the embeddedCertData property: Base64 encoded certificate data for ClientCertificate authentication. For - * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password - * should be specified. Type: string (or Expression with resultType string). + * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should + * be specified. Type: string (or Expression with resultType string). * * @return the embeddedCertData value. */ @@ -213,8 +231,8 @@ public Object embeddedCertData() { /** * Set the embeddedCertData property: Base64 encoded certificate data for ClientCertificate authentication. 
For - * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password - * should be specified. Type: string (or Expression with resultType string). + * on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should + * be specified. Type: string (or Expression with resultType string). * * @param embeddedCertData the embeddedCertData value to set. * @return the HttpLinkedService object itself. @@ -255,8 +273,8 @@ public HttpLinkedService withCertThumbprint(Object certThumbprint) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -265,8 +283,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HttpLinkedService object itself. @@ -286,7 +304,8 @@ public HttpLinkedService withEncryptedCredential(String encryptedCredential) { * @return the enableServerCertificateValidation value. */ public Object enableServerCertificateValidation() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? null : this.innerTypeProperties().enableServerCertificateValidation(); } @@ -314,8 +333,9 @@ public HttpLinkedService withEnableServerCertificateValidation(Object enableServ public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HttpLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HttpLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java index 2d70f9782e3fc..4d1a82b72c060 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java @@ -6,47 +6,50 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Http read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpReadSettings.class, visible = true) @JsonTypeName("HttpReadSettings") @Fluent public final class HttpReadSettings extends StoreReadSettings { /* - * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType - * string). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HttpReadSettings"; + + /* + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestMethod") private Object requestMethod; /* - * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType - * string). + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestBody") private Object requestBody; /* - * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType - * string). + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */ @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* - * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with - * resultType string). + * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestTimeout") private Object requestTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -57,6 +60,16 @@ public final class HttpReadSettings extends StoreReadSettings { public HttpReadSettings() { } + /** + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the requestMethod property: The HTTP method used to call the RESTful API. The default is GET. Type: string * (or Expression with resultType string). @@ -80,8 +93,8 @@ public HttpReadSettings withRequestMethod(Object requestMethod) { } /** - * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @return the requestBody value. */ @@ -90,8 +103,8 @@ public Object requestBody() { } /** - * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the HttpReadSettings object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java index 72cc16ebb9289..b7a1e53eddf0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of http server. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpServerLocation.class, visible = true) @JsonTypeName("HttpServerLocation") @Fluent public final class HttpServerLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HttpServerLocation"; + /* * Specify the relativeUrl of http server. Type: string (or Expression with resultType string) */ @@ -29,8 +37,18 @@ public HttpServerLocation() { } /** - * Get the relativeUrl property: Specify the relativeUrl of http server. Type: string (or Expression with - * resultType string). + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the relativeUrl property: Specify the relativeUrl of http server. Type: string (or Expression with resultType + * string). * * @return the relativeUrl value. */ @@ -39,8 +57,8 @@ public Object relativeUrl() { } /** - * Set the relativeUrl property: Specify the relativeUrl of http server. Type: string (or Expression with - * resultType string). + * Set the relativeUrl property: Specify the relativeUrl of http server. Type: string (or Expression with resultType + * string). * * @param relativeUrl the relativeUrl value to set. * @return the HttpServerLocation object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java index c97c24c7cd21a..5b4f4b1dae111 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java @@ -6,20 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for an HTTP file. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpSource.class, visible = true) @JsonTypeName("HttpSource") @Fluent public final class HttpSource extends CopySource { /* - * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent - * to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HttpSource"; + + /* + * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -30,6 +36,16 @@ public final class HttpSource extends CopySource { public HttpSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the httpRequestTimeout property: Specifies the timeout for a HTTP client to get HTTP response from HTTP * server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java index 25f481ca75938..d9b06fda1477d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.HubspotLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Hubspot Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotLinkedService.class, visible = true) @JsonTypeName("Hubspot") @Fluent public final class HubspotLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Hubspot"; + /* * Hubspot Service linked service properties. */ @@ -32,6 +40,16 @@ public final class HubspotLinkedService extends LinkedService { public HubspotLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Hubspot Service linked service properties. 
* @@ -220,8 +238,8 @@ public HubspotLinkedService withUseHostVerification(Object useHostVerification) } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -230,8 +248,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the HubspotLinkedService object itself. @@ -245,8 +263,8 @@ public HubspotLinkedService withUsePeerVerification(Object usePeerVerification) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -255,8 +273,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the HubspotLinkedService object itself. 
@@ -278,8 +296,9 @@ public HubspotLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model HubspotLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model HubspotLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java index 75836c805be0a..b76f907c3d134 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Hubspot Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotObjectDataset.class, visible = true) @JsonTypeName("HubspotObject") @Fluent public final class HubspotObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HubspotObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class HubspotObjectDataset extends Dataset { public HubspotObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java index 89623e09c9d6d..275913bd28bf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Hubspot Service source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotSource.class, visible = true) @JsonTypeName("HubspotSource") @Fluent public final class HubspotSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "HubspotSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class HubspotSource extends TabularSource { public HubspotSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java index c09dc6c233447..3372a30cd3886 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.IfConditionActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ * This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property * or the ifFalseActivities property depending on the result of the expression. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = IfConditionActivity.class, visible = true) @JsonTypeName("IfCondition") @Fluent public final class IfConditionActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "IfCondition"; + /* * IfCondition activity properties. */ @@ -32,6 +40,16 @@ public final class IfConditionActivity extends ControlActivity { public IfConditionActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: IfCondition activity properties. * @@ -96,8 +114,8 @@ public IfConditionActivity withUserProperties(List userProperties) } /** - * Get the expression property: An expression that would evaluate to Boolean. This is used to determine the block - * of activities (ifTrueActivities or ifFalseActivities) that will be executed. + * Get the expression property: An expression that would evaluate to Boolean. This is used to determine the block of + * activities (ifTrueActivities or ifFalseActivities) that will be executed. * * @return the expression value. 
*/ @@ -106,8 +124,8 @@ public Expression expression() { } /** - * Set the expression property: An expression that would evaluate to Boolean. This is used to determine the block - * of activities (ifTrueActivities or ifFalseActivities) that will be executed. + * Set the expression property: An expression that would evaluate to Boolean. This is used to determine the block of + * activities (ifTrueActivities or ifFalseActivities) that will be executed. * * @param expression the expression value to set. * @return the IfConditionActivity object itself. @@ -146,8 +164,8 @@ public IfConditionActivity withIfTrueActivities(List ifTrueActivities) } /** - * Get the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is - * an optional property and if not provided, the activity will exit without any action. + * Get the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is an + * optional property and if not provided, the activity will exit without any action. * * @return the ifFalseActivities value. */ @@ -156,8 +174,8 @@ public List ifFalseActivities() { } /** - * Set the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is - * an optional property and if not provided, the activity will exit without any action. + * Set the ifFalseActivities property: List of activities to execute if expression is evaluated to false. This is an + * optional property and if not provided, the activity will exit without any action. * * @param ifFalseActivities the ifFalseActivities value to set. * @return the IfConditionActivity object itself. @@ -179,8 +197,9 @@ public IfConditionActivity withIfFalseActivities(List ifFalseActivitie public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model IfConditionActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model IfConditionActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java index 94afc219e3769..429551efeb428 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ImpalaLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Impala server linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaLinkedService.class, visible = true) @JsonTypeName("Impala") @Fluent public final class ImpalaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Impala"; + /* * Impala server linked service properties. */ @@ -32,6 +40,16 @@ public final class ImpalaLinkedService extends LinkedService { public ImpalaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Impala server linked service properties. * @@ -222,9 +240,9 @@ public ImpalaLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -233,9 +251,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the ImpalaLinkedService object itself. @@ -324,8 +342,8 @@ public ImpalaLinkedService withAllowSelfSignedServerCert(Object allowSelfSignedS } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -334,8 +352,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ImpalaLinkedService object itself. 
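The recurring annotation change in these hunks (dropping include = JsonTypeInfo.As.PROPERTY in favor of a visible "type" property, a @JsonTypeId field, and a defaultImpl fallback) can be seen in isolation in the sketch below. The classes are hypothetical stand-ins, not SDK types; only the Jackson annotations mirror the generated code, so treat this as a minimal illustration of how the discriminator round-trips and how an unknown discriminator falls back to the default implementation.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical base type: the "type" discriminator is a real, visible property backed by a
// @JsonTypeId field, and unknown discriminators fall back to defaultImpl instead of failing.
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExampleSource.class, visible = true)
@JsonTypeName("ExampleSource")
@JsonSubTypes({ @JsonSubTypes.Type(name = "ExampleHiveSource", value = ExampleHiveSource.class) })
class ExampleSource {
    @JsonTypeId
    @JsonProperty(value = "type", required = true)
    String type = "ExampleSource";
}

// Hypothetical subtype mirroring the pattern used by HiveSource/ImpalaSource above.
class ExampleHiveSource extends ExampleSource {
    ExampleHiveSource() {
        this.type = "ExampleHiveSource";
    }

    @JsonProperty("query")
    String query;
}

public final class TypeDiscriminatorDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // Serialization: the @JsonTypeId field supplies the discriminator exactly once.
        ExampleHiveSource source = new ExampleHiveSource();
        source.query = "SELECT 1";
        String json = mapper.writeValueAsString(source);
        System.out.println(json); // {"type":"ExampleHiveSource","query":"SELECT 1"}

        // Deserialization: a known discriminator selects the registered subtype ...
        ExampleSource known = mapper.readValue(json, ExampleSource.class);
        System.out.println(known.getClass().getSimpleName()); // ExampleHiveSource

        // ... and an unknown one binds to the defaultImpl base class; visible = true keeps the
        // raw discriminator value available on the "type" field instead of throwing.
        ExampleSource unknown = mapper.readValue("{\"type\":\"NotRegistered\"}", ExampleSource.class);
        System.out.println(unknown.getClass().getSimpleName() + " / " + unknown.type);
    }
}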
@@ -357,8 +375,9 @@ public ImpalaLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ImpalaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ImpalaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java index 0e237280c101f..584b747050ffc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.ImpalaDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Impala server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaObjectDataset.class, visible = true) @JsonTypeName("ImpalaObject") @Fluent public final class ImpalaObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ImpalaObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class ImpalaObjectDataset extends Dataset { public ImpalaObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java index 5c210ac509824..de89e35bbc930 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Impala server source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaSource.class, visible = true) @JsonTypeName("ImpalaSource") @Fluent public final class ImpalaSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ImpalaSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ImpalaSource extends TabularSource { public ImpalaSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java index bcca02d7338bf..8439dd5179e66 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java @@ -8,7 +8,9 @@ import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -17,11 +19,7 @@ /** * Import command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = ImportSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImportSettings.class, visible = true) @JsonTypeName("ImportSettings") @JsonSubTypes({ @JsonSubTypes.Type( @@ -30,6 +28,13 @@ @JsonSubTypes.Type(name = "SnowflakeImportCopyCommand", value = SnowflakeImportCopyCommand.class) }) @Fluent public class ImportSettings { + /* + * The import setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Import command settings. */ @@ -40,6 +45,16 @@ public class ImportSettings { * Creates an instance of ImportSettings class. */ public ImportSettings() { + this.type = "ImportSettings"; + } + + /** + * Get the type property: The import setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java index 354d06888144f..e4bec2839c778 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.InformixLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Informix linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixLinkedService.class, visible = true) @JsonTypeName("Informix") @Fluent public final class InformixLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Informix"; + /* * Informix linked service properties. */ @@ -32,6 +40,16 @@ public final class InformixLinkedService extends LinkedService { public InformixLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Informix linked service properties. * @@ -79,8 +97,8 @@ public InformixLinkedService withAnnotations(List annotations) { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -90,8 +108,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the InformixLinkedService object itself. @@ -203,8 +221,8 @@ public InformixLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @return the encryptedCredential value. */ @@ -213,8 +231,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the InformixLinkedService object itself. @@ -236,8 +254,9 @@ public InformixLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model InformixLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model InformixLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java index d6bbdd040d315..9611e7933ad65 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Informix sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixSink.class, visible = true) @JsonTypeName("InformixSink") @Fluent public final class InformixSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "InformixSink"; + /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class InformixSink extends CopySink { public InformixSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: A query to execute before starting the copy. Type: string (or Expression with * resultType string). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java index c8de786bbea30..3c77b3b4b2add 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Informix. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixSource.class, visible = true) @JsonTypeName("InformixSource") @Fluent public final class InformixSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "InformixSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class InformixSource extends TabularSource { public InformixSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java index f779acf32bb8c..dc036bdc561ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.InformixTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Informix table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixTableDataset.class, visible = true) @JsonTypeName("InformixTable") @Fluent public final class InformixTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "InformixTable"; + /* * Informix table dataset properties. */ @@ -31,6 +39,16 @@ public final class InformixTableDataset extends Dataset { public InformixTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Informix table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java index 5bbfd7952d052..b4682e719f193 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,17 +19,20 @@ /** * Azure Data Factory nested object which serves as a compute resource for activities. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = IntegrationRuntime.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = IntegrationRuntime.class, visible = true) @JsonTypeName("IntegrationRuntime") @JsonSubTypes({ @JsonSubTypes.Type(name = "Managed", value = ManagedIntegrationRuntime.class), @JsonSubTypes.Type(name = "SelfHosted", value = SelfHostedIntegrationRuntime.class) }) @Fluent public class IntegrationRuntime { + /* + * Type of integration runtime. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type; + /* * Integration runtime description. */ @@ -45,6 +49,16 @@ public class IntegrationRuntime { * Creates an instance of IntegrationRuntime class. */ public IntegrationRuntime() { + this.type = IntegrationRuntimeType.fromString("IntegrationRuntime"); + } + + /** + * Get the type property: Type of integration runtime. + * + * @return the type value. + */ + public IntegrationRuntimeType type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java index 90393170f73e1..ead93be430e5b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java @@ -18,8 +18,7 @@ @Fluent public final class IntegrationRuntimeComputeProperties { /* - * The location for managed integration runtime. The supported regions could be found on - * https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + * The location for managed integration runtime. 
The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities */ @JsonProperty(value = "location") private String location; @@ -141,8 +140,8 @@ public IntegrationRuntimeComputeProperties withNumberOfNodes(Integer numberOfNod } /** - * Get the maxParallelExecutionsPerNode property: Maximum parallel executions count per node for managed - * integration runtime. + * Get the maxParallelExecutionsPerNode property: Maximum parallel executions count per node for managed integration + * runtime. * * @return the maxParallelExecutionsPerNode value. */ @@ -151,8 +150,8 @@ public Integer maxParallelExecutionsPerNode() { } /** - * Set the maxParallelExecutionsPerNode property: Maximum parallel executions count per node for managed - * integration runtime. + * Set the maxParallelExecutionsPerNode property: Maximum parallel executions count per node for managed integration + * runtime. * * @param maxParallelExecutionsPerNode the maxParallelExecutionsPerNode value to set. * @return the IntegrationRuntimeComputeProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java index 50d4b5679ae68..58e5b2311539e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java @@ -25,8 +25,7 @@ public final class IntegrationRuntimeDataFlowProperties { private DataFlowComputeType computeType; /* - * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and - * 272. + * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. */ @JsonProperty(value = "coreCount") private Integer coreCount; @@ -38,8 +37,7 @@ public final class IntegrationRuntimeDataFlowProperties { private Integer timeToLive; /* - * Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is - * reached if this is set as false. Default is true. + * Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. 
*/ @JsonProperty(value = "cleanup") private Boolean cleanup; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java index 89029df89023b..5e5e063d21b8b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java @@ -63,8 +63,9 @@ public IntegrationRuntimeDebugResource withName(String name) { public void validate() { super.validate(); if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model IntegrationRuntimeDebugResource")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model IntegrationRuntimeDebugResource")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java index 466b85c9732ea..f542f36107e3b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java @@ -80,8 +80,9 @@ public IntegrationRuntimeListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model IntegrationRuntimeListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model IntegrationRuntimeListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java index 4785f1faa4eb5..ca43e8c982b32 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java @@ -107,8 +107,9 @@ public IntegrationRuntimeReference withParameters(Map parameters */ public void validate() { if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model IntegrationRuntimeReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model IntegrationRuntimeReference")); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java index 3d509f9279f7f..93ccc8e3da0b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java @@ -36,8 +36,7 @@ public final class IntegrationRuntimeSsisCatalogInfo { private SecureString catalogAdminPassword; /* - * The pricing tier for the catalog database. The valid values could be found in - * https://azure.microsoft.com/en-us/pricing/details/sql-database/ + * The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ */ @JsonProperty(value = "catalogPricingTier") private IntegrationRuntimeSsisCatalogPricingTier catalogPricingTier; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java index 157788ffe7960..dae087f52d073 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -20,15 +21,22 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "type", - defaultImpl = IntegrationRuntimeStatus.class) + defaultImpl = IntegrationRuntimeStatus.class, + visible = true) @JsonTypeName("IntegrationRuntimeStatus") @JsonSubTypes({ @JsonSubTypes.Type(name = "Managed", value = ManagedIntegrationRuntimeStatus.class), @JsonSubTypes.Type(name = "SelfHosted", value = SelfHostedIntegrationRuntimeStatus.class) }) @Fluent public class IntegrationRuntimeStatus { + /* + * Type of integration runtime. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type; + /* * The data factory name which the integration runtime belong to. */ @@ -51,6 +59,16 @@ public class IntegrationRuntimeStatus { * Creates an instance of IntegrationRuntimeStatus class. */ public IntegrationRuntimeStatus() { + this.type = IntegrationRuntimeType.fromString("IntegrationRuntimeStatus"); + } + + /** + * Get the type property: Type of integration runtime. + * + * @return the type value. 
+ */ + public IntegrationRuntimeType type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java index 8211911df1390..76d822b7046d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.JiraLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Jira Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraLinkedService.class, visible = true) @JsonTypeName("Jira") @Fluent public final class JiraLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Jira"; + /* * Jira Service linked service properties. */ @@ -32,6 +40,16 @@ public final class JiraLinkedService extends LinkedService { public JiraLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Jira Service linked service properties. * @@ -101,8 +119,8 @@ public JiraLinkedService withHost(Object host) { } /** - * Get the port property: The TCP port that the Jira server uses to listen for client connections. The default - * value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + * Get the port property: The TCP port that the Jira server uses to listen for client connections. The default value + * is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. * * @return the port value. */ @@ -111,8 +129,8 @@ public Object port() { } /** - * Set the port property: The TCP port that the Jira server uses to listen for client connections. The default - * value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + * Set the port property: The TCP port that the Jira server uses to listen for client connections. The default value + * is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. * * @param port the port value to set. * @return the JiraLinkedService object itself. @@ -222,8 +240,8 @@ public JiraLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. 
*/ @@ -232,8 +250,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the JiraLinkedService object itself. @@ -247,8 +265,8 @@ public JiraLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -257,8 +275,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the JiraLinkedService object itself. @@ -280,8 +298,9 @@ public JiraLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model JiraLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model JiraLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java index f47057d278232..7e8a897de5b70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Jira Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraObjectDataset.class, visible = true) @JsonTypeName("JiraObject") @Fluent public final class JiraObjectDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JiraObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class JiraObjectDataset extends Dataset { public JiraObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java index 748f93f5de91c..528baedaaf3c5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Jira Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraSource.class, visible = true) @JsonTypeName("JiraSource") @Fluent public final class JiraSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JiraSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class JiraSource extends TabularSource { public JiraSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java index 2ff8620b4f7ec..ddd1d51b5823e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.JsonDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Json dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonDataset.class, visible = true) @JsonTypeName("Json") @Fluent public final class JsonDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Json"; + /* * Json dataset properties. */ @@ -31,6 +39,16 @@ public final class JsonDataset extends Dataset { public JsonDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Json dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java index 6bd72aab734a4..d440d8a148f21 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java @@ -6,51 +6,50 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The data stored in JSON format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonFormat.class, visible = true) @JsonTypeName("JsonFormat") @Fluent public final class JsonFormat extends DatasetStorageFormat { /* - * File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value - * is 'setOfObjects'. It is case-sensitive. + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JsonFormat"; + + /* + * File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. */ @JsonProperty(value = "filePattern") private Object filePattern; /* - * The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with - * resultType string). + * The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). */ @JsonProperty(value = "nestingSeparator") private Object nestingSeparator; /* - * The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte - * order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' - * column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. - * Type: string (or Expression with resultType string). + * The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. 
The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). */ @JsonProperty(value = "encodingName") private Object encodingName; /* - * The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with - * resultType string). + * The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). */ @JsonProperty(value = "jsonNodeReference") private Object jsonNodeReference; /* - * The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. - * For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, - * start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: - * object (or Expression with resultType object). + * The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). */ @JsonProperty(value = "jsonPathDefinition") private Object jsonPathDefinition; @@ -61,6 +60,16 @@ public final class JsonFormat extends DatasetStorageFormat { public JsonFormat() { } + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the filePattern property: File pattern of JSON. To be more specific, the way of separating a collection of * JSON objects. The default value is 'setOfObjects'. It is case-sensitive. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java index 5f9b8111724ed..9018c0c7a33b1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Json read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonReadSettings.class, visible = true) @JsonTypeName("JsonReadSettings") @Fluent public final class JsonReadSettings extends FormatReadSettings { + /* + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JsonReadSettings"; + /* * Compression settings. */ @@ -28,6 +36,16 @@ public final class JsonReadSettings extends FormatReadSettings { public JsonReadSettings() { } + /** + * Get the type property: The read setting type. 
+ * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the compressionProperties property: Compression settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java index cdb6fcc7cb94e..dc350a33e3162 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Json sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonSink.class, visible = true) @JsonTypeName("JsonSink") @Fluent public final class JsonSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JsonSink"; + /* * Json store settings. */ @@ -34,6 +42,16 @@ public final class JsonSink extends CopySink { public JsonSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Json store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java index 7e040f035a413..976297f8dde3d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Json source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonSource.class, visible = true) @JsonTypeName("JsonSource") @Fluent public final class JsonSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JsonSource"; + /* * Json store settings. */ @@ -29,8 +37,7 @@ public final class JsonSource extends CopySource { private JsonReadSettings formatSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -41,6 +48,16 @@ public final class JsonSource extends CopySource { public JsonSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Json store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java index 99ca589e589d1..db41ad14b4c9d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Json write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonWriteSettings.class, visible = true) @JsonTypeName("JsonWriteSettings") @Fluent public final class JsonWriteSettings extends FormatWriteSettings { /* - * File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default - * value is 'setOfObjects'. It is case-sensitive. + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "JsonWriteSettings"; + + /* + * File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. */ @JsonProperty(value = "filePattern") private Object filePattern; @@ -29,6 +36,16 @@ public final class JsonWriteSettings extends FormatWriteSettings { public JsonWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the filePattern property: File pattern of JSON. This setting controls the way a collection of JSON objects * will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
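Editor's note (not part of the generated patch): the recurring change above — dropping `include = JsonTypeInfo.As.PROPERTY`, adding `defaultImpl` and `visible = true`, and declaring the discriminator as a real `@JsonTypeId` field — follows a standard Jackson pattern. The minimal, self-contained sketch below shows that pattern in isolation; `FormatSettings` and `JsonLikeWriteSettings` are hypothetical stand-ins, not SDK types, and only the annotation shape mirrors the regenerated models.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class VisibleTypeIdSketch {
    // Base type: the "type" discriminator stays visible so subclasses can expose it as a field.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FormatSettings.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "JsonWriteSettings", value = JsonLikeWriteSettings.class) })
    public static class FormatSettings {
    }

    public static final class JsonLikeWriteSettings extends FormatSettings {
        // Mirrors the generated models: the discriminator is an ordinary field,
        // and @JsonTypeId tells Jackson to use its value as the polymorphic type id.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "JsonWriteSettings";

        public String type() {
            return this.type;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        FormatSettings settings = mapper.readValue("{\"type\":\"JsonWriteSettings\"}", FormatSettings.class);
        // The payload resolves to the registered subtype and the discriminator is readable on the instance.
        System.out.println(settings instanceof JsonLikeWriteSettings);     // true
        System.out.println(((JsonLikeWriteSettings) settings).type());     // JsonWriteSettings
        System.out.println(mapper.writeValueAsString(settings));           // expected: {"type":"JsonWriteSettings"}
    }
}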
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java index a62d7a7bff11d..71f0960456318 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Microsoft Fabric LakeHouse linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseLinkedService.class, visible = true) @JsonTypeName("LakeHouse") @Fluent public final class LakeHouseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouse"; + /* * Microsoft Fabric LakeHouse linked service properties. */ @@ -32,6 +40,16 @@ public final class LakeHouseLinkedService extends LinkedService { public LakeHouseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Fabric LakeHouse linked service properties. * @@ -203,8 +221,8 @@ public LakeHouseLinkedService withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -213,8 +231,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the LakeHouseLinkedService object itself. @@ -228,9 +246,9 @@ public LakeHouseLinkedService withEncryptedCredential(String encryptedCredential } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). 
+ * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -239,9 +257,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the LakeHouseLinkedService object itself. @@ -292,8 +310,9 @@ public LakeHouseLinkedService withServicePrincipalCredential(SecretBase serviceP public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model LakeHouseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model LakeHouseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java index 400e1282ddc99..945cd1db6377c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of Microsoft Fabric LakeHouse Files dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseLocation.class, visible = true) @JsonTypeName("LakeHouseLocation") @Fluent public final class LakeHouseLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseLocation"; + /** * Creates an instance of LakeHouseLocation class. */ public LakeHouseLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java index 3ad98ec8c1fbc..c35fd19ab6320 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Microsoft Fabric LakeHouse Files read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseReadSettings.class, visible = true) @JsonTypeName("LakeHouseReadSettings") @Fluent public final class LakeHouseReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -36,8 +43,7 @@ public final class LakeHouseReadSettings extends StoreReadSettings { private Object wildcardFileName; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -49,15 +55,13 @@ public final class LakeHouseReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -81,8 +85,18 @@ public LakeHouseReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). 
+ * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -91,8 +105,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the LakeHouseReadSettings object itself. @@ -125,8 +139,8 @@ public LakeHouseReadSettings withWildcardFolderPath(Object wildcardFolderPath) { } /** - * Get the wildcardFileName property: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or - * Expression with resultType string). + * Get the wildcardFileName property: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression + * with resultType string). * * @return the wildcardFileName value. */ @@ -135,8 +149,8 @@ public Object wildcardFileName() { } /** - * Set the wildcardFileName property: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or - * Expression with resultType string). + * Set the wildcardFileName property: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression + * with resultType string). * * @param wildcardFileName the wildcardFileName value to set. * @return the LakeHouseReadSettings object itself. @@ -147,8 +161,8 @@ public LakeHouseReadSettings withWildcardFileName(Object wildcardFileName) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -157,8 +171,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the LakeHouseReadSettings object itself. @@ -191,8 +205,8 @@ public LakeHouseReadSettings withEnablePartitionDiscovery(Object enablePartition } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -201,8 +215,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. 
Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the LakeHouseReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java index d454fb0a17abf..9cdca12719727 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Microsoft Fabric LakeHouse Table. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableDataset.class, visible = true) @JsonTypeName("LakeHouseTable") @Fluent public final class LakeHouseTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseTable"; + /* * Microsoft Fabric LakeHouse Table dataset properties. */ @@ -31,6 +39,16 @@ public final class LakeHouseTableDataset extends Dataset { public LakeHouseTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Fabric LakeHouse Table dataset properties. * @@ -104,8 +122,8 @@ public LakeHouseTableDataset withFolder(DatasetFolder folder) { } /** - * Get the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with - * resultType string). + * Get the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType + * string). * * @return the table value. */ @@ -114,8 +132,8 @@ public Object table() { } /** - * Set the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with - * resultType string). + * Set the table property: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType + * string). * * @param table the table value to set. * @return the LakeHouseTableDataset object itself. 
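Editor's note (not part of the generated patch): the other change repeated throughout this regeneration is in `validate()`, where `LOGGER.logExceptionAsError(...)` becomes `LOGGER.atError().log(...)` on the fluent `LoggingEventBuilder`. The sketch below illustrates only that convention; `SampleReference` and its property are hypothetical, and only the logging/throw shape mirrors the generated code.

import com.azure.core.util.logging.ClientLogger;

// Hypothetical model used solely to illustrate the validate() convention.
final class SampleReference {
    private static final ClientLogger LOGGER = new ClientLogger(SampleReference.class);

    private String referenceName;

    public SampleReference withReferenceName(String referenceName) {
        this.referenceName = referenceName;
        return this;
    }

    /**
     * Validates the instance the way the regenerated models do: log at error level
     * and throw the same IllegalArgumentException when a required property is missing.
     */
    public void validate() {
        if (referenceName == null) {
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property referenceName in model SampleReference"));
        }
    }
}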
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java index 8013f24083d4d..741c8efa828df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity for Microsoft Fabric LakeHouse Table sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableSink.class, visible = true) @JsonTypeName("LakeHouseTableSink") @Fluent public final class LakeHouseTableSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseTableSink"; + /* * The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". */ @@ -23,15 +31,13 @@ public final class LakeHouseTableSink extends CopySink { private Object tableActionOption; /* - * Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will - * be a new partition. Possible values include: "None", "PartitionByKey". + * Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will be a new partition. Possible values include: "None", "PartitionByKey". */ @JsonProperty(value = "partitionOption") private Object partitionOption; /* - * Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType - * array of objects). + * Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType array of objects). */ @JsonProperty(value = "partitionNameList") private Object partitionNameList; @@ -42,6 +48,16 @@ public final class LakeHouseTableSink extends CopySink { public LakeHouseTableSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the tableActionOption property: The type of table action for LakeHouse Table sink. Possible values include: * "None", "Append", "Overwrite". 
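Editor's note (not part of the generated patch): a short sketch of why `defaultImpl` is now set on every `@JsonTypeInfo` annotation. When the discriminator value matches no `@JsonSubTypes` entry, Jackson falls back to the registered default implementation instead of failing, which keeps deserialization tolerant of service-side sink/source types added after this SDK was generated. `CopySinkLike` and `TableSinkLike` are hypothetical names, not SDK types.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class DefaultImplSketch {
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopySinkLike.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "LakeHouseTableSink", value = TableSinkLike.class) })
    public static class CopySinkLike {
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "CopySink";

        public String type() {
            return this.type;
        }
    }

    public static final class TableSinkLike extends CopySinkLike {
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // A known discriminator resolves to the registered subtype.
        CopySinkLike known = mapper.readValue("{\"type\":\"LakeHouseTableSink\"}", CopySinkLike.class);
        System.out.println(known.getClass().getSimpleName());   // TableSinkLike

        // An unrecognized discriminator falls back to defaultImpl instead of throwing.
        CopySinkLike unknown = mapper.readValue("{\"type\":\"SomeFutureSink\"}", CopySinkLike.class);
        System.out.println(unknown.getClass().getSimpleName()); // CopySinkLike
    }
}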
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java index c66c2afcb0f90..22c8eaf9646e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Microsoft Fabric LakeHouse Table. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableSource.class, visible = true) @JsonTypeName("LakeHouseTableSource") @Fluent public final class LakeHouseTableSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseTableSource"; + /* * Query an older snapshot by timestamp. Type: string (or Expression with resultType string). */ @@ -29,8 +37,7 @@ public final class LakeHouseTableSource extends CopySource { private Object versionAsOf; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -42,8 +49,18 @@ public LakeHouseTableSource() { } /** - * Get the timestampAsOf property: Query an older snapshot by timestamp. Type: string (or Expression with - * resultType string). + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the timestampAsOf property: Query an older snapshot by timestamp. Type: string (or Expression with resultType + * string). * * @return the timestampAsOf value. */ @@ -52,8 +69,8 @@ public Object timestampAsOf() { } /** - * Set the timestampAsOf property: Query an older snapshot by timestamp. Type: string (or Expression with - * resultType string). + * Set the timestampAsOf property: Query an older snapshot by timestamp. Type: string (or Expression with resultType + * string). * * @param timestampAsOf the timestampAsOf value to set. * @return the LakeHouseTableSource object itself. 
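Editor's note (not part of the generated patch): every model touched here is annotated `@Fluent`, meaning each `with*` setter returns `this` so property assignments chain. The sketch below shows only that shape; `SampleSource` and its properties are hypothetical and stand in for the generated getter/setter pairs such as `query()`/`withQuery(...)`.

import com.azure.core.annotation.Fluent;

/** Hypothetical model illustrating the fluent with* convention used by the generated classes. */
@Fluent
final class SampleSource {
    private Object query;
    private Object additionalColumns;

    public Object query() {
        return this.query;
    }

    public SampleSource withQuery(Object query) {
        this.query = query;
        return this;
    }

    public Object additionalColumns() {
        return this.additionalColumns;
    }

    public SampleSource withAdditionalColumns(Object additionalColumns) {
        this.additionalColumns = additionalColumns;
        return this;
    }
}

// Usage: setters chain because each returns the instance itself.
// SampleSource source = new SampleSource()
//     .withQuery("select 1")
//     .withAdditionalColumns(java.util.Collections.emptyList());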
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java index cd9ab31ff433b..ac14793498e1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java @@ -5,6 +5,8 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -12,16 +14,33 @@ /** * Microsoft Fabric LakeHouse Files write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseWriteSettings.class, visible = true) @JsonTypeName("LakeHouseWriteSettings") @Fluent public final class LakeHouseWriteSettings extends StoreWriteSettings { + /* + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "LakeHouseWriteSettings"; + /** * Creates an instance of LakeHouseWriteSettings class. */ public LakeHouseWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java index 4edff3efc5315..5bd3d1a2b4942 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The key authorization type integration runtime. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "authorizationType") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "authorizationType", + defaultImpl = LinkedIntegrationRuntimeKeyAuthorization.class, + visible = true) @JsonTypeName("Key") @Fluent public final class LinkedIntegrationRuntimeKeyAuthorization extends LinkedIntegrationRuntimeType { + /* + * The authorization type for integration runtime sharing. + */ + @JsonTypeId + @JsonProperty(value = "authorizationType", required = true) + private String authorizationType = "Key"; + /* * The key used for authorization. 
*/ @@ -29,6 +41,16 @@ public final class LinkedIntegrationRuntimeKeyAuthorization extends LinkedIntegr public LinkedIntegrationRuntimeKeyAuthorization() { } + /** + * Get the authorizationType property: The authorization type for integration runtime sharing. + * + * @return the authorizationType value. + */ + @Override + public String authorizationType() { + return this.authorizationType; + } + /** * Get the key property: The key used for authorization. * @@ -58,8 +80,9 @@ public LinkedIntegrationRuntimeKeyAuthorization withKey(SecureString key) { public void validate() { super.validate(); if (key() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property key in model LinkedIntegrationRuntimeKeyAuthorization")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property key in model LinkedIntegrationRuntimeKeyAuthorization")); } else { key().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java index e45b5a1c27db5..91844a66bfdfc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The role based access control (RBAC) authorization type integration runtime. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "authorizationType") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "authorizationType", + defaultImpl = LinkedIntegrationRuntimeRbacAuthorization.class, + visible = true) @JsonTypeName("RBAC") @Fluent public final class LinkedIntegrationRuntimeRbacAuthorization extends LinkedIntegrationRuntimeType { + /* + * The authorization type for integration runtime sharing. + */ + @JsonTypeId + @JsonProperty(value = "authorizationType", required = true) + private String authorizationType = "RBAC"; + /* * The resource identifier of the integration runtime to be shared. */ @@ -35,6 +47,16 @@ public final class LinkedIntegrationRuntimeRbacAuthorization extends LinkedInteg public LinkedIntegrationRuntimeRbacAuthorization() { } + /** + * Get the authorizationType property: The authorization type for integration runtime sharing. + * + * @return the authorizationType value. + */ + @Override + public String authorizationType() { + return this.authorizationType; + } + /** * Get the resourceId property: The resource identifier of the integration runtime to be shared. 
* @@ -84,8 +106,9 @@ public LinkedIntegrationRuntimeRbacAuthorization withCredential(CredentialRefere public void validate() { super.validate(); if (resourceId() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property resourceId in model LinkedIntegrationRuntimeRbacAuthorization")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property resourceId in model LinkedIntegrationRuntimeRbacAuthorization")); } if (credential() != null) { credential().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java index ea505e92d479a..a5c406c517e38 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java @@ -52,8 +52,9 @@ public LinkedIntegrationRuntimeRequest withLinkedFactoryName(String linkedFactor */ public void validate() { if (linkedFactoryName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedFactoryName in model LinkedIntegrationRuntimeRequest")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedFactoryName in model LinkedIntegrationRuntimeRequest")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java index 24e99f47c1c32..19d5d6be3f61c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java @@ -5,7 +5,9 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; @@ -14,19 +16,36 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "authorizationType", - defaultImpl = LinkedIntegrationRuntimeType.class) + defaultImpl = LinkedIntegrationRuntimeType.class, + visible = true) @JsonTypeName("LinkedIntegrationRuntimeType") @JsonSubTypes({ @JsonSubTypes.Type(name = "Key", value = LinkedIntegrationRuntimeKeyAuthorization.class), @JsonSubTypes.Type(name = "RBAC", value = LinkedIntegrationRuntimeRbacAuthorization.class) }) @Immutable public class LinkedIntegrationRuntimeType { + /* + * The authorization type for integration runtime sharing. + */ + @JsonTypeId + @JsonProperty(value = "authorizationType", required = true) + private String authorizationType; + /** * Creates an instance of LinkedIntegrationRuntimeType class. 
*/ public LinkedIntegrationRuntimeType() { + this.authorizationType = "LinkedIntegrationRuntimeType"; + } + + /** + * Get the authorizationType property: The authorization type for integration runtime sharing. + * + * @return the authorizationType value. + */ + public String authorizationType() { + return this.authorizationType; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java index 58e62f220ab6c..74ac1697d19ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -21,11 +22,7 @@ * The nested object which contains the information and credential which can be used to connect with related store or * compute resource. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = LinkedService.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LinkedService.class, visible = true) @JsonTypeName("LinkedService") @JsonSubTypes({ @JsonSubTypes.Type(name = "AzureStorage", value = AzureStorageLinkedService.class), @@ -151,6 +148,13 @@ @JsonSubTypes.Type(name = "ServiceNowV2", value = ServiceNowV2LinkedService.class) }) @Fluent public class LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * The integration runtime reference. */ @@ -177,8 +181,7 @@ public class LinkedService { private List annotations; /* - * The nested object which contains the information and credential which can be used to connect with related store - * or compute resource. + * The nested object which contains the information and credential which can be used to connect with related store or compute resource. */ @JsonIgnore private Map additionalProperties; @@ -187,6 +190,16 @@ public class LinkedService { * Creates an instance of LinkedService class. */ public LinkedService() { + this.type = "LinkedService"; + } + + /** + * Get the type property: Type of linked service. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java index ae242bb94e908..3cc8115c53a05 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java @@ -63,8 +63,9 @@ public LinkedServiceDebugResource withName(String name) { public void validate() { super.validate(); if (properties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property properties in model LinkedServiceDebugResource")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model LinkedServiceDebugResource")); } else { properties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java index 0b45d70ac479b..fa2bdf17844c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java @@ -80,8 +80,9 @@ public LinkedServiceListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model LinkedServiceListResponse")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property value in model LinkedServiceListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java index 69310b692ffe2..7c69e3f1842ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java @@ -107,8 +107,9 @@ public LinkedServiceReference withParameters(Map parameters) { */ public void validate() { if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model LinkedServiceReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model LinkedServiceReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java index 2529e283c2b22..a57fd5a842450 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java @@ -20,8 +20,7 @@ public final class LogLocationSettings { private LinkedServiceReference linkedServiceName; /* - * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType - * string). + * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */ @JsonProperty(value = "path") private Object path; @@ -81,8 +80,9 @@ public LogLocationSettings withPath(Object path) { */ public void validate() { if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model LogLocationSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model LogLocationSettings")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java index e3c3669318524..2cb732c4fb0bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java @@ -109,8 +109,9 @@ public void validate() { copyActivityLogSettings().validate(); } if (logLocationSettings() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property logLocationSettings in model LogSettings")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property logLocationSettings in model LogSettings")); } else { logLocationSettings().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java index 553b1c937eb5e..2b2548f5b691a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java @@ -25,8 +25,7 @@ public final class LogStorageSettings { private LinkedServiceReference linkedServiceName; /* - * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType - * string). + * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "path") private Object path; @@ -177,8 +176,9 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model LogStorageSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model LogStorageSettings")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java index e5a2f7825be6f..62217c0bb6103 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.LookupActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Lookup activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LookupActivity.class, visible = true) @JsonTypeName("Lookup") @Fluent public final class LookupActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Lookup"; + /* * Lookup activity properties. */ @@ -31,6 +39,16 @@ public final class LookupActivity extends ExecutionActivity { public LookupActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Lookup activity properties. 
* @@ -192,8 +210,9 @@ public LookupActivity withFirstRowOnly(Object firstRowOnly) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model LookupActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model LookupActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java index 75b1defe96c31..285254617c3ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MagentoLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Magento server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoLinkedService.class, visible = true) @JsonTypeName("Magento") @Fluent public final class MagentoLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Magento"; + /* * Magento server linked service properties. */ @@ -32,6 +40,16 @@ public final class MagentoLinkedService extends LinkedService { public MagentoLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Magento server linked service properties. * @@ -174,8 +192,8 @@ public MagentoLinkedService withUseHostVerification(Object useHostVerification) } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -184,8 +202,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the MagentoLinkedService object itself. 
@@ -199,8 +217,8 @@ public MagentoLinkedService withUsePeerVerification(Object usePeerVerification) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -209,8 +227,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MagentoLinkedService object itself. @@ -232,8 +250,9 @@ public MagentoLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MagentoLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MagentoLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java index 6e7511b6f6071..82dbf391f1488 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Magento server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoObjectDataset.class, visible = true) @JsonTypeName("MagentoObject") @Fluent public final class MagentoObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MagentoObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class MagentoObjectDataset extends Dataset { public MagentoObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java index 982b5e857b386..402b3e141231f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Magento server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoSource.class, visible = true) @JsonTypeName("MagentoSource") @Fluent public final class MagentoSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MagentoSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class MagentoSource extends TabularSource { public MagentoSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java index 4a9b1996dc61d..d6959766dc3b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,10 +15,21 @@ /** * Managed identity credential. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ManagedIdentityCredential.class, + visible = true) @JsonTypeName("ManagedIdentity") @Fluent public final class ManagedIdentityCredential extends Credential { + /* + * Type of credential. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ManagedIdentity"; + /* * Managed identity credential properties. 
*/ @@ -30,6 +42,16 @@ public final class ManagedIdentityCredential extends Credential { public ManagedIdentityCredential() { } + /** + * Get the type property: Type of credential. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Managed identity credential properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredentialResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredentialResource.java index 0fdcd88c16586..bdea8f9cc7169 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredentialResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredentialResource.java @@ -4,215 +4,73 @@ package com.azure.resourcemanager.datafactory.models; -import com.azure.core.util.Context; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.fasterxml.jackson.annotation.JsonProperty; /** - * An immutable client-side representation of ManagedIdentityCredentialResource. + * Credential resource type. */ -public interface ManagedIdentityCredentialResource { - /** - * Gets the id property: Fully qualified resource Id for the resource. - * - * @return the id value. +@Fluent +public final class ManagedIdentityCredentialResource extends CredentialResourceInner { + /* + * Managed Identity Credential properties. */ - String id(); + @JsonProperty(value = "properties", required = true) + private ManagedIdentityCredential properties; /** - * Gets the properties property: Managed Identity Credential properties. - * - * @return the properties value. + * Creates an instance of ManagedIdentityCredentialResource class. */ - ManagedIdentityCredential properties(); - - /** - * Gets the name property: The resource name. - * - * @return the name value. - */ - String name(); - - /** - * Gets the type property: The resource type. - * - * @return the type value. - */ - String type(); - - /** - * Gets the etag property: Etag identifies change in the resource. - * - * @return the etag value. - */ - String etag(); + public ManagedIdentityCredentialResource() { + } /** - * Gets the name of the resource group. + * Get the properties property: Managed Identity Credential properties. * - * @return the name of the resource group. + * @return the properties value. */ - String resourceGroupName(); + public ManagedIdentityCredential properties() { + return this.properties; + } /** - * Gets the inner com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner object. + * Set the properties property: Managed Identity Credential properties. * - * @return the inner object. + * @param properties the properties value to set. + * @return the ManagedIdentityCredentialResource object itself. */ - ManagedIdentityCredentialResourceInner innerModel(); - - /** - * The entirety of the ManagedIdentityCredentialResource definition. 
- */ - interface Definition extends DefinitionStages.Blank, DefinitionStages.WithParentResource, - DefinitionStages.WithProperties, DefinitionStages.WithCreate { + public ManagedIdentityCredentialResource withProperties(ManagedIdentityCredential properties) { + this.properties = properties; + return this; } /** - * The ManagedIdentityCredentialResource definition stages. + * {@inheritDoc} */ - interface DefinitionStages { - /** - * The first stage of the ManagedIdentityCredentialResource definition. - */ - interface Blank extends WithParentResource { - } - - /** - * The stage of the ManagedIdentityCredentialResource definition allowing to specify parent resource. - */ - interface WithParentResource { - /** - * Specifies resourceGroupName, factoryName. - * - * @param resourceGroupName The resource group name. - * @param factoryName The factory name. - * @return the next definition stage. - */ - WithProperties withExistingFactory(String resourceGroupName, String factoryName); - } - - /** - * The stage of the ManagedIdentityCredentialResource definition allowing to specify properties. - */ - interface WithProperties { - /** - * Specifies the properties property: Managed Identity Credential properties.. - * - * @param properties Managed Identity Credential properties. - * @return the next definition stage. - */ - WithCreate withProperties(ManagedIdentityCredential properties); - } - - /** - * The stage of the ManagedIdentityCredentialResource definition which contains all the minimum required - * properties for the resource to be created, but also allows for any other optional properties to be specified. - */ - interface WithCreate extends DefinitionStages.WithIfMatch { - /** - * Executes the create request. - * - * @return the created resource. - */ - ManagedIdentityCredentialResource create(); - - /** - * Executes the create request. - * - * @param context The context to associate with this operation. - * @return the created resource. - */ - ManagedIdentityCredentialResource create(Context context); - } - - /** - * The stage of the ManagedIdentityCredentialResource definition allowing to specify ifMatch. - */ - interface WithIfMatch { - /** - * Specifies the ifMatch property: ETag of the credential entity. Should only be specified for update, for - * which it should match existing entity or can be * for unconditional update.. - * - * @param ifMatch ETag of the credential entity. Should only be specified for update, for which it should - * match existing entity or can be * for unconditional update. - * @return the next definition stage. - */ - WithCreate withIfMatch(String ifMatch); - } + @Override + public ManagedIdentityCredentialResource withId(String id) { + super.withId(id); + return this; } /** - * Begins update for the ManagedIdentityCredentialResource resource. + * Validates the instance. * - * @return the stage of resource update. - */ - ManagedIdentityCredentialResource.Update update(); - - /** - * The template for ManagedIdentityCredentialResource update. - */ - interface Update extends UpdateStages.WithProperties, UpdateStages.WithIfMatch { - /** - * Executes the update request. - * - * @return the updated resource. - */ - ManagedIdentityCredentialResource apply(); - - /** - * Executes the update request. - * - * @param context The context to associate with this operation. - * @return the updated resource. - */ - ManagedIdentityCredentialResource apply(Context context); - } - - /** - * The ManagedIdentityCredentialResource update stages. 
+ * @throws IllegalArgumentException thrown if the instance is not valid. */ - interface UpdateStages { - /** - * The stage of the ManagedIdentityCredentialResource update allowing to specify properties. - */ - interface WithProperties { - /** - * Specifies the properties property: Managed Identity Credential properties.. - * - * @param properties Managed Identity Credential properties. - * @return the next definition stage. - */ - Update withProperties(ManagedIdentityCredential properties); - } - - /** - * The stage of the ManagedIdentityCredentialResource update allowing to specify ifMatch. - */ - interface WithIfMatch { - /** - * Specifies the ifMatch property: ETag of the credential entity. Should only be specified for update, for - * which it should match existing entity or can be * for unconditional update.. - * - * @param ifMatch ETag of the credential entity. Should only be specified for update, for which it should - * match existing entity or can be * for unconditional update. - * @return the next definition stage. - */ - Update withIfMatch(String ifMatch); + @Override + public void validate() { + super.validate(); + if (properties() == null) { + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model ManagedIdentityCredentialResource")); + } else { + properties().validate(); } } - /** - * Refreshes the resource to sync with Azure. - * - * @return the refreshed resource. - */ - ManagedIdentityCredentialResource refresh(); - - /** - * Refreshes the resource to sync with Azure. - * - * @param context The context to associate with this operation. - * @return the refreshed resource. - */ - ManagedIdentityCredentialResource refresh(Context context); + private static final ClientLogger LOGGER = new ClientLogger(ManagedIdentityCredentialResource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java index 0db03f12e9d99..44635a286948b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java @@ -8,16 +8,28 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Managed integration runtime, including managed elastic and managed dedicated integration runtimes. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ManagedIntegrationRuntime.class, + visible = true) @JsonTypeName("Managed") @Fluent public final class ManagedIntegrationRuntime extends IntegrationRuntime { + /* + * Type of integration runtime. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type = IntegrationRuntimeType.MANAGED; + /* * Integration runtime state, only valid for managed dedicated integration runtime. */ @@ -42,6 +54,16 @@ public final class ManagedIntegrationRuntime extends IntegrationRuntime { public ManagedIntegrationRuntime() { } + /** + * Get the type property: Type of integration runtime. + * + * @return the type value. + */ + @Override + public IntegrationRuntimeType type() { + return this.type; + } + /** * Get the state property: Integration runtime state, only valid for managed dedicated integration runtime. * @@ -136,8 +158,8 @@ public ManagedIntegrationRuntime withSsisProperties(IntegrationRuntimeSsisProper } /** - * Get the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime - * will join. + * Get the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime will + * join. * * @return the customerVirtualNetwork value. */ @@ -146,8 +168,8 @@ public IntegrationRuntimeCustomerVirtualNetwork customerVirtualNetwork() { } /** - * Set the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime - * will join. + * Set the customerVirtualNetwork property: The name of virtual network to which Azure-SSIS integration runtime will + * join. * * @param customerVirtualNetwork the customerVirtualNetwork value to set. * @return the ManagedIntegrationRuntime object itself. @@ -170,8 +192,9 @@ public IntegrationRuntimeCustomerVirtualNetwork customerVirtualNetwork() { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ManagedIntegrationRuntime")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ManagedIntegrationRuntime")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java index cc5a90833c901..28e9ddb2e341a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeStatusTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.time.OffsetDateTime; @@ -16,10 +17,21 @@ /** * Managed integration runtime status. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ManagedIntegrationRuntimeStatus.class, + visible = true) @JsonTypeName("Managed") @Immutable public final class ManagedIntegrationRuntimeStatus extends IntegrationRuntimeStatus { + /* + * Type of integration runtime. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type = IntegrationRuntimeType.MANAGED; + /* * Managed integration runtime status type properties. */ @@ -33,6 +45,16 @@ public final class ManagedIntegrationRuntimeStatus extends IntegrationRuntimeSta public ManagedIntegrationRuntimeStatus() { } + /** + * Get the type property: Type of integration runtime. + * + * @return the type value. + */ + @Override + public IntegrationRuntimeType type() { + return this.type; + } + /** * Get the innerTypeProperties property: Managed integration runtime status type properties. * @@ -87,8 +109,9 @@ public ManagedIntegrationRuntimeOperationResult lastOperation() { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ManagedIntegrationRuntimeStatus")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ManagedIntegrationRuntimeStatus")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java index 5b33404fa39a9..b9fac08b25363 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java @@ -136,8 +136,8 @@ public Boolean isReserved() { } /** - * Get the privateLinkResourceId property: The ARM resource ID of the resource to which the managed private - * endpoint is created. + * Get the privateLinkResourceId property: The ARM resource ID of the resource to which the managed private endpoint + * is created. * * @return the privateLinkResourceId value. */ @@ -146,8 +146,8 @@ public String privateLinkResourceId() { } /** - * Set the privateLinkResourceId property: The ARM resource ID of the resource to which the managed private - * endpoint is created. + * Set the privateLinkResourceId property: The ARM resource ID of the resource to which the managed private endpoint + * is created. * * @param privateLinkResourceId the privateLinkResourceId value to set. * @return the ManagedPrivateEndpoint object itself. 
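The other change repeated through every validate() method in this patch is the logging call: instead of LOGGER.logExceptionAsError(...), the generated code builds the event with ClientLogger.atError() and throws the IllegalArgumentException returned by log(...). The thrown type is unchanged; only the logging call differs. A minimal hand-written sketch of that shape follows — DemoSettings and its path property are hypothetical stand-ins, not generated types:

import com.azure.core.util.logging.ClientLogger;

public final class DemoSettings {
    private static final ClientLogger LOGGER = new ClientLogger(DemoSettings.class);

    /* A stand-in for a required property such as linkedServiceName or referenceName. */
    private Object path;

    public DemoSettings withPath(Object path) {
        this.path = path;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (path == null) {
            // atError() starts a log event; log(...) records the exception and returns it,
            // so it can be thrown in the same statement, mirroring the regenerated models.
            throw LOGGER.atError()
                .log(new IllegalArgumentException("Missing required property path in model DemoSettings"));
        }
    }
}

Callers still receive an IllegalArgumentException from validate(); the switch only affects how the error is routed through the logger.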
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java index c9f4e1bde4ec6..6a738e4f200da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java @@ -80,8 +80,9 @@ public ManagedPrivateEndpointListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model ManagedPrivateEndpointListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model ManagedPrivateEndpointListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java index d47f037abf330..1c8f31c293208 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java @@ -80,8 +80,9 @@ public ManagedVirtualNetworkListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model ManagedVirtualNetworkListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model ManagedVirtualNetworkListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java index 06245aa78c177..8c02d97832045 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java @@ -78,12 +78,14 @@ public ManagedVirtualNetworkReference withReferenceName(String referenceName) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model ManagedVirtualNetworkReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property type in model ManagedVirtualNetworkReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model 
ManagedVirtualNetworkReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model ManagedVirtualNetworkReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java index 6d87aa5b10205..a1a5f96fb56c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java @@ -33,8 +33,7 @@ public final class MapperConnection { private ConnectionType type; /* - * A boolean indicating whether linked service is of type inline dataset. Currently only inline datasets are - * supported. + * A boolean indicating whether linked service is of type inline dataset. Currently only inline datasets are supported. */ @JsonProperty(value = "isInlineDataset") private Boolean isInlineDataset; @@ -164,8 +163,8 @@ public void validate() { linkedService().validate(); } if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model MapperConnection")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model MapperConnection")); } if (commonDslConnectorProperties() != null) { commonDslConnectorProperties().forEach(e -> e.validate()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java index a722c5c079109..b3a0f8de23961 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.MappingDataFlowTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,10 +15,17 @@ /** * Mapping data flow. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MappingDataFlow.class, visible = true) @JsonTypeName("MappingDataFlow") @Fluent public final class MappingDataFlow extends DataFlow { + /* + * Type of data flow. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MappingDataFlow"; + /* * Mapping data flow type properties. */ @@ -30,6 +38,16 @@ public final class MappingDataFlow extends DataFlow { public MappingDataFlow() { } + /** + * Get the type property: Type of data flow. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Mapping data flow type properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java index 2fbebf6fb5078..79d5e2891448e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MariaDBLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * MariaDB server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBLinkedService.class, visible = true) @JsonTypeName("MariaDB") @Fluent public final class MariaDBLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MariaDB"; + /* * MariaDB server linked service properties. */ @@ -32,6 +40,16 @@ public final class MariaDBLinkedService extends LinkedService { public MariaDBLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MariaDB server linked service properties. * @@ -243,8 +261,8 @@ public MariaDBLinkedService withPassword(AzureKeyVaultSecretReference password) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -253,8 +271,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MariaDBLinkedService object itself. 
@@ -276,8 +294,9 @@ public MariaDBLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MariaDBLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MariaDBLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java index 0118e5e4f9f78..1eda874fde684 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity MariaDB server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBSource.class, visible = true) @JsonTypeName("MariaDBSource") @Fluent public final class MariaDBSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MariaDBSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class MariaDBSource extends TabularSource { public MariaDBSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java index b73bac39aecf3..6f6259306d3f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * MariaDB server dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBTableDataset.class, visible = true) @JsonTypeName("MariaDBTable") @Fluent public final class MariaDBTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MariaDBTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class MariaDBTableDataset extends Dataset { public MariaDBTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java index 1445b91bc1b4e..e78ce562b1ca0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MarketoLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Marketo server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoLinkedService.class, visible = true) @JsonTypeName("Marketo") @Fluent public final class MarketoLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Marketo"; + /* * Marketo server linked service properties. */ @@ -32,6 +40,16 @@ public final class MarketoLinkedService extends LinkedService { public MarketoLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Marketo server linked service properties. * @@ -197,8 +215,8 @@ public MarketoLinkedService withUseHostVerification(Object useHostVerification) } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -207,8 +225,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. 
+ * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the MarketoLinkedService object itself. @@ -222,8 +240,8 @@ public MarketoLinkedService withUsePeerVerification(Object usePeerVerification) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -232,8 +250,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MarketoLinkedService object itself. @@ -255,8 +273,9 @@ public MarketoLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MarketoLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MarketoLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java index 73b0b07c99fa5..8e832754bdc0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Marketo server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoObjectDataset.class, visible = true) @JsonTypeName("MarketoObject") @Fluent public final class MarketoObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MarketoObject"; + /* * Properties specific to this dataset type. 
*/ @@ -31,6 +39,16 @@ public final class MarketoObjectDataset extends Dataset { public MarketoObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java index ab6e17652c004..86b376ee7b0da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Marketo server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoSource.class, visible = true) @JsonTypeName("MarketoSource") @Fluent public final class MarketoSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MarketoSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class MarketoSource extends TabularSource { public MarketoSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java index 23737154f7229..a3725e92617e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Microsoft Access linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MicrosoftAccessLinkedService.class, + visible = true) @JsonTypeName("MicrosoftAccess") @Fluent public final class MicrosoftAccessLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MicrosoftAccess"; + /* * Microsoft Access linked service properties. */ @@ -33,6 +45,16 @@ public final class MicrosoftAccessLinkedService extends LinkedService { public MicrosoftAccessLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Access linked service properties. * @@ -80,8 +102,8 @@ public MicrosoftAccessLinkedService withAnnotations(List annotations) { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -91,8 +113,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the MicrosoftAccessLinkedService object itself. @@ -204,8 +226,8 @@ public MicrosoftAccessLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -214,8 +236,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MicrosoftAccessLinkedService object itself. 
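A second change repeated in every validate() override in this patch: the generated models stop calling LOGGER.logExceptionAsError(...) and instead route the exception through the fluent atError() builder on azure-core's ClientLogger, which logs the exception and hands it back so it can be thrown in the same statement. Below is a minimal sketch of that pattern, assuming azure-core is on the classpath; MyModel and innerTypeProperties() are placeholder names, not generated SDK members.

import com.azure.core.util.logging.ClientLogger;

public final class MyModel {
    private static final ClientLogger LOGGER = new ClientLogger(MyModel.class);

    private Object innerTypeProperties;

    private Object innerTypeProperties() {
        return this.innerTypeProperties;
    }

    public void validate() {
        if (innerTypeProperties() == null) {
            // Old pattern: throw LOGGER.logExceptionAsError(new IllegalArgumentException(...));
            // New pattern: atError() returns a builder whose log(...) call records the
            // exception and returns it, keeping the throw on one statement.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property innerTypeProperties in model MyModel"));
        }
    }
}

For callers the behavior is unchanged: validate() still throws IllegalArgumentException when the required type-properties block is missing; only the logging call site differs.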
@@ -237,8 +259,9 @@ public MicrosoftAccessLinkedService withEncryptedCredential(String encryptedCred public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MicrosoftAccessLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MicrosoftAccessLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java index 478a053ec0eb2..8297cdd042247 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Microsoft Access sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MicrosoftAccessSink.class, visible = true) @JsonTypeName("MicrosoftAccessSink") @Fluent public final class MicrosoftAccessSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MicrosoftAccessSink"; + /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class MicrosoftAccessSink extends CopySink { public MicrosoftAccessSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: A query to execute before starting the copy. Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java index 67ba3874c4ad6..01a631c395dae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Microsoft Access. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MicrosoftAccessSource.class, visible = true) @JsonTypeName("MicrosoftAccessSource") @Fluent public final class MicrosoftAccessSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MicrosoftAccessSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class MicrosoftAccessSource extends CopySource { private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class MicrosoftAccessSource extends CopySource { public MicrosoftAccessSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java index 0f5a4e944a4b5..534f439af58d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Microsoft Access table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MicrosoftAccessTableDataset.class, + visible = true) @JsonTypeName("MicrosoftAccessTable") @Fluent public final class MicrosoftAccessTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MicrosoftAccessTable"; + /* * Microsoft Access table dataset properties. */ @@ -31,6 +43,16 @@ public final class MicrosoftAccessTableDataset extends Dataset { public MicrosoftAccessTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Access table dataset properties. 
* @@ -104,8 +126,7 @@ public MicrosoftAccessTableDataset withFolder(DatasetFolder folder) { } /** - * Get the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType - * string). + * Get the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType string). * * @return the tableName value. */ @@ -114,8 +135,7 @@ public Object tableName() { } /** - * Set the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType - * string). + * Set the tableName property: The Microsoft Access table name. Type: string (or Expression with resultType string). * * @param tableName the tableName value to set. * @return the MicrosoftAccessTableDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java index 3dfea62991470..0bee1cc82a1ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasCollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The MongoDB Atlas database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MongoDbAtlasCollectionDataset.class, + visible = true) @JsonTypeName("MongoDbAtlasCollection") @Fluent public final class MongoDbAtlasCollectionDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbAtlasCollection"; + /* * MongoDB Atlas database dataset properties. */ @@ -33,6 +45,16 @@ public final class MongoDbAtlasCollectionDataset extends Dataset { public MongoDbAtlasCollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB Atlas database dataset properties. 
* @@ -139,8 +161,9 @@ public MongoDbAtlasCollectionDataset withCollection(Object collection) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbAtlasCollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbAtlasCollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java index a6912144dd6c0..a4eff9abf6548 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for MongoDB Atlas data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MongoDbAtlasLinkedService.class, + visible = true) @JsonTypeName("MongoDbAtlas") @Fluent public final class MongoDbAtlasLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbAtlas"; + /* * MongoDB Atlas linked service properties. */ @@ -32,6 +44,16 @@ public final class MongoDbAtlasLinkedService extends LinkedService { public MongoDbAtlasLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB Atlas linked service properties. 
* @@ -161,8 +183,9 @@ public MongoDbAtlasLinkedService withDriverVersion(Object driverVersion) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbAtlasLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbAtlasLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java index 8e46709ed3bf9..d4c6f26c72d4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java @@ -6,20 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity MongoDB Atlas sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbAtlasSink.class, visible = true) @JsonTypeName("MongoDbAtlasSink") @Fluent public final class MongoDbAtlasSink extends CopySink { /* - * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). - * The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression - * with resultType string). + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbAtlasSink"; + + /* + * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -30,6 +36,16 @@ public final class MongoDbAtlasSink extends CopySink { public MongoDbAtlasSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: Specifies whether the document with same key to be overwritten (upsert) rather * than throw exception (insert). The default value is "insert". 
Type: string (or Expression with resultType diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java index 3f8ae6cb3f938..72d2dda2cb14b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for a MongoDB Atlas database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbAtlasSource.class, visible = true) @JsonTypeName("MongoDbAtlasSource") @Fluent public final class MongoDbAtlasSource extends CopySource { /* - * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter - * or pass an empty document ({}). Type: string (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbAtlasSource"; + + /* + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */ @JsonProperty(value = "filter") private Object filter; @@ -30,23 +37,19 @@ public final class MongoDbAtlasSource extends CopySource { private MongoDbCursorMethodsProperties cursorMethods; /* - * Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most - * cases, modifying the batch size will not affect the user or the application. This property's main purpose is to - * avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). + * Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "batchSize") private Object batchSize; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
*/ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -57,6 +60,16 @@ public final class MongoDbAtlasSource extends CopySource { public MongoDbAtlasSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the filter property: Specifies selection filter using query operators. To return all documents in a * collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType @@ -103,8 +116,8 @@ public MongoDbAtlasSource withCursorMethods(MongoDbCursorMethodsProperties curso /** * Get the batchSize property: Specifies the number of documents to return in each batch of the response from - * MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. - * This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with + * MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This + * property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with * resultType integer). * * @return the batchSize value. @@ -115,8 +128,8 @@ public Object batchSize() { /** * Set the batchSize property: Specifies the number of documents to return in each batch of the response from - * MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. - * This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with + * MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This + * property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with * resultType integer). * * @param batchSize the batchSize value to set. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java index 2b42d53695799..20964d323d9c9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbCollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The MongoDB database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MongoDbCollectionDataset.class, + visible = true) @JsonTypeName("MongoDbCollection") @Fluent public final class MongoDbCollectionDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbCollection"; + /* * MongoDB database dataset properties. */ @@ -32,6 +44,16 @@ public final class MongoDbCollectionDataset extends Dataset { public MongoDbCollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB database dataset properties. * @@ -138,8 +160,9 @@ public MongoDbCollectionDataset withCollectionName(Object collectionName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbCollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbCollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java index ea4327ca6721e..c33e7587c7305 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java @@ -18,29 +18,25 @@ @Fluent public final class MongoDbCursorMethodsProperties { /* - * Specifies the fields to return in the documents that match the query filter. To return all fields in the - * matching documents, omit this parameter. Type: string (or Expression with resultType string). + * Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). */ @JsonProperty(value = "project") private Object project; /* - * Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType - * string). Type: string (or Expression with resultType string). + * Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). */ @JsonProperty(value = "sort") private Object sort; /* - * Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful - * in implementing paginated results. Type: integer (or Expression with resultType integer). + * Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "skip") private Object skip; /* - * Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a - * SQL database. Type: integer (or Expression with resultType integer). + * Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). 
*/ @JsonProperty(value = "limit") private Object limit; @@ -124,8 +120,8 @@ public MongoDbCursorMethodsProperties withSkip(Object skip) { } /** - * Get the limit property: Specifies the maximum number of documents the server returns. limit() is analogous to - * the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). + * Get the limit property: Specifies the maximum number of documents the server returns. limit() is analogous to the + * LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). * * @return the limit value. */ @@ -134,8 +130,8 @@ public Object limit() { } /** - * Set the limit property: Specifies the maximum number of documents the server returns. limit() is analogous to - * the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). + * Set the limit property: Specifies the maximum number of documents the server returns. limit() is analogous to the + * LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). * * @param limit the limit value to set. * @return the MongoDbCursorMethodsProperties object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java index acf3ef2d6dc21..51587da715779 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for MongoDb data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbLinkedService.class, visible = true) @JsonTypeName("MongoDb") @Fluent public final class MongoDbLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDb"; + /* * MongoDB linked service properties. */ @@ -32,6 +40,16 @@ public final class MongoDbLinkedService extends LinkedService { public MongoDbLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB linked service properties. * @@ -297,8 +315,8 @@ public MongoDbLinkedService withAllowSelfSignedServerCert(Object allowSelfSigned } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -307,8 +325,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MongoDbLinkedService object itself. @@ -330,8 +348,9 @@ public MongoDbLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java index 50795bae0dd8e..f52898095a6da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for a MongoDB database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbSource.class, visible = true) @JsonTypeName("MongoDbSource") @Fluent public final class MongoDbSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbSource"; + /* * Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class MongoDbSource extends CopySource { private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class MongoDbSource extends CopySource { public MongoDbSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Should be a SQL-92 query expression. 
Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java index c7db5c6b09c4f..cf58821251017 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2CollectionDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The MongoDB database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = MongoDbV2CollectionDataset.class, + visible = true) @JsonTypeName("MongoDbV2Collection") @Fluent public final class MongoDbV2CollectionDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbV2Collection"; + /* * MongoDB database dataset properties. */ @@ -33,6 +45,16 @@ public final class MongoDbV2CollectionDataset extends Dataset { public MongoDbV2CollectionDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB database dataset properties. 
* @@ -139,8 +161,9 @@ public MongoDbV2CollectionDataset withCollection(Object collection) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbV2CollectionDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbV2CollectionDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java index 802e15aaccaba..e33513aa95888 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for MongoDB data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2LinkedService.class, visible = true) @JsonTypeName("MongoDbV2") @Fluent public final class MongoDbV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbV2"; + /* * MongoDB linked service properties. */ @@ -32,6 +40,16 @@ public final class MongoDbV2LinkedService extends LinkedService { public MongoDbV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MongoDB linked service properties. 
* @@ -136,8 +154,9 @@ public MongoDbV2LinkedService withDatabase(Object database) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MongoDbV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MongoDbV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java index 9779f0c17d342..b90af08aa6684 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java @@ -6,20 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity MongoDB sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2Sink.class, visible = true) @JsonTypeName("MongoDbV2Sink") @Fluent public final class MongoDbV2Sink extends CopySink { /* - * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). - * The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression - * with resultType string). + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbV2Sink"; + + /* + * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -30,6 +36,16 @@ public final class MongoDbV2Sink extends CopySink { public MongoDbV2Sink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: Specifies whether the document with same key to be overwritten (upsert) rather * than throw exception (insert). The default value is "insert". 
Type: string (or Expression with resultType diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java index 2bbe8a8be96ed..c17d9eb277969 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for a MongoDB database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2Source.class, visible = true) @JsonTypeName("MongoDbV2Source") @Fluent public final class MongoDbV2Source extends CopySource { /* - * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter - * or pass an empty document ({}). Type: string (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MongoDbV2Source"; + + /* + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */ @JsonProperty(value = "filter") private Object filter; @@ -30,23 +37,19 @@ public final class MongoDbV2Source extends CopySource { private MongoDbCursorMethodsProperties cursorMethods; /* - * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, - * modifying the batch size will not affect the user or the application. This property's main purpose is to avoid - * hit the limitation of response size. Type: integer (or Expression with resultType integer). + * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "batchSize") private Object batchSize; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
*/ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -57,6 +60,16 @@ public final class MongoDbV2Source extends CopySource { public MongoDbV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the filter property: Specifies selection filter using query operators. To return all documents in a * collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java index 1b9389bbb0682..ad52f9cf605a3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,9 +17,9 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "type", - defaultImpl = MultiplePipelineTrigger.class) + defaultImpl = MultiplePipelineTrigger.class, + visible = true) @JsonTypeName("MultiplePipelineTrigger") @JsonSubTypes({ @JsonSubTypes.Type(name = "ScheduleTrigger", value = ScheduleTrigger.class), @@ -27,6 +28,13 @@ @JsonSubTypes.Type(name = "CustomEventsTrigger", value = CustomEventsTrigger.class) }) @Fluent public class MultiplePipelineTrigger extends Trigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MultiplePipelineTrigger"; + /* * Pipelines that need to be started. */ @@ -39,6 +47,16 @@ public class MultiplePipelineTrigger extends Trigger { public MultiplePipelineTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the pipelines property: Pipelines that need to be started. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java index 62bd1e346bbb8..c18fbd2bb8351 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.MySqlLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for MySQL data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlLinkedService.class, visible = true) @JsonTypeName("MySql") @Fluent public final class MySqlLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MySql"; + /* * MySQL linked service properties. */ @@ -32,6 +40,16 @@ public final class MySqlLinkedService extends LinkedService { public MySqlLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MySQL linked service properties. * @@ -293,8 +311,8 @@ public MySqlLinkedService withPassword(AzureKeyVaultSecretReference password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -303,8 +321,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the MySqlLinkedService object itself. 
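The hunks above all make the same two changes to the Jackson polymorphism setup: the annotation drops include = JsonTypeInfo.As.PROPERTY in favor of visible = true plus an explicit defaultImpl, and each concrete model now declares its own @JsonTypeId-annotated "type" field whose default is the discriminator value, returned through an overriding type() accessor. A minimal, self-contained sketch of that pattern follows; DiscriminatorDemo, Source and DemoSource are illustrative names for this note only, not types from the SDK, and the behavior shown is what the annotations imply rather than a statement about the generator itself.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class DiscriminatorDemo {

    // Base type plays the role of CopySource/LinkedService/Trigger: the "type" property is
    // declared visible so its value is handed back to the resolved subtype on deserialization.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Source.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "DemoSource", value = DemoSource.class) })
    public static class Source {
        public String type() {
            return "Source";
        }
    }

    // Concrete type mirrors e.g. MySqlSource: it owns the discriminator value and exposes it
    // through @JsonTypeId, so serialization writes the field instead of an injected property.
    @JsonTypeName("DemoSource")
    public static final class DemoSource extends Source {
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "DemoSource";

        @JsonProperty(value = "query")
        private String query;

        @Override
        public String type() {
            return this.type;
        }

        public DemoSource withQuery(String query) {
            this.query = query;
            return this;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(new DemoSource().withQuery("select 1"));
        Source decoded = mapper.readValue(json, Source.class);
        // Prints {"type":"DemoSource","query":"select 1"} -> DemoSource
        System.out.println(json + " -> " + decoded.type());
    }
}

With visible = true the discriminator is no longer stripped from the token stream, so the subtype's own "type" field keeps the wire value, and serialization emits exactly one "type" property because the @JsonTypeId field doubles as the type id.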
@@ -326,8 +344,9 @@ public MySqlLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model MySqlLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model MySqlLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java index d366101e5b8e5..0f5be970a1302 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for MySQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlSource.class, visible = true) @JsonTypeName("MySqlSource") @Fluent public final class MySqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MySqlSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class MySqlSource extends TabularSource { public MySqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java index b17f95c688a43..ac005be1e65d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.MySqlTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The MySQL table dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlTableDataset.class, visible = true) @JsonTypeName("MySqlTable") @Fluent public final class MySqlTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "MySqlTable"; + /* * MySQL table dataset properties. */ @@ -31,6 +39,16 @@ public final class MySqlTableDataset extends Dataset { public MySqlTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: MySQL table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java index ef26b7ec6b6ff..2f962b0fbb02d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.NetezzaLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Netezza linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaLinkedService.class, visible = true) @JsonTypeName("Netezza") @Fluent public final class NetezzaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Netezza"; + /* * Netezza linked service properties. */ @@ -32,6 +40,16 @@ public final class NetezzaLinkedService extends LinkedService { public NetezzaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Netezza linked service properties. * @@ -126,8 +144,8 @@ public NetezzaLinkedService withPwd(AzureKeyVaultSecretReference pwd) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the NetezzaLinkedService object itself. @@ -159,8 +177,9 @@ public NetezzaLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model NetezzaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model NetezzaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java index fa92742b6a7d7..a8950158ecf0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java @@ -13,22 +13,19 @@ @Fluent public final class NetezzaPartitionSettings { /* - * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or - * Expression with resultType string). + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java index d68087d3d4a2d..97f3849a81ce7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Netezza source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaSource.class, visible = true) @JsonTypeName("NetezzaSource") @Fluent public final class NetezzaSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "NetezzaSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class NetezzaSource extends TabularSource { private Object query; /* - * The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", - * "DataSlice", "DynamicRange". + * The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -41,6 +48,16 @@ public final class NetezzaSource extends TabularSource { public NetezzaSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java index 937af48062e08..6c994cf4627fd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.NetezzaTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Netezza dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaTableDataset.class, visible = true) @JsonTypeName("NetezzaTable") @Fluent public final class NetezzaTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "NetezzaTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class NetezzaTableDataset extends Dataset { public NetezzaTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java index a7c2e3d710588..8b3ee1f0c161b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ODataLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Open Data Protocol (OData) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataLinkedService.class, visible = true) @JsonTypeName("OData") @Fluent public final class ODataLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OData"; + /* * OData linked service properties. */ @@ -32,6 +40,16 @@ public final class ODataLinkedService extends LinkedService { public ODataLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: OData linked service properties. * @@ -78,8 +96,7 @@ public ODataLinkedService withAnnotations(List annotations) { } /** - * Get the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType - * string). + * Get the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType string). * * @return the url value. */ @@ -88,8 +105,7 @@ public Object url() { } /** - * Set the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType - * string). + * Set the url property: The URL of the OData service endpoint. Type: string (or Expression with resultType string). * * @param url the url value to set. * @return the ODataLinkedService object itself. 
@@ -247,9 +263,9 @@ public ODataLinkedService withServicePrincipalId(Object servicePrincipalId) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -258,9 +274,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the ODataLinkedService object itself. @@ -299,19 +315,20 @@ public ODataLinkedService withAadResourceId(Object aadResourceId) { } /** - * Get the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for - * service principal. + * Get the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for service + * principal. * * @return the aadServicePrincipalCredentialType value. */ public ODataAadServicePrincipalCredentialType aadServicePrincipalCredentialType() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? null : this.innerTypeProperties().aadServicePrincipalCredentialType(); } /** - * Set the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for - * service principal. + * Set the aadServicePrincipalCredentialType property: Specify the credential type (key or cert) is used for service + * principal. * * @param aadServicePrincipalCredentialType the aadServicePrincipalCredentialType value to set. * @return the ODataLinkedService object itself. @@ -377,20 +394,21 @@ public ODataLinkedService withServicePrincipalEmbeddedCert(SecretBase servicePri /** * Get the servicePrincipalEmbeddedCertPassword property: Specify the password of your certificate if your - * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression - * with resultType string). + * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with + * resultType string). * * @return the servicePrincipalEmbeddedCertPassword value. */ public SecretBase servicePrincipalEmbeddedCertPassword() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? 
null : this.innerTypeProperties().servicePrincipalEmbeddedCertPassword(); } /** * Set the servicePrincipalEmbeddedCertPassword property: Specify the password of your certificate if your - * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression - * with resultType string). + * certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with + * resultType string). * * @param servicePrincipalEmbeddedCertPassword the servicePrincipalEmbeddedCertPassword value to set. * @return the ODataLinkedService object itself. @@ -405,8 +423,8 @@ public SecretBase servicePrincipalEmbeddedCertPassword() { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -415,8 +433,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ODataLinkedService object itself. @@ -438,8 +456,9 @@ public ODataLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ODataLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ODataLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java index b42f02418f043..0a979ff92586f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.ODataResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Open Data Protocol (OData) resource dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataResourceDataset.class, visible = true) @JsonTypeName("ODataResource") @Fluent public final class ODataResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ODataResource"; + /* * OData dataset properties. */ @@ -31,6 +39,16 @@ public final class ODataResourceDataset extends Dataset { public ODataResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: OData dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java index b74d78a250f72..238ea9c3713b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for OData source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataSource.class, visible = true) @JsonTypeName("ODataSource") @Fluent public final class ODataSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ODataSource"; + /* * OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ @@ -23,16 +31,13 @@ public final class ODataSource extends CopySource { private Object query; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -43,6 +48,16 @@ public final class ODataSource extends CopySource { public ODataSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: OData query. For example, "$top=1". Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java index d4b752938d983..66f18cab47956 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.OdbcLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Open Database Connectivity (ODBC) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcLinkedService.class, visible = true) @JsonTypeName("Odbc") @Fluent public final class OdbcLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Odbc"; + /* * ODBC linked service properties. */ @@ -32,6 +40,16 @@ public final class OdbcLinkedService extends LinkedService { public OdbcLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ODBC linked service properties. * @@ -79,8 +97,8 @@ public OdbcLinkedService withAnnotations(List annotations) { /** * Get the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @return the connectionString value. */ @@ -90,8 +108,8 @@ public Object connectionString() { /** * Set the connectionString property: The non-access credential portion of the connection string as well as an - * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression - * with resultType string. + * optional encrypted credential. Type: string, or SecureString, or AzureKeyVaultSecretReference, or Expression with + * resultType string. * * @param connectionString the connectionString value to set. * @return the OdbcLinkedService object itself. @@ -203,8 +221,8 @@ public OdbcLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -213,8 +231,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OdbcLinkedService object itself. @@ -236,8 +254,9 @@ public OdbcLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model OdbcLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model OdbcLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java index f81572e38349b..ec509c353151a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity ODBC sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcSink.class, visible = true) @JsonTypeName("OdbcSink") @Fluent public final class OdbcSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OdbcSink"; + /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class OdbcSink extends CopySink { public OdbcSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: A query to execute before starting the copy. Type: string (or Expression with * resultType string). 
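Alongside the discriminator change, every validate() hunk in these files swaps LOGGER.logExceptionAsError(...) for the builder-style LOGGER.atError().log(...); as the hunks themselves show, log(...) hands the exception back so it can still be thrown in a single expression. Below is a hedged sketch of that pattern in isolation; DemoModel and its message are illustrative, and the ClientLogger/LoggingEventBuilder overload is assumed to match the azure-core version this generated code compiles against.

import com.azure.core.util.logging.ClientLogger;

public final class DemoModel {
    private static final ClientLogger LOGGER = new ClientLogger(DemoModel.class);

    private Object innerTypeProperties;

    /**
     * Validates the instance, mirroring the regenerated validate() methods in this patch.
     */
    public void validate() {
        if (innerTypeProperties == null) {
            // atError() starts a log event at ERROR level; log(exception) records it and
            // returns the same exception, so the throw stays a one-liner after logging.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property innerTypeProperties in model DemoModel"));
        }
    }
}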
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java index 8aabaa0390ee9..a79b5792a3cae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for ODBC databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcSource.class, visible = true) @JsonTypeName("OdbcSource") @Fluent public final class OdbcSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OdbcSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class OdbcSource extends TabularSource { public OdbcSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java index 9e2aa0099807c..715ab896d28c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.OdbcTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The ODBC table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcTableDataset.class, visible = true) @JsonTypeName("OdbcTable") @Fluent public final class OdbcTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OdbcTable"; + /* * ODBC table dataset properties. */ @@ -31,6 +39,16 @@ public final class OdbcTableDataset extends Dataset { public OdbcTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ODBC table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java index f94fdadab47ff..3c7d0a079cc5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.Office365DatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The Office365 account. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365Dataset.class, visible = true) @JsonTypeName("Office365Table") @Fluent public final class Office365Dataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Office365Table"; + /* * Office365 dataset properties. */ @@ -32,6 +40,16 @@ public final class Office365Dataset extends Dataset { public Office365Dataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Office365 dataset properties. * @@ -163,8 +181,9 @@ public Office365Dataset withPredicate(Object predicate) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model Office365Dataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model Office365Dataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java index baa23210f59bc..c7b3735060da2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.Office365LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Office365 linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365LinkedService.class, visible = true) @JsonTypeName("Office365") @Fluent public final class Office365LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Office365"; + /* * Office365 linked service properties. */ @@ -32,6 +40,16 @@ public final class Office365LinkedService extends LinkedService { public Office365LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Office365 linked service properties. * @@ -176,8 +194,8 @@ public Office365LinkedService withServicePrincipalKey(SecretBase servicePrincipa } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -186,8 +204,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the Office365LinkedService object itself. @@ -209,8 +227,9 @@ public Office365LinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model Office365LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model Office365LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java index a806a8c2f7d3e..928a020e6ab19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for an Office 365 service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365Source.class, visible = true) @JsonTypeName("Office365Source") @Fluent public final class Office365Source extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Office365Source"; + /* * The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). */ @@ -29,8 +37,7 @@ public final class Office365Source extends CopySource { private Object userScopeFilterUri; /* - * The Column to apply the and . Type: string (or Expression - * with resultType string). + * The Column to apply the and . Type: string (or Expression with resultType string). */ @JsonProperty(value = "dateFilterColumn") private Object dateFilterColumn; @@ -48,8 +55,7 @@ public final class Office365Source extends CopySource { private Object endTime; /* - * The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType - * array of objects). itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] + * The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] */ @JsonProperty(value = "outputColumns") private Object outputColumns; @@ -60,6 +66,16 @@ public final class Office365Source extends CopySource { public Office365Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the allowedGroups property: The groups containing all the users. Type: array of strings (or Expression with * resultType array of strings). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java index 524a17b1f4b24..13d3cb1630af1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.OracleCloudStorageLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Oracle Cloud Storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleCloudStorageLinkedService.class, + visible = true) @JsonTypeName("OracleCloudStorage") @Fluent public final class OracleCloudStorageLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleCloudStorage"; + /* * Oracle Cloud Storage linked service properties. */ @@ -33,6 +45,16 @@ public final class OracleCloudStorageLinkedService extends LinkedService { public OracleCloudStorageLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Oracle Cloud Storage linked service properties. * @@ -129,9 +151,9 @@ public OracleCloudStorageLinkedService withSecretAccessKey(SecretBase secretAcce } /** - * Get the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Get the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @return the serviceUrl value. */ @@ -140,9 +162,9 @@ public Object serviceUrl() { } /** - * Set the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage - * Connector. This is an optional property; change it only if you want to try a different service endpoint or want - * to switch between https and http. Type: string (or Expression with resultType string). + * Set the serviceUrl property: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. + * This is an optional property; change it only if you want to try a different service endpoint or want to switch + * between https and http. Type: string (or Expression with resultType string). * * @param serviceUrl the serviceUrl value to set. * @return the OracleCloudStorageLinkedService object itself. @@ -156,8 +178,8 @@ public OracleCloudStorageLinkedService withServiceUrl(Object serviceUrl) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -166,8 +188,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OracleCloudStorageLinkedService object itself. 
@@ -189,8 +211,9 @@ public OracleCloudStorageLinkedService withEncryptedCredential(String encryptedC public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model OracleCloudStorageLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model OracleCloudStorageLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java index 821ae822e5fd6..ac59d1e5ef1f4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of Oracle Cloud Storage dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleCloudStorageLocation.class, + visible = true) @JsonTypeName("OracleCloudStorageLocation") @Fluent public final class OracleCloudStorageLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleCloudStorageLocation"; + /* * Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string) */ @@ -34,6 +46,16 @@ public final class OracleCloudStorageLocation extends DatasetLocation { public OracleCloudStorageLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the bucketName property: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with * resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java index 468b77f612a63..afa75082499c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java @@ -6,19 +6,30 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Oracle Cloud Storage read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleCloudStorageReadSettings.class, + visible = true) @JsonTypeName("OracleCloudStorageReadSettings") @Fluent public final class OracleCloudStorageReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleCloudStorageReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,8 +53,7 @@ public final class OracleCloudStorageReadSettings extends StoreReadSettings { private Object prefix; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; @@ -55,15 +65,13 @@ public final class OracleCloudStorageReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -87,8 +95,18 @@ public OracleCloudStorageReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -97,8 +115,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the OracleCloudStorageReadSettings object itself. 
@@ -175,8 +193,8 @@ public OracleCloudStorageReadSettings withPrefix(Object prefix) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -185,8 +203,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the OracleCloudStorageReadSettings object itself. @@ -219,8 +237,8 @@ public OracleCloudStorageReadSettings withEnablePartitionDiscovery(Object enable } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -229,8 +247,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the OracleCloudStorageReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java index 9e0520320af82..d84effceb2a55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.OracleLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Oracle database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleLinkedService.class, visible = true) @JsonTypeName("Oracle") @Fluent public final class OracleLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Oracle"; + /* * Oracle database linked service properties. */ @@ -32,6 +40,16 @@ public final class OracleLinkedService extends LinkedService { public OracleLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Oracle database linked service properties. * @@ -126,8 +144,8 @@ public OracleLinkedService withPassword(AzureKeyVaultSecretReference password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OracleLinkedService object itself. @@ -159,8 +177,9 @@ public OracleLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model OracleLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model OracleLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java index 2e092b72a4370..15abf4942308a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java @@ -13,28 +13,25 @@ @Fluent public final class OraclePartitionSettings { /* - * Names of the physical partitions of Oracle table. + * Names of the physical partitions of Oracle table. */ @JsonProperty(value = "partitionNames") private Object partitionNames; /* - * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or - * Expression with resultType string). + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. 
Type: string (or Expression with resultType string). + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java index cb2d6f7bef729..815fb87693141 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.OracleServiceCloudLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Oracle Service Cloud linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleServiceCloudLinkedService.class, + visible = true) @JsonTypeName("OracleServiceCloud") @Fluent public final class OracleServiceCloudLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleServiceCloud"; + /* * Oracle Service Cloud linked service properties. */ @@ -33,6 +45,16 @@ public final class OracleServiceCloudLinkedService extends LinkedService { public OracleServiceCloudLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Oracle Service Cloud linked service properties. * @@ -200,8 +222,8 @@ public OracleServiceCloudLinkedService withUseHostVerification(Object useHostVer } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @return the usePeerVerification value. 
*/ @@ -210,8 +232,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @param usePeerVerification the usePeerVerification value to set. * @return the OracleServiceCloudLinkedService object itself. @@ -225,8 +247,8 @@ public OracleServiceCloudLinkedService withUsePeerVerification(Object usePeerVer } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -235,8 +257,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the OracleServiceCloudLinkedService object itself. @@ -258,8 +280,9 @@ public OracleServiceCloudLinkedService withEncryptedCredential(String encryptedC public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model OracleServiceCloudLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model OracleServiceCloudLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java index dce6b358437a4..9c0819f4b5288 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Oracle Service Cloud dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleServiceCloudObjectDataset.class, + visible = true) @JsonTypeName("OracleServiceCloudObject") @Fluent public final class OracleServiceCloudObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleServiceCloudObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class OracleServiceCloudObjectDataset extends Dataset { public OracleServiceCloudObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java index 22e63affc7f75..fb09f41e441da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Oracle Service Cloud source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = OracleServiceCloudSource.class, + visible = true) @JsonTypeName("OracleServiceCloudSource") @Fluent public final class OracleServiceCloudSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleServiceCloudSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +40,16 @@ public final class OracleServiceCloudSource extends TabularSource { public OracleServiceCloudSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
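The hunks above (and the ones that follow) all make the same pair of changes to each polymorphic model: the class-level @JsonTypeInfo drops the include-as-property form in favor of a visible "type" discriminator, and each concrete type gains a @JsonTypeId field plus a type() accessor returning its fixed discriminator value. A minimal, self-contained sketch of that annotation layout on a hypothetical base/subtype pair (illustrative only, not part of this patch; the class names are invented):

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

// Base type: the discriminator lives in the "type" property and stays visible to data binding.
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExampleLocation.class, visible = true)
class ExampleLocation {
    public String type() {
        return "ExampleLocation";
    }
}

// Concrete type: the @JsonTypeId field supplies the serialized discriminator and backs type().
@JsonTypeName("HypotheticalStorageLocation")
class HypotheticalStorageLocation extends ExampleLocation {
    @JsonTypeId
    @JsonProperty(value = "type", required = true)
    private String type = "HypotheticalStorageLocation";

    @Override
    public String type() {
        return this.type;
    }
}

public final class VisibleTypeIdSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // NAME-based resolution needs the subtype (and its @JsonTypeName) registered.
        mapper.registerSubtypes(HypotheticalStorageLocation.class);

        String json = mapper.writeValueAsString(new HypotheticalStorageLocation());
        System.out.println(json);

        // The visible "type" property routes the payload back to the concrete subtype.
        ExampleLocation decoded = mapper.readValue(json, ExampleLocation.class);
        System.out.println(decoded.getClass().getSimpleName() + " -> " + decoded.type());
    }
}
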
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java index 79967ebffc699..91f4316041279 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Oracle sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleSink.class, visible = true) @JsonTypeName("OracleSink") @Fluent public final class OracleSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleSink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class OracleSink extends CopySink { public OracleSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java index 4c548636ae24a..40f4fa81458e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Oracle source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleSource.class, visible = true) @JsonTypeName("OracleSource") @Fluent public final class OracleSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleSource"; + /* * Oracle reader query. Type: string (or Expression with resultType string). */ @@ -23,15 +31,13 @@ public final class OracleSource extends CopySource { private Object oracleReaderQuery; /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". + * The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -43,8 +49,7 @@ public final class OracleSource extends CopySource { private OraclePartitionSettings partitionSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -55,6 +60,16 @@ public final class OracleSource extends CopySource { public OracleSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the oracleReaderQuery property: Oracle reader query. Type: string (or Expression with resultType string). * @@ -98,8 +113,8 @@ public OracleSource withQueryTimeout(Object queryTimeout) { } /** - * Get the partitionOption property: The partition mechanism that will be used for Oracle read in parallel. - * Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + * Get the partitionOption property: The partition mechanism that will be used for Oracle read in parallel. Possible + * values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". * * @return the partitionOption value. */ @@ -108,8 +123,8 @@ public Object partitionOption() { } /** - * Set the partitionOption property: The partition mechanism that will be used for Oracle read in parallel. - * Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + * Set the partitionOption property: The partition mechanism that will be used for Oracle read in parallel. Possible + * values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". * * @param partitionOption the partitionOption value to set. * @return the OracleSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java index 0496717802470..b60389b0917ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.OracleTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The on-premises Oracle database dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleTableDataset.class, visible = true) @JsonTypeName("OracleTable") @Fluent public final class OracleTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OracleTable"; + /* * On-premises Oracle dataset properties. */ @@ -31,6 +39,16 @@ public final class OracleTableDataset extends Dataset { public OracleTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: On-premises Oracle dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java index f13a91925ad94..054ab19eda37d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.OrcDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * ORC dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcDataset.class, visible = true) @JsonTypeName("Orc") @Fluent public final class OrcDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Orc"; + /* * ORC dataset properties. */ @@ -31,6 +39,16 @@ public final class OrcDataset extends Dataset { public OrcDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ORC dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java index 8b76575d9afb6..d8f0a0615f014 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The data stored in Optimized Row Columnar (ORC) format. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcFormat.class, visible = true) @JsonTypeName("OrcFormat") @Fluent public final class OrcFormat extends DatasetStorageFormat { + /* + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OrcFormat"; + /** * Creates an instance of OrcFormat class. */ public OrcFormat() { } + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java index 0ea99195ea50c..f9ab94417b6a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity ORC sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcSink.class, visible = true) @JsonTypeName("OrcSink") @Fluent public final class OrcSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OrcSink"; + /* * ORC store settings. */ @@ -34,6 +42,16 @@ public final class OrcSink extends CopySink { public OrcSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: ORC store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java index d57d585aea89e..decc8999ed99f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity ORC source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcSource.class, visible = true) @JsonTypeName("OrcSource") @Fluent public final class OrcSource extends CopySource { + /* + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OrcSource"; + /* * ORC store settings. */ @@ -23,8 +31,7 @@ public final class OrcSource extends CopySource { private StoreReadSettings storeSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class OrcSource extends CopySource { public OrcSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: ORC store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java index 0dd4a6daf410b..4592d9847f950 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java @@ -6,26 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Orc write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcWriteSettings.class, visible = true) @JsonTypeName("OrcWriteSettings") @Fluent public final class OrcWriteSettings extends FormatWriteSettings { /* - * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or - * Expression with resultType integer). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "OrcWriteSettings"; + + /* + * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* - * Specifies the file name pattern _. when copy from non-file based store - * without partitionOptions. Type: string (or Expression with resultType string). + * Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; @@ -36,6 +42,16 @@ public final class OrcWriteSettings extends FormatWriteSettings { public OrcWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the maxRowsPerFile property: Limit the written file's row count to be smaller than or equal to the specified * count. Type: integer (or Expression with resultType integer). 
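The PackageStore and ParameterSpecification hunks just below, like the linked-service hunks above, also replace LOGGER.logExceptionAsError(...) with the LoggingEventBuilder form LOGGER.atError().log(...). A minimal sketch of that idiom with a hypothetical model, assuming (as the generated code relies on) that azure-core's LoggingEventBuilder.log returns the exception it is given, so the missing-property check can log and throw in one expression:

import com.azure.core.util.logging.ClientLogger;

public final class ValidateSketch {
    private static final ClientLogger LOGGER = new ClientLogger(ValidateSketch.class);

    // Stand-in for a required model property.
    private Object innerTypeProperties;

    public void validate() {
        if (innerTypeProperties == null) {
            // Log at error level and throw the same IllegalArgumentException.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property innerTypeProperties in model ValidateSketch"));
        }
    }

    public static void main(String[] args) {
        new ValidateSketch().validate();
    }
}
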
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java index b111b7f907117..5d47365dd2711 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java @@ -78,12 +78,13 @@ public PackageStore withPackageStoreLinkedService(EntityReference packageStoreLi */ public void validate() { if (name() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property name in model PackageStore")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property name in model PackageStore")); } if (packageStoreLinkedService() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property packageStoreLinkedService in model PackageStore")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property packageStoreLinkedService in model PackageStore")); } else { packageStoreLinkedService().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java index ae9fe3dd56bcb..06f37aa3990b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java @@ -78,8 +78,8 @@ public ParameterSpecification withDefaultValue(Object defaultValue) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model ParameterSpecification")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model ParameterSpecification")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java index 3d4ad0577d73f..470864efbd0db 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.ParquetDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Parquet dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetDataset.class, visible = true) @JsonTypeName("Parquet") @Fluent public final class ParquetDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Parquet"; + /* * Parquet dataset properties. */ @@ -31,6 +39,16 @@ public final class ParquetDataset extends Dataset { public ParquetDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Parquet dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java index 814b53e617c21..261cb07e3e26e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The data stored in Parquet format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetFormat.class, visible = true) @JsonTypeName("ParquetFormat") @Fluent public final class ParquetFormat extends DatasetStorageFormat { + /* + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ParquetFormat"; + /** * Creates an instance of ParquetFormat class. */ public ParquetFormat() { } + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java index c8563fb8211d4..f755783d8aeda 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Parquet read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetReadSettings.class, visible = true) @JsonTypeName("ParquetReadSettings") @Fluent public final class ParquetReadSettings extends FormatReadSettings { + /* + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ParquetReadSettings"; + /* * Compression settings. */ @@ -28,6 +36,16 @@ public final class ParquetReadSettings extends FormatReadSettings { public ParquetReadSettings() { } + /** + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the compressionProperties property: Compression settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java index e33a16a8abe29..a76451adb7625 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Parquet sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetSink.class, visible = true) @JsonTypeName("ParquetSink") @Fluent public final class ParquetSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ParquetSink"; + /* * Parquet store settings. */ @@ -34,6 +42,16 @@ public final class ParquetSink extends CopySink { public ParquetSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Parquet store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java index 62ffc3e15b7bd..f7730c93d83aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Parquet source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetSource.class, visible = true) @JsonTypeName("ParquetSource") @Fluent public final class ParquetSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ParquetSource"; + /* * Parquet store settings. */ @@ -29,8 +37,7 @@ public final class ParquetSource extends CopySource { private ParquetReadSettings formatSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -41,6 +48,16 @@ public final class ParquetSource extends CopySource { public ParquetSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Parquet store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java index 648a183fe2917..1615962b42721 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java @@ -6,26 +6,32 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Parquet write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetWriteSettings.class, visible = true) @JsonTypeName("ParquetWriteSettings") @Fluent public final class ParquetWriteSettings extends FormatWriteSettings { /* - * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or - * Expression with resultType integer). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ParquetWriteSettings"; + + /* + * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* - * Specifies the file name pattern _. when copy from non-file based store - * without partitionOptions. Type: string (or Expression with resultType string). + * Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; @@ -36,6 +42,16 @@ public final class ParquetWriteSettings extends FormatWriteSettings { public ParquetWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the maxRowsPerFile property: Limit the written file's row count to be smaller than or equal to the specified * count. Type: integer (or Expression with resultType integer). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java index a17059b96a8cd..7b1bc7f4c8c8f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.PaypalLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Paypal Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalLinkedService.class, visible = true) @JsonTypeName("Paypal") @Fluent public final class PaypalLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Paypal"; + /* * Paypal Service linked service properties. */ @@ -32,6 +40,16 @@ public final class PaypalLinkedService extends LinkedService { public PaypalLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Paypal Service linked service properties. * @@ -197,8 +215,8 @@ public PaypalLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -207,8 +225,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the PaypalLinkedService object itself. 
@@ -222,8 +240,8 @@ public PaypalLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -232,8 +250,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PaypalLinkedService object itself. @@ -255,8 +273,9 @@ public PaypalLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model PaypalLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model PaypalLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java index b70599b7742eb..e1538917f2330 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Paypal Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalObjectDataset.class, visible = true) @JsonTypeName("PaypalObject") @Fluent public final class PaypalObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PaypalObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class PaypalObjectDataset extends Dataset { public PaypalObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java index 7fe448fb25e2a..bde4d655336aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Paypal Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalSource.class, visible = true) @JsonTypeName("PaypalSource") @Fluent public final class PaypalSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PaypalSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class PaypalSource extends TabularSource { public PaypalSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java index 3095385b8c1ee..d897885d64376 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.PhoenixLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Phoenix server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixLinkedService.class, visible = true) @JsonTypeName("Phoenix") @Fluent public final class PhoenixLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Phoenix"; + /* * Phoenix server linked service properties. 
*/ @@ -32,6 +40,16 @@ public final class PhoenixLinkedService extends LinkedService { public PhoenixLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Phoenix server linked service properties. * @@ -245,9 +263,9 @@ public PhoenixLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -256,9 +274,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the PhoenixLinkedService object itself. @@ -347,8 +365,8 @@ public PhoenixLinkedService withAllowSelfSignedServerCert(Object allowSelfSigned } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -357,8 +375,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PhoenixLinkedService object itself. 
@@ -380,8 +398,9 @@ public PhoenixLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model PhoenixLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model PhoenixLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java index 8ccce4974609b..9fab72ada0751 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.PhoenixDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Phoenix server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixObjectDataset.class, visible = true) @JsonTypeName("PhoenixObject") @Fluent public final class PhoenixObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PhoenixObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class PhoenixObjectDataset extends Dataset { public PhoenixObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java index e2472a5c12e92..f52bf3b58e2a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Phoenix server source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixSource.class, visible = true) @JsonTypeName("PhoenixSource") @Fluent public final class PhoenixSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PhoenixSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class PhoenixSource extends TabularSource { public PhoenixSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java index e42c9a3dbb22a..654226f7a887a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java @@ -48,8 +48,8 @@ public PipelineExternalComputeScaleProperties() { } /** - * Get the timeToLive property: Time to live (in minutes) setting of integration runtime which will execute - * pipeline and external activity. + * Get the timeToLive property: Time to live (in minutes) setting of integration runtime which will execute pipeline + * and external activity. * * @return the timeToLive value. */ @@ -58,8 +58,8 @@ public Integer timeToLive() { } /** - * Set the timeToLive property: Time to live (in minutes) setting of integration runtime which will execute - * pipeline and external activity. + * Set the timeToLive property: Time to live (in minutes) setting of integration runtime which will execute pipeline + * and external activity. * * @param timeToLive the timeToLive value to set. * @return the PipelineExternalComputeScaleProperties object itself. @@ -92,8 +92,8 @@ public PipelineExternalComputeScaleProperties withNumberOfPipelineNodes(Integer } /** - * Get the numberOfExternalNodes property: Number of the the external nodes, which should be greater than 0 and - * less than 11. + * Get the numberOfExternalNodes property: Number of the the external nodes, which should be greater than 0 and less + * than 11. * * @return the numberOfExternalNodes value. */ @@ -102,8 +102,8 @@ public Integer numberOfExternalNodes() { } /** - * Set the numberOfExternalNodes property: Number of the the external nodes, which should be greater than 0 and - * less than 11. + * Set the numberOfExternalNodes property: Number of the the external nodes, which should be greater than 0 and less + * than 11. * * @param numberOfExternalNodes the numberOfExternalNodes value to set. * @return the PipelineExternalComputeScaleProperties object itself. 
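The hunks above and below show the recurring discriminator change in this regeneration: the generated models move from a hidden Jackson type property to a visible one, via @JsonTypeInfo(..., visible = true, defaultImpl = ...) plus a @JsonTypeId-annotated "type" field that each subclass initializes to its own discriminator value and exposes through an overridden type() getter. A minimal, self-contained sketch of that pattern (illustrative class and property names only, not SDK code) under those assumptions:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class TypeDiscriminatorSketch {

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BaseSource.class, visible = true)
    public static class BaseSource {
        // Stand-in for a polymorphic base such as a copy source; unknown "type" values fall back here.
    }

    @JsonTypeName("ExampleSource")
    public static final class ExampleSource extends BaseSource {
        // Discriminator kept as a real, visible field, as in the regenerated models.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "ExampleSource";

        @JsonProperty("query")
        private String query;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // The sketch has no @JsonSubTypes on the base, so the subtype is registered by hand.
        mapper.registerSubtypes(ExampleSource.class);

        String json = "{\"type\":\"ExampleSource\",\"query\":\"select 1\"}";
        BaseSource value = mapper.readValue(json, BaseSource.class);

        // visible = true binds the "type" value into the @JsonTypeId field instead of discarding it,
        // so the discriminator survives a deserialize/serialize round trip.
        System.out.println(value.getClass().getSimpleName()); // ExampleSource
        System.out.println(mapper.writeValueAsString(value));
    }
}
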
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java index 3db19a4e0e6d5..b586c50992a9f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java @@ -80,8 +80,8 @@ public PipelineListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model PipelineListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model PipelineListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java index 7f2cffa50ed0d..aa5fce3f4e808 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java @@ -104,8 +104,9 @@ public PipelineReference withName(String name) { */ public void validate() { if (referenceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property referenceName in model PipelineReference")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property referenceName in model PipelineReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java index 435e476040d70..c7d78493f0677 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java @@ -24,22 +24,19 @@ public final class PolybaseSettings { private PolybaseSettingsRejectType rejectType; /* - * Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or - * Expression with resultType number), minimum: 0. + * Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. */ @JsonProperty(value = "rejectValue") private Object rejectValue; /* - * Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected - * rows. Type: integer (or Expression with resultType integer), minimum: 0. + * Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. 
*/ @JsonProperty(value = "rejectSampleValue") private Object rejectSampleValue; /* - * Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. - * Type: boolean (or Expression with resultType boolean). + * Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useTypeDefault") private Object useTypeDefault; @@ -77,8 +74,8 @@ public PolybaseSettings withRejectType(PolybaseSettingsRejectType rejectType) { } /** - * Get the rejectValue property: Specifies the value or the percentage of rows that can be rejected before the - * query fails. Type: number (or Expression with resultType number), minimum: 0. + * Get the rejectValue property: Specifies the value or the percentage of rows that can be rejected before the query + * fails. Type: number (or Expression with resultType number), minimum: 0. * * @return the rejectValue value. */ @@ -87,8 +84,8 @@ public Object rejectValue() { } /** - * Set the rejectValue property: Specifies the value or the percentage of rows that can be rejected before the - * query fails. Type: number (or Expression with resultType number), minimum: 0. + * Set the rejectValue property: Specifies the value or the percentage of rows that can be rejected before the query + * fails. Type: number (or Expression with resultType number), minimum: 0. * * @param rejectValue the rejectValue value to set. * @return the PolybaseSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java index 13c72f8bee1df..2ee1bc25dd19d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for PostgreSQL data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = PostgreSqlLinkedService.class, + visible = true) @JsonTypeName("PostgreSql") @Fluent public final class PostgreSqlLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSql"; + /* * PostgreSQL linked service properties. */ @@ -32,6 +44,16 @@ public final class PostgreSqlLinkedService extends LinkedService { public PostgreSqlLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: PostgreSQL linked service properties. 
* @@ -126,8 +148,8 @@ public PostgreSqlLinkedService withPassword(AzureKeyVaultSecretReference passwor } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +158,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PostgreSqlLinkedService object itself. @@ -159,8 +181,9 @@ public PostgreSqlLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model PostgreSqlLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model PostgreSqlLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java index d2c3ac0134c4c..72417050669a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for PostgreSQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlSource.class, visible = true) @JsonTypeName("PostgreSqlSource") @Fluent public final class PostgreSqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSqlSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class PostgreSqlSource extends TabularSource { public PostgreSqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java index f7cb8d622748e..5e2e372c87df4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The PostgreSQL table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlTableDataset.class, visible = true) @JsonTypeName("PostgreSqlTable") @Fluent public final class PostgreSqlTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSqlTable"; + /* * PostgreSQL table dataset properties. */ @@ -31,6 +39,16 @@ public final class PostgreSqlTableDataset extends Dataset { public PostgreSqlTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: PostgreSQL table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java index f3ae175caa6f7..027ace8e8a480 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for PostgreSQLV2 data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = PostgreSqlV2LinkedService.class, + visible = true) @JsonTypeName("PostgreSqlV2") @Fluent public final class PostgreSqlV2LinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSqlV2"; + /* * PostgreSQLV2 linked service properties. */ @@ -32,6 +44,16 @@ public final class PostgreSqlV2LinkedService extends LinkedService { public PostgreSqlV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: PostgreSQLV2 linked service properties. * @@ -508,8 +530,8 @@ public PostgreSqlV2LinkedService withPassword(AzureKeyVaultSecretReference passw } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -518,8 +540,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PostgreSqlV2LinkedService object itself. @@ -541,8 +563,9 @@ public PostgreSqlV2LinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model PostgreSqlV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model PostgreSqlV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java index 275110a29d8e2..214d38f5ce201 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for PostgreSQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlV2Source.class, visible = true) @JsonTypeName("PostgreSqlV2Source") @Fluent public final class PostgreSqlV2Source extends TabularSource { + /* + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSqlV2Source"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class PostgreSqlV2Source extends TabularSource { public PostgreSqlV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java index 63457c3b29c51..3beee6ee1e93d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2TableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The PostgreSQLV2 table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = PostgreSqlV2TableDataset.class, + visible = true) @JsonTypeName("PostgreSqlV2Table") @Fluent public final class PostgreSqlV2TableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PostgreSqlV2Table"; + /* * PostgreSQLV2 table dataset properties. */ @@ -31,6 +43,16 @@ public final class PostgreSqlV2TableDataset extends Dataset { public PostgreSqlV2TableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: PostgreSQLV2 table dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java index e105952feed94..647c4ff28af2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.PrestoLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Presto server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoLinkedService.class, visible = true) @JsonTypeName("Presto") @Fluent public final class PrestoLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Presto"; + /* * Presto server linked service properties. */ @@ -32,6 +40,16 @@ public final class PrestoLinkedService extends LinkedService { public PrestoLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Presto server linked service properties. * @@ -266,9 +284,9 @@ public PrestoLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -277,9 +295,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the PrestoLinkedService object itself. 
@@ -393,8 +411,8 @@ public PrestoLinkedService withTimeZoneId(Object timeZoneId) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -403,8 +421,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the PrestoLinkedService object itself. @@ -426,8 +444,9 @@ public PrestoLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model PrestoLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model PrestoLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java index f8721222bcb4b..3e27f4f75948f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.PrestoDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Presto server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoObjectDataset.class, visible = true) @JsonTypeName("PrestoObject") @Fluent public final class PrestoObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PrestoObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class PrestoObjectDataset extends Dataset { public PrestoObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java index a20c65813dc3f..2f26291fc66b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Presto server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoSource.class, visible = true) @JsonTypeName("PrestoSource") @Fluent public final class PrestoSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "PrestoSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class PrestoSource extends TabularSource { public PrestoSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
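The validate() hunks throughout this patch replace throw LOGGER.logExceptionAsError(...) with the builder-style throw LOGGER.atError().log(...), which likewise records the error through azure-core's ClientLogger and hands the exception back so it can be thrown in a single statement. A small sketch of the resulting pattern, using assumed model and property names rather than SDK code:

import com.azure.core.util.logging.ClientLogger;

/**
 * Illustrative only: mirrors the validate() pattern produced by this regeneration.
 */
public final class ExampleSettings {
    private static final ClientLogger LOGGER = new ClientLogger(ExampleSettings.class);

    private Object linkedServiceName;

    public Object linkedServiceName() {
        return this.linkedServiceName;
    }

    public ExampleSettings withLinkedServiceName(Object linkedServiceName) {
        this.linkedServiceName = linkedServiceName;
        return this;
    }

    public void validate() {
        if (linkedServiceName() == null) {
            // atError().log(...) logs the error and returns the exception, keeping the throw
            // a single statement, as LOGGER.logExceptionAsError(...) did before this change.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property linkedServiceName in model ExampleSettings"));
        }
    }
}
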
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java index 2b15003c63943..7dedcf15399e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java @@ -80,8 +80,9 @@ public PrivateEndpointConnectionListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property value in model PrivateEndpointConnectionListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property value in model PrivateEndpointConnectionListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java index 0cf3bce8175cd..e93459019e52f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.QuickBooksLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * QuickBooks server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = QuickBooksLinkedService.class, + visible = true) @JsonTypeName("QuickBooks") @Fluent public final class QuickBooksLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "QuickBooks"; + /* * QuickBooks server linked service properties. */ @@ -32,6 +44,16 @@ public final class QuickBooksLinkedService extends LinkedService { public QuickBooksLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: QuickBooks server linked service properties. * @@ -266,8 +288,8 @@ public QuickBooksLinkedService withUseEncryptedEndpoints(Object useEncryptedEndp } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -276,8 +298,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the QuickBooksLinkedService object itself. @@ -299,8 +321,9 @@ public QuickBooksLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model QuickBooksLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model QuickBooksLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java index 0238de8e0f6bd..f6c9251fd867c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * QuickBooks server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = QuickBooksObjectDataset.class, + visible = true) @JsonTypeName("QuickBooksObject") @Fluent public final class QuickBooksObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "QuickBooksObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class QuickBooksObjectDataset extends Dataset { public QuickBooksObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java index 211a8e0a446da..13e4eefbd661c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity QuickBooks server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = QuickBooksSource.class, visible = true) @JsonTypeName("QuickBooksSource") @Fluent public final class QuickBooksSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "QuickBooksSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class QuickBooksSource extends TabularSource { public QuickBooksSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java index 5a25b85ee203f..92b0747d8ab4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.QuickbaseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Quickbase. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = QuickbaseLinkedService.class, visible = true) @JsonTypeName("Quickbase") @Fluent public final class QuickbaseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Quickbase"; + /* * Quickbase linked service properties. 
*/ @@ -32,6 +40,16 @@ public final class QuickbaseLinkedService extends LinkedService { public QuickbaseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Quickbase linked service properties. * @@ -124,8 +142,8 @@ public QuickbaseLinkedService withUserToken(SecretBase userToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -134,8 +152,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the QuickbaseLinkedService object itself. @@ -157,8 +175,9 @@ public QuickbaseLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model QuickbaseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model QuickbaseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java index 3184a49cd85e4..fc9387c46be6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java @@ -19,9 +19,7 @@ @Fluent public final class RedirectIncompatibleRowSettings { /* - * Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting - * incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression - * with resultType string). + * Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). */ @JsonProperty(value = "linkedServiceName", required = true) private Object linkedServiceName; @@ -46,8 +44,8 @@ public RedirectIncompatibleRowSettings() { /** * Get the linkedServiceName property: Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked - * service used for redirecting incompatible row. 
Must be specified if redirectIncompatibleRowSettings is - * specified. Type: string (or Expression with resultType string). + * service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. + * Type: string (or Expression with resultType string). * * @return the linkedServiceName value. */ @@ -57,8 +55,8 @@ public Object linkedServiceName() { /** * Set the linkedServiceName property: Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked - * service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is - * specified. Type: string (or Expression with resultType string). + * service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. + * Type: string (or Expression with resultType string). * * @param linkedServiceName the linkedServiceName value to set. * @return the RedirectIncompatibleRowSettings object itself. @@ -126,8 +124,9 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property linkedServiceName in model RedirectIncompatibleRowSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model RedirectIncompatibleRowSettings")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java index a2732057f23a1..16a2c974c5f55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java @@ -16,16 +16,13 @@ @Fluent public final class RedshiftUnloadSettings { /* - * The name of the Amazon S3 linked service which will be used for the unload operation when copying from the - * Amazon Redshift source. + * The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. */ @JsonProperty(value = "s3LinkedServiceName", required = true) private LinkedServiceReference s3LinkedServiceName; /* - * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. - * The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType - * string). + * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "bucketName", required = true) private Object bucketName; @@ -89,14 +86,16 @@ public RedshiftUnloadSettings withBucketName(Object bucketName) { */ public void validate() { if (s3LinkedServiceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property s3LinkedServiceName in model RedshiftUnloadSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property s3LinkedServiceName in model RedshiftUnloadSettings")); } else { s3LinkedServiceName().validate(); } if (bucketName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property bucketName in model RedshiftUnloadSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property bucketName in model RedshiftUnloadSettings")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java index ae70ba5e12fd8..d48b56b394ced 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for various relational databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RelationalSource.class, visible = true) @JsonTypeName("RelationalSource") @Fluent public final class RelationalSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RelationalSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class RelationalSource extends CopySource { private Object query; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -35,6 +42,16 @@ public final class RelationalSource extends CopySource { public RelationalSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java index 0424fe89bbf61..82dd0bbc11593 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.RelationalTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The relational table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RelationalTableDataset.class, visible = true) @JsonTypeName("RelationalTable") @Fluent public final class RelationalTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RelationalTable"; + /* * Relational table dataset properties. */ @@ -31,6 +39,16 @@ public final class RelationalTableDataset extends Dataset { public RelationalTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Relational table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java index 78c4b46d50898..4830c4200a4f0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.RerunTumblingWindowTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.time.OffsetDateTime; @@ -17,10 +18,21 @@ * Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested * end time. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = RerunTumblingWindowTrigger.class, + visible = true) @JsonTypeName("RerunTumblingWindowTrigger") @Fluent public final class RerunTumblingWindowTrigger extends Trigger { + /* + * Trigger type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RerunTumblingWindowTrigger"; + /* * Rerun Trigger properties. */ @@ -34,6 +46,16 @@ public final class RerunTumblingWindowTrigger extends Trigger { public RerunTumblingWindowTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Rerun Trigger properties. * @@ -168,8 +190,9 @@ public RerunTumblingWindowTrigger withRerunConcurrency(int rerunConcurrency) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model RerunTumblingWindowTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model RerunTumblingWindowTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java index d1d340fe05268..1dab3b3f7d8e8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ResponsysLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Responsys linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysLinkedService.class, visible = true) @JsonTypeName("Responsys") @Fluent public final class ResponsysLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Responsys"; + /* * Responsys linked service properties. */ @@ -32,6 +40,16 @@ public final class ResponsysLinkedService extends LinkedService { public ResponsysLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Responsys linked service properties. * @@ -203,8 +221,8 @@ public ResponsysLinkedService withUseHostVerification(Object useHostVerification } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @return the usePeerVerification value. 
*/ @@ -213,8 +231,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @param usePeerVerification the usePeerVerification value to set. * @return the ResponsysLinkedService object itself. @@ -228,8 +246,8 @@ public ResponsysLinkedService withUsePeerVerification(Object usePeerVerification } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +256,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ResponsysLinkedService object itself. @@ -261,8 +279,9 @@ public ResponsysLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ResponsysLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ResponsysLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java index 64c6219cfef9c..1ddd04e3b5d4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Responsys dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysObjectDataset.class, visible = true) @JsonTypeName("ResponsysObject") @Fluent public final class ResponsysObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ResponsysObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class ResponsysObjectDataset extends Dataset { public ResponsysObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java index d107f875047d1..9d39e963d92c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Responsys source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysSource.class, visible = true) @JsonTypeName("ResponsysSource") @Fluent public final class ResponsysSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ResponsysSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ResponsysSource extends TabularSource { public ResponsysSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
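Note on the recurring annotation change above: each polymorphic model drops include = JsonTypeInfo.As.PROPERTY and instead declares an explicit @JsonTypeId field carrying its discriminator value, marks the "type" property visible, and names itself as defaultImpl so payloads without a recognised discriminator still bind. A minimal sketch of the same Jackson pattern on a hypothetical Animal/Dog pair (these class names are illustrative only, not part of the SDK):

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonSubTypes;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Animal.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(value = Dog.class, name = "Dog") })
    class Animal {
        // Base discriminator value; subtypes shadow this with their own constant.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "Animal";

        public String type() {
            return this.type;
        }
    }

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Dog.class, visible = true)
    @JsonTypeName("Dog")
    class Dog extends Animal {
        // The subtype pins its own discriminator, mirroring e.g. "RelationalSource" above.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "Dog";

        @Override
        public String type() {
            return this.type;
        }
    }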
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java index bd677fdaa1472..bd23de94a7eb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.RestResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * A Rest service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestResourceDataset.class, visible = true) @JsonTypeName("RestResource") @Fluent public final class RestResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RestResource"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class RestResourceDataset extends Dataset { public RestResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * @@ -154,8 +172,8 @@ public RestResourceDataset withRequestMethod(Object requestMethod) { } /** - * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @return the requestBody value. */ @@ -164,8 +182,8 @@ public Object requestBody() { } /** - * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the RestResourceDataset object itself. 
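The validate() bodies throughout this patch also move from LOGGER.logExceptionAsError(...) to the builder-style LOGGER.atError().log(...), which, as the generated code shows, records the IllegalArgumentException and hands it back so the caller can throw it directly. A self-contained sketch of the same guard on a hypothetical model (ExampleSettings and linkedServiceName are illustrative names, not SDK types):

    import com.azure.core.util.logging.ClientLogger;

    final class ExampleSettings {
        private static final ClientLogger LOGGER = new ClientLogger(ExampleSettings.class);

        private Object linkedServiceName;

        public Object linkedServiceName() {
            return this.linkedServiceName;
        }

        // Fails fast when a required property is absent, matching the generated pattern above.
        public void validate() {
            if (linkedServiceName() == null) {
                // atError() returns a logging event builder; log(RuntimeException) both logs the
                // exception and returns it, so the result can be thrown in one statement.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property linkedServiceName in model ExampleSettings"));
            }
        }
    }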
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java index 32a939626dacc..6bfa278779887 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.RestServiceLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Rest Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = RestServiceLinkedService.class, + visible = true) @JsonTypeName("RestService") @Fluent public final class RestServiceLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RestService"; + /* * Rest Service linked service properties. */ @@ -32,6 +44,16 @@ public final class RestServiceLinkedService extends LinkedService { public RestServiceLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Rest Service linked service properties. * @@ -107,7 +129,8 @@ public RestServiceLinkedService withUrl(Object url) { * @return the enableServerCertificateValidation value. */ public Object enableServerCertificateValidation() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? null : this.innerTypeProperties().enableServerCertificateValidation(); } @@ -223,8 +246,8 @@ public RestServiceLinkedService withAuthHeaders(Object authHeaders) { } /** - * Get the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication - * type. Type: string (or Expression with resultType string). + * Get the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication type. + * Type: string (or Expression with resultType string). * * @return the servicePrincipalId value. */ @@ -233,8 +256,8 @@ public Object servicePrincipalId() { } /** - * Set the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication - * type. Type: string (or Expression with resultType string). + * Set the servicePrincipalId property: The application's client ID used in AadServicePrincipal authentication type. + * Type: string (or Expression with resultType string). * * @param servicePrincipalId the servicePrincipalId value to set. * @return the RestServiceLinkedService object itself. @@ -296,9 +319,9 @@ public RestServiceLinkedService withTenant(Object tenant) { } /** - * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. 
Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Get the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @return the azureCloudType value. */ @@ -307,9 +330,9 @@ public Object azureCloudType() { } /** - * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values - * are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud - * type. Type: string (or Expression with resultType string). + * Set the azureCloudType property: Indicates the azure cloud type of the service principle auth. Allowed values are + * AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. + * Type: string (or Expression with resultType string). * * @param azureCloudType the azureCloudType value to set. * @return the RestServiceLinkedService object itself. @@ -323,8 +346,8 @@ public RestServiceLinkedService withAzureCloudType(Object azureCloudType) { } /** - * Get the aadResourceId property: The resource you are requesting authorization to use. Type: string (or - * Expression with resultType string). + * Get the aadResourceId property: The resource you are requesting authorization to use. Type: string (or Expression + * with resultType string). * * @return the aadResourceId value. */ @@ -333,8 +356,8 @@ public Object aadResourceId() { } /** - * Set the aadResourceId property: The resource you are requesting authorization to use. Type: string (or - * Expression with resultType string). + * Set the aadResourceId property: The resource you are requesting authorization to use. Type: string (or Expression + * with resultType string). * * @param aadResourceId the aadResourceId value to set. * @return the RestServiceLinkedService object itself. @@ -348,8 +371,8 @@ public RestServiceLinkedService withAadResourceId(Object aadResourceId) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -358,8 +381,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the RestServiceLinkedService object itself. @@ -469,8 +492,8 @@ public RestServiceLinkedService withTokenEndpoint(Object tokenEndpoint) { } /** - * Get the resource property: The target service or resource to which the access will be requested. Type: string - * (or Expression with resultType string). 
+ * Get the resource property: The target service or resource to which the access will be requested. Type: string (or + * Expression with resultType string). * * @return the resource value. */ @@ -479,8 +502,8 @@ public Object resource() { } /** - * Set the resource property: The target service or resource to which the access will be requested. Type: string - * (or Expression with resultType string). + * Set the resource property: The target service or resource to which the access will be requested. Type: string (or + * Expression with resultType string). * * @param resource the resource value to set. * @return the RestServiceLinkedService object itself. @@ -527,8 +550,9 @@ public RestServiceLinkedService withScope(Object scope) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model RestServiceLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model RestServiceLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java index 4d2d7630334b0..76fc72a54b5d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java @@ -6,47 +6,50 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Rest service Sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestSink.class, visible = true) @JsonTypeName("RestSink") @Fluent public final class RestSink extends CopySink { /* - * The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType - * string). + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RestSink"; + + /* + * The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestMethod") private Object requestMethod; /* - * The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string - * type). + * The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string type). */ @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. 
Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* - * The time to await before sending next request, in milliseconds + * The time to await before sending next request, in milliseconds */ @JsonProperty(value = "requestInterval") private Object requestInterval; /* - * Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The - * Only Supported option is Gzip. Type: string (or Expression with resultType string). + * Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string (or Expression with resultType string). */ @JsonProperty(value = "httpCompressionType") private Object httpCompressionType; @@ -57,6 +60,16 @@ public final class RestSink extends CopySink { public RestSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the requestMethod property: The HTTP method used to call the RESTful API. The default is POST. Type: string * (or Expression with resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java index c725819228fbf..92f3629665b9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java @@ -6,33 +6,38 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Rest service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestSource.class, visible = true) @JsonTypeName("RestSource") @Fluent public final class RestSource extends CopySource { /* - * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType - * string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "RestSource"; + + /* + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestMethod") private Object requestMethod; /* - * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType - * string). + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */ @JsonProperty(value = "requestBody") private Object requestBody; /* - * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType - * string). + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; @@ -44,22 +49,19 @@ public final class RestSource extends CopySource { private Object paginationRules; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* - * The time to await before sending next page request. + * The time to await before sending next page request. */ @JsonProperty(value = "requestInterval") private Object requestInterval; /* - * Specifies the additional columns to be added to source data. Type: key value pairs (value should be string - * type). + * Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -70,6 +72,16 @@ public final class RestSource extends CopySource { public RestSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the requestMethod property: The HTTP method used to call the RESTful API. The default is GET. Type: string * (or Expression with resultType string). @@ -93,8 +105,8 @@ public RestSource withRequestMethod(Object requestMethod) { } /** - * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Get the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @return the requestBody value. */ @@ -103,8 +115,8 @@ public Object requestBody() { } /** - * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string - * (or Expression with resultType string). + * Set the requestBody property: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or + * Expression with resultType string). * * @param requestBody the requestBody value to set. * @return the RestSource object itself. @@ -137,8 +149,8 @@ public RestSource withAdditionalHeaders(Object additionalHeaders) { } /** - * Get the paginationRules property: The pagination rules to compose next page requests. Type: string (or - * Expression with resultType string). + * Get the paginationRules property: The pagination rules to compose next page requests. Type: string (or Expression + * with resultType string). * * @return the paginationRules value. */ @@ -147,8 +159,8 @@ public Object paginationRules() { } /** - * Set the paginationRules property: The pagination rules to compose next page requests. Type: string (or - * Expression with resultType string). + * Set the paginationRules property: The pagination rules to compose next page requests. Type: string (or Expression + * with resultType string). * * @param paginationRules the paginationRules value to set. * @return the RestSource object itself. 
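RestSource and RestSink above expose the REST copy-activity knobs (request method, extra headers, pagination rules, the 00:01:40-style TimeSpan timeout) as loosely typed Object properties so every value may also be an ADF expression. A small usage sketch built only from the withXxx setters visible in this diff; the header and pagination values are placeholders, not recommendations:

    import com.azure.resourcemanager.datafactory.models.RestSource;

    public final class RestSourceSample {
        public static RestSource buildSource() {
            // Each property accepts either a literal or an expression object (e.g. "@pipeline().RunId"),
            // because the model declares them as Object.
            return new RestSource()
                .withRequestMethod("GET")
                .withAdditionalHeaders("x-ms-sample-header: sample-value")
                .withPaginationRules("AbsoluteUrl=$.nextLink");
        }
    }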
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java index 6b7e99ae385a6..790f21bba9510 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java @@ -13,8 +13,7 @@ @Fluent public final class RetryPolicy { /* - * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: - * 0. + * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */ @JsonProperty(value = "count") private Object count; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java index c7af48ef8e578..5561c49e135fa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java @@ -160,12 +160,14 @@ public RunFilterParameters withOrderBy(List orderBy) { */ public void validate() { if (lastUpdatedAfter() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property lastUpdatedAfter in model RunFilterParameters")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property lastUpdatedAfter in model RunFilterParameters")); } if (lastUpdatedBefore() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property lastUpdatedBefore in model RunFilterParameters")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property lastUpdatedBefore in model RunFilterParameters")); } if (filters() != null) { filters().forEach(e -> e.validate()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java index c95b0004b3f57..8e1c563053694 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java @@ -15,9 +15,7 @@ @Fluent public final class RunQueryFilter { /* - * Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, - * RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and - * Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. + * Parameter name to be used for filter. 
The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. */ @JsonProperty(value = "operand", required = true) private RunQueryFilterOperand operand; @@ -113,16 +111,16 @@ public RunQueryFilter withValues(List values) { */ public void validate() { if (operand() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property operand in model RunQueryFilter")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property operand in model RunQueryFilter")); } if (operator() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property operator in model RunQueryFilter")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property operator in model RunQueryFilter")); } if (values() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property values in model RunQueryFilter")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property values in model RunQueryFilter")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java index 79c8141bbcb11..9931073d1cbd3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java @@ -14,9 +14,7 @@ @Fluent public final class RunQueryOrderBy { /* - * Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, - * RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; - * for trigger runs are TriggerName, TriggerRunTimestamp and Status. + * Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. */ @JsonProperty(value = "orderBy", required = true) private RunQueryOrderByField orderBy; @@ -34,9 +32,9 @@ public RunQueryOrderBy() { } /** - * Get the orderBy property: Parameter name to be used for order by. The allowed parameters to order by for - * pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, - * ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. + * Get the orderBy property: Parameter name to be used for order by. The allowed parameters to order by for pipeline + * runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, + * ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. * * @return the orderBy value. */ @@ -45,9 +43,9 @@ public RunQueryOrderByField orderBy() { } /** - * Set the orderBy property: Parameter name to be used for order by. 
The allowed parameters to order by for - * pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, - * ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. + * Set the orderBy property: Parameter name to be used for order by. The allowed parameters to order by for pipeline + * runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, + * ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. * * @param orderBy the orderBy value to set. * @return the RunQueryOrderBy object itself. @@ -84,12 +82,12 @@ public RunQueryOrderBy withOrder(RunQueryOrder order) { */ public void validate() { if (orderBy() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property orderBy in model RunQueryOrderBy")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property orderBy in model RunQueryOrderBy")); } if (order() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property order in model RunQueryOrderBy")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property order in model RunQueryOrderBy")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java index 815a90a3e0088..9248daab25630 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Salesforce. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceLinkedService.class, + visible = true) @JsonTypeName("Salesforce") @Fluent public final class SalesforceLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Salesforce"; + /* * Salesforce linked service properties. */ @@ -32,6 +44,16 @@ public final class SalesforceLinkedService extends LinkedService { public SalesforceLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce linked service properties. * @@ -201,8 +223,8 @@ public SalesforceLinkedService withApiVersion(Object apiVersion) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -211,8 +233,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceLinkedService object itself. @@ -234,8 +256,9 @@ public SalesforceLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SalesforceLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SalesforceLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java index d60771a63677b..c01e29f3f18fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceMarketingCloudLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Salesforce Marketing Cloud linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceMarketingCloudLinkedService.class, + visible = true) @JsonTypeName("SalesforceMarketingCloud") @Fluent public final class SalesforceMarketingCloudLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceMarketingCloud"; + /* * Salesforce Marketing Cloud linked service properties. */ @@ -33,6 +45,16 @@ public final class SalesforceMarketingCloudLinkedService extends LinkedService { public SalesforceMarketingCloudLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce Marketing Cloud linked service properties. * @@ -104,8 +126,8 @@ public SalesforceMarketingCloudLinkedService withConnectionProperties(Object con } /** - * Get the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: - * string (or Expression with resultType string). + * Get the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: string + * (or Expression with resultType string). * * @return the clientId value. */ @@ -114,8 +136,8 @@ public Object clientId() { } /** - * Set the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: - * string (or Expression with resultType string). + * Set the clientId property: The client ID associated with the Salesforce Marketing Cloud application. Type: string + * (or Expression with resultType string). * * @param clientId the clientId value to set. * @return the SalesforceMarketingCloudLinkedService object itself. @@ -206,8 +228,8 @@ public SalesforceMarketingCloudLinkedService withUseHostVerification(Object useH } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @return the usePeerVerification value. */ @@ -216,8 +238,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. Type: boolean (or Expression with resultType boolean). * * @param usePeerVerification the usePeerVerification value to set. * @return the SalesforceMarketingCloudLinkedService object itself. @@ -231,8 +253,8 @@ public SalesforceMarketingCloudLinkedService withUsePeerVerification(Object useP } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -241,8 +263,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceMarketingCloudLinkedService object itself. 
@@ -264,8 +286,9 @@ public SalesforceMarketingCloudLinkedService withEncryptedCredential(String encr public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SalesforceMarketingCloudLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SalesforceMarketingCloudLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java index 2f29690b8347f..a72e0d8947ed8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Salesforce Marketing Cloud dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceMarketingCloudObjectDataset.class, + visible = true) @JsonTypeName("SalesforceMarketingCloudObject") @Fluent public final class SalesforceMarketingCloudObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceMarketingCloudObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class SalesforceMarketingCloudObjectDataset extends Dataset { public SalesforceMarketingCloudObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java index 85bddfea4cbd1..40a9fa28a17aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce Marketing Cloud source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceMarketingCloudSource.class, + visible = true) @JsonTypeName("SalesforceMarketingCloudSource") @Fluent public final class SalesforceMarketingCloudSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceMarketingCloudSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +40,16 @@ public final class SalesforceMarketingCloudSource extends TabularSource { public SalesforceMarketingCloudSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java index 07dd622c8f90a..08816741e6e25 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceObjectDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Salesforce object dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceObjectDataset.class, + visible = true) @JsonTypeName("SalesforceObject") @Fluent public final class SalesforceObjectDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceObject"; + /* * Salesforce object dataset properties. */ @@ -31,6 +43,16 @@ public final class SalesforceObjectDataset extends Dataset { public SalesforceObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce object dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java index cb16f0f9478a7..34aeb31e64e40 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Salesforce Service Cloud. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudLinkedService.class, + visible = true) @JsonTypeName("SalesforceServiceCloud") @Fluent public final class SalesforceServiceCloudLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloud"; + /* * Salesforce Service Cloud linked service properties. */ @@ -33,6 +45,16 @@ public final class SalesforceServiceCloudLinkedService extends LinkedService { public SalesforceServiceCloudLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce Service Cloud linked service properties. * @@ -229,8 +251,8 @@ public SalesforceServiceCloudLinkedService withExtendedProperties(Object extende } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -239,8 +261,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceServiceCloudLinkedService object itself. @@ -262,8 +284,9 @@ public SalesforceServiceCloudLinkedService withEncryptedCredential(String encryp public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SalesforceServiceCloudLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SalesforceServiceCloudLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java index a7e05fd9fe6b4..a436fd3bc891f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudObjectDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Salesforce Service Cloud object dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudObjectDataset.class, + visible = true) @JsonTypeName("SalesforceServiceCloudObject") @Fluent public final class SalesforceServiceCloudObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudObject"; + /* * Salesforce Service Cloud object dataset properties. */ @@ -31,6 +43,16 @@ public final class SalesforceServiceCloudObjectDataset extends Dataset { public SalesforceServiceCloudObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce Service Cloud object dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java index 2a68783b8f7f4..696cdb62ea975 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce Service Cloud sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudSink.class, + visible = true) @JsonTypeName("SalesforceServiceCloudSink") @Fluent public final class SalesforceServiceCloudSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudSink"; + /* * The write behavior for the operation. Default is Insert. */ @@ -23,18 +35,13 @@ public final class SalesforceServiceCloudSink extends CopySink { private SalesforceSinkWriteBehavior writeBehavior; /* - * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or - * Expression with resultType string). + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */ @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* - * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write - * operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object - * unchanged when doing upsert/update operation and insert defined default value when doing insert operation, - * versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert - * NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; @@ -45,6 +52,16 @@ public final class SalesforceServiceCloudSink extends CopySink { public SalesforceServiceCloudSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. Default is Insert. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java index 637e8f3e25a6c..90fe3d36bd0e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce Service Cloud source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudSource.class, + visible = true) @JsonTypeName("SalesforceServiceCloudSource") @Fluent public final class SalesforceServiceCloudSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,15 +35,13 @@ public final class SalesforceServiceCloudSource extends CopySource { private Object query; /* - * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or - * Expression with resultType string). + * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). */ @JsonProperty(value = "readBehavior") private Object readBehavior; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -42,6 +52,16 @@ public final class SalesforceServiceCloudSource extends CopySource { public SalesforceServiceCloudSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java index 6653a32e5cc97..8f7d0420eb3c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Salesforce Service Cloud V2. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudV2LinkedService.class, + visible = true) @JsonTypeName("SalesforceServiceCloudV2") @Fluent public final class SalesforceServiceCloudV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudV2"; + /* * Salesforce Service Cloud V2 linked service properties. */ @@ -33,6 +45,16 @@ public final class SalesforceServiceCloudV2LinkedService extends LinkedService { public SalesforceServiceCloudV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce Service Cloud V2 linked service properties. * @@ -204,8 +226,8 @@ public SalesforceServiceCloudV2LinkedService withApiVersion(Object apiVersion) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -214,8 +236,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceServiceCloudV2LinkedService object itself. 
@@ -237,8 +259,9 @@ public SalesforceServiceCloudV2LinkedService withEncryptedCredential(String encr public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SalesforceServiceCloudV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SalesforceServiceCloudV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java index 86d7c35d36005..8f897fa3e0411 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2ObjectDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Salesforce Service Cloud V2 object dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudV2ObjectDataset.class, + visible = true) @JsonTypeName("SalesforceServiceCloudV2Object") @Fluent public final class SalesforceServiceCloudV2ObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudV2Object"; + /* * Salesforce Service Cloud V2 object dataset properties. */ @@ -31,6 +43,16 @@ public final class SalesforceServiceCloudV2ObjectDataset extends Dataset { public SalesforceServiceCloudV2ObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce Service Cloud V2 object dataset properties. * @@ -104,8 +126,8 @@ public SalesforceServiceCloudV2ObjectDataset withFolder(DatasetFolder folder) { } /** - * Get the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression - * with resultType string). + * Get the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression with + * resultType string). * * @return the objectApiName value. */ @@ -114,8 +136,8 @@ public Object objectApiName() { } /** - * Set the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression - * with resultType string). + * Set the objectApiName property: The Salesforce Service Cloud V2 object API name. Type: string (or Expression with + * resultType string). * * @param objectApiName the objectApiName value to set. 
* @return the SalesforceServiceCloudV2ObjectDataset object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java index d9341acbe4898..557b5cccc8bba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce Service Cloud V2 sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudV2Sink.class, + visible = true) @JsonTypeName("SalesforceServiceCloudV2Sink") @Fluent public final class SalesforceServiceCloudV2Sink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudV2Sink"; + /* * The write behavior for the operation. Default is Insert. */ @@ -23,18 +35,13 @@ public final class SalesforceServiceCloudV2Sink extends CopySink { private SalesforceV2SinkWriteBehavior writeBehavior; /* - * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or - * Expression with resultType string). + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */ @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* - * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write - * operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object - * unchanged when doing upsert/update operation and insert defined default value when doing insert operation, - * versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert - * NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; @@ -45,6 +52,16 @@ public final class SalesforceServiceCloudV2Sink extends CopySink { public SalesforceServiceCloudV2Sink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. Default is Insert. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java index cfcad5c9969ed..6d473b1ad2827 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce Service Cloud V2 source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceServiceCloudV2Source.class, + visible = true) @JsonTypeName("SalesforceServiceCloudV2Source") @Fluent public final class SalesforceServiceCloudV2Source extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceServiceCloudV2Source"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,15 +35,13 @@ public final class SalesforceServiceCloudV2Source extends CopySource { private Object soqlQuery; /* - * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or - * Expression with resultType boolean). + * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "includeDeletedObjects") private Object includeDeletedObjects; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -42,6 +52,16 @@ public final class SalesforceServiceCloudV2Source extends CopySource { public SalesforceServiceCloudV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the soqlQuery property: Database query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java index db032bc420ab5..5269e5425f2f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceSink.class, visible = true) @JsonTypeName("SalesforceSink") @Fluent public final class SalesforceSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceSink"; + /* * The write behavior for the operation. Default is Insert. */ @@ -23,18 +31,13 @@ public final class SalesforceSink extends CopySink { private SalesforceSinkWriteBehavior writeBehavior; /* - * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or - * Expression with resultType string). + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */ @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* - * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write - * operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object - * unchanged when doing upsert/update operation and insert defined default value when doing insert operation, - * versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert - * NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; @@ -45,6 +48,16 @@ public final class SalesforceSink extends CopySink { public SalesforceSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. Default is Insert. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java index 4d6f7a29985b9..4a328550a665e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceSource.class, visible = true) @JsonTypeName("SalesforceSource") @Fluent public final class SalesforceSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class SalesforceSource extends TabularSource { private Object query; /* - * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or - * Expression with resultType string). + * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). */ @JsonProperty(value = "readBehavior") private Object readBehavior; @@ -35,6 +42,16 @@ public final class SalesforceSource extends TabularSource { public SalesforceSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java index d7d56410d76ae..20a916818df03 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Salesforce V2. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceV2LinkedService.class, + visible = true) @JsonTypeName("SalesforceV2") @Fluent public final class SalesforceV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceV2"; + /* * Salesforce V2 linked service properties. */ @@ -32,6 +44,16 @@ public final class SalesforceV2LinkedService extends LinkedService { public SalesforceV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce V2 linked service properties. * @@ -203,8 +225,8 @@ public SalesforceV2LinkedService withApiVersion(Object apiVersion) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -213,8 +235,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SalesforceV2LinkedService object itself. 
@@ -236,8 +258,9 @@ public SalesforceV2LinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SalesforceV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SalesforceV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java index f94f65680ad2f..bf8d79e81f362 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2ObjectDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The Salesforce V2 object dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SalesforceV2ObjectDataset.class, + visible = true) @JsonTypeName("SalesforceV2Object") @Fluent public final class SalesforceV2ObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceV2Object"; + /* * Salesforce V2 object dataset properties. */ @@ -31,6 +43,16 @@ public final class SalesforceV2ObjectDataset extends Dataset { public SalesforceV2ObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Salesforce V2 object dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java index 2335248dff70d..a0ef817a722ff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce V2 sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceV2Sink.class, visible = true) @JsonTypeName("SalesforceV2Sink") @Fluent public final class SalesforceV2Sink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceV2Sink"; + /* * The write behavior for the operation. Default is Insert. */ @@ -23,18 +31,13 @@ public final class SalesforceV2Sink extends CopySink { private SalesforceV2SinkWriteBehavior writeBehavior; /* - * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or - * Expression with resultType string). + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */ @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* - * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write - * operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object - * unchanged when doing upsert/update operation and insert defined default value when doing insert operation, - * versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert - * NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; @@ -45,6 +48,16 @@ public final class SalesforceV2Sink extends CopySink { public SalesforceV2Sink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. Default is Insert. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java index 1b337709d876d..fb40cc4d3b83f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Salesforce V2 source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceV2Source.class, visible = true) @JsonTypeName("SalesforceV2Source") @Fluent public final class SalesforceV2Source extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SalesforceV2Source"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class SalesforceV2Source extends TabularSource { private Object soqlQuery; /* - * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or - * Expression with resultType boolean). + * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "includeDeletedObjects") private Object includeDeletedObjects; @@ -35,6 +42,16 @@ public final class SalesforceV2Source extends TabularSource { public SalesforceV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the soqlQuery property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java index 3c2e1f6c58334..7b0ae97fcfc31 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapBWLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SAP Business Warehouse Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBWLinkedService.class, visible = true) @JsonTypeName("SapBW") @Fluent public final class SapBWLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapBW"; + /* * Properties specific to this linked service type. */ @@ -32,6 +40,16 @@ public final class SapBWLinkedService extends LinkedService { public SapBWLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -101,8 +119,8 @@ public SapBWLinkedService withServer(Object server) { } /** - * Get the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented - * as a string.) 
Type: string (or Expression with resultType string). + * Get the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented as + * a string.) Type: string (or Expression with resultType string). * * @return the systemNumber value. */ @@ -111,8 +129,8 @@ public Object systemNumber() { } /** - * Set the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented - * as a string.) Type: string (or Expression with resultType string). + * Set the systemNumber property: System number of the BW system. (Usually a two-digit decimal number represented as + * a string.) Type: string (or Expression with resultType string). * * @param systemNumber the systemNumber value to set. * @return the SapBWLinkedService object itself. @@ -199,8 +217,8 @@ public SapBWLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -209,8 +227,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapBWLinkedService object itself. @@ -232,8 +250,9 @@ public SapBWLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapBWLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapBWLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java index 867ef46c8fe9f..8bf41a31502b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java @@ -5,6 +5,8 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,16 +15,33 @@ /** * The SAP BW cube dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBwCubeDataset.class, visible = true) @JsonTypeName("SapBwCube") @Fluent public final class SapBwCubeDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapBwCube"; + /** * Creates an instance of SapBwCubeDataset class. */ public SapBwCubeDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java index 3e00e18be8ef7..beb381b660cc3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SapBW server via MDX. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBwSource.class, visible = true) @JsonTypeName("SapBwSource") @Fluent public final class SapBwSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapBwSource"; + /* * MDX query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class SapBwSource extends TabularSource { public SapBwSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: MDX query. Type: string (or Expression with resultType string). 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java index d7baf3eba5aa3..514ed0df38137 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for SAP Cloud for Customer. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapCloudForCustomerLinkedService.class, + visible = true) @JsonTypeName("SapCloudForCustomer") @Fluent public final class SapCloudForCustomerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapCloudForCustomer"; + /* * SAP Cloud for Customer linked service properties. */ @@ -33,6 +45,16 @@ public final class SapCloudForCustomerLinkedService extends LinkedService { public SapCloudForCustomerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP Cloud for Customer linked service properties. * @@ -152,9 +174,9 @@ public SapCloudForCustomerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @return the encryptedCredential value. */ @@ -163,9 +185,9 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapCloudForCustomerLinkedService object itself. 
@@ -187,8 +209,9 @@ public SapCloudForCustomerLinkedService withEncryptedCredential(String encrypted public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapCloudForCustomerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapCloudForCustomerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java index 6854ee4545b8f..42e86094851a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * The path of the SAP Cloud for Customer OData entity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapCloudForCustomerResourceDataset.class, + visible = true) @JsonTypeName("SapCloudForCustomerResource") @Fluent public final class SapCloudForCustomerResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapCloudForCustomerResource"; + /* * SAP Cloud For Customer OData resource dataset properties. */ @@ -33,6 +45,16 @@ public final class SapCloudForCustomerResourceDataset extends Dataset { public SapCloudForCustomerResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP Cloud For Customer OData resource dataset properties. 
* @@ -139,8 +161,9 @@ public SapCloudForCustomerResourceDataset withPath(Object path) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapCloudForCustomerResourceDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapCloudForCustomerResourceDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java index 66a5661f5b285..8c7d30a690b71 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SAP Cloud for Customer sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapCloudForCustomerSink.class, + visible = true) @JsonTypeName("SapCloudForCustomerSink") @Fluent public final class SapCloudForCustomerSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapCloudForCustomerSink"; + /* * The write behavior for the operation. Default is 'Insert'. */ @@ -23,9 +35,7 @@ public final class SapCloudForCustomerSink extends CopySink { private SapCloudForCustomerSinkWriteBehavior writeBehavior; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -36,6 +46,16 @@ public final class SapCloudForCustomerSink extends CopySink { public SapCloudForCustomerSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the writeBehavior property: The write behavior for the operation. Default is 'Insert'. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java index e0c2c0626009f..77e1d6fdb5f33 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP Cloud for Customer source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapCloudForCustomerSource.class, + visible = true) @JsonTypeName("SapCloudForCustomerSource") @Fluent public final class SapCloudForCustomerSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapCloudForCustomerSource"; + /* * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ @@ -23,9 +35,7 @@ public final class SapCloudForCustomerSource extends TabularSource { private Object query; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -36,6 +46,16 @@ public final class SapCloudForCustomerSource extends TabularSource { public SapCloudForCustomerSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression * with resultType string). 
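The SapCloudForCustomer* models above show the pattern this regeneration applies to every polymorphic model in the package: @JsonTypeInfo drops the explicit include = JsonTypeInfo.As.PROPERTY (which is Jackson's default anyway), gains defaultImpl and visible = true, and each subtype now carries a constant @JsonTypeId-annotated type field exposed through an overriding type() getter. As a rough standalone illustration, and not part of the patch itself, the sketch below wires the same annotations onto a hypothetical SampleSource class; jackson-databind is assumed to be on the classpath.

    // Illustrative sketch only: mirrors the annotation pattern emitted by the generator.
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonTypeId;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.annotation.JsonTypeName;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // include = As.PROPERTY is Jackson's default, so omitting it changes nothing;
    // visible = true asks Jackson to also bind the discriminator value to the "type" property.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SampleSource.class, visible = true)
    @JsonTypeName("SampleSource")
    class SampleSource {
        // Constant discriminator, mirroring the generated "private String type = ..." fields.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "SampleSource";

        public String type() {
            return this.type;
        }
    }

    public final class VisibleTypeIdSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // The discriminator is bound into the field on read and stays readable through type().
            SampleSource parsed = mapper.readValue("{\"type\":\"SampleSource\"}", SampleSource.class);
            System.out.println(parsed.type());
            System.out.println(mapper.writeValueAsString(parsed));
        }
    }

With visible = true the discriminator that the serializer writes is also handed back to the type field during deserialization, which appears to be what lets the generated type() overrides return the concrete value instead of relying on the class-level @JsonTypeName alone.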
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java index 26df9a756b1fa..c5157041fecc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapEccLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for SAP ERP Central Component(SAP ECC). */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccLinkedService.class, visible = true) @JsonTypeName("SapEcc") @Fluent public final class SapEccLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapEcc"; + /* * SAP ECC linked service properties. */ @@ -32,6 +40,16 @@ public final class SapEccLinkedService extends LinkedService { public SapEccLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP ECC linked service properties. * @@ -151,9 +169,9 @@ public SapEccLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @return the encryptedCredential value. */ @@ -162,9 +180,9 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must - * be provided. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Either encryptedCredential or username/password must be + * provided. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapEccLinkedService object itself. 
@@ -186,8 +204,9 @@ public SapEccLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapEccLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapEccLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java index a137f26a5347f..ed80256f202be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapEccResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The path of the SAP ECC OData entity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccResourceDataset.class, visible = true) @JsonTypeName("SapEccResource") @Fluent public final class SapEccResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapEccResource"; + /* * SAP ECC OData resource dataset properties. */ @@ -32,6 +40,16 @@ public final class SapEccResourceDataset extends Dataset { public SapEccResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP ECC OData resource dataset properties. * @@ -105,8 +123,7 @@ public SapEccResourceDataset withFolder(DatasetFolder folder) { } /** - * Get the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType - * string). + * Get the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). * * @return the path value. */ @@ -115,8 +132,7 @@ public Object path() { } /** - * Set the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType - * string). + * Set the path property: The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). * * @param path the path value to set. * @return the SapEccResourceDataset object itself. 
@@ -138,8 +154,9 @@ public SapEccResourceDataset withPath(Object path) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapEccResourceDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapEccResourceDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java index 4427067c68878..607ee6e42cd10 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP ECC source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccSource.class, visible = true) @JsonTypeName("SapEccSource") @Fluent public final class SapEccSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapEccSource"; + /* * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ @@ -23,9 +31,7 @@ public final class SapEccSource extends TabularSource { private Object query; /* - * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read - * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -36,6 +42,16 @@ public final class SapEccSource extends TabularSource { public SapEccSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType * string). 
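Alongside the annotation changes, every validate() override in this patch switches from ClientLogger.logExceptionAsError(...) to the builder-style LOGGER.atError().log(...), which, as the generated throw statements show, logs the IllegalArgumentException and returns it so it can be rethrown in one expression. The following is a minimal sketch of how that guard reads outside the generated code, using a hypothetical SampleModel; azure-core's ClientLogger is assumed to be available.

    // Illustrative sketch only: the validation guard pattern from the regenerated models.
    import com.azure.core.util.logging.ClientLogger;

    final class SampleModel {
        private static final ClientLogger LOGGER = new ClientLogger(SampleModel.class);

        private Object innerTypeProperties;

        public void validate() {
            if (innerTypeProperties == null) {
                // New generated form: build the error-level event, log the exception,
                // and throw the instance that log(...) hands back.
                throw LOGGER.atError()
                    .log(new IllegalArgumentException(
                        "Missing required property innerTypeProperties in model SampleModel"));
                // Previous generated form (still available on ClientLogger):
                // throw LOGGER.logExceptionAsError(new IllegalArgumentException("..."));
            }
        }
    }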
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java index 518e1c431d828..9de0a5ac94679 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapHanaLinkedServiceProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SAP HANA Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaLinkedService.class, visible = true) @JsonTypeName("SapHana") @Fluent public final class SapHanaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapHana"; + /* * Properties specific to this linked service type. */ @@ -32,6 +40,16 @@ public final class SapHanaLinkedService extends LinkedService { public SapHanaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -197,8 +215,8 @@ public SapHanaLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -207,8 +225,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapHanaLinkedService object itself. 
@@ -230,8 +248,9 @@ public SapHanaLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapHanaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapHanaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java index 38c5da827ed20..84009d064c4cd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java @@ -13,8 +13,7 @@ @Fluent public final class SapHanaPartitionSettings { /* - * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with - * resultType string). + * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; @@ -26,8 +25,8 @@ public SapHanaPartitionSettings() { } /** - * Get the partitionColumnName property: The name of the column that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * Get the partitionColumnName property: The name of the column that will be used for proceeding range partitioning. + * Type: string (or Expression with resultType string). * * @return the partitionColumnName value. */ @@ -36,8 +35,8 @@ public Object partitionColumnName() { } /** - * Set the partitionColumnName property: The name of the column that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * Set the partitionColumnName property: The name of the column that will be used for proceeding range partitioning. + * Type: string (or Expression with resultType string). * * @param partitionColumnName the partitionColumnName value to set. * @return the SapHanaPartitionSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java index 57d518cef82bb..a956c9309baf4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP HANA source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaSource.class, visible = true) @JsonTypeName("SapHanaSource") @Fluent public final class SapHanaSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapHanaSource"; + /* * SAP HANA Sql query. Type: string (or Expression with resultType string). */ @@ -29,8 +37,7 @@ public final class SapHanaSource extends TabularSource { private Object packetSize; /* - * The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + * The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -47,6 +54,16 @@ public final class SapHanaSource extends TabularSource { public SapHanaSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: SAP HANA Sql query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java index fb6fc65b79e89..384baf066445f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SapHanaTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * SAP HANA Table properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaTableDataset.class, visible = true) @JsonTypeName("SapHanaTable") @Fluent public final class SapHanaTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapHanaTable"; + /* * SAP HANA Table properties. */ @@ -31,6 +39,16 @@ public final class SapHanaTableDataset extends Dataset { public SapHanaTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP HANA Table properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java index 99b9af4eadc48..4c7bd0c10636a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapOdpLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SAP ODP Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpLinkedService.class, visible = true) @JsonTypeName("SapOdp") @Fluent public final class SapOdpLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOdp"; + /* * Properties specific to SAP ODP linked service type. */ @@ -32,6 +40,16 @@ public final class SapOdpLinkedService extends LinkedService { public SapOdpLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to SAP ODP linked service type. * @@ -426,8 +444,8 @@ public SapOdpLinkedService withSncQop(Object sncQop) { } /** - * Get the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with - * resultType string). + * Get the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with resultType + * string). * * @return the x509CertificatePath value. */ @@ -436,8 +454,8 @@ public Object x509CertificatePath() { } /** - * Set the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with - * resultType string). + * Set the x509CertificatePath property: SNC X509 certificate file path. Type: string (or Expression with resultType + * string). * * @param x509CertificatePath the x509CertificatePath value to set. * @return the SapOdpLinkedService object itself. @@ -499,8 +517,8 @@ public SapOdpLinkedService withSubscriberName(Object subscriberName) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -509,8 +527,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapOdpLinkedService object itself. @@ -532,8 +550,9 @@ public SapOdpLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapOdpLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapOdpLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java index 30c16a555db68..4b20173fc4f27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapOdpResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SAP ODP Resource properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpResourceDataset.class, visible = true) @JsonTypeName("SapOdpResource") @Fluent public final class SapOdpResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOdpResource"; + /* * SAP ODP Resource properties. */ @@ -32,6 +40,16 @@ public final class SapOdpResourceDataset extends Dataset { public SapOdpResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP ODP Resource properties. * @@ -105,8 +123,7 @@ public SapOdpResourceDataset withFolder(DatasetFolder folder) { } /** - * Get the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Get the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType string). * * @return the context value. */ @@ -115,8 +132,7 @@ public Object context() { } /** - * Set the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Set the context property: The context of the SAP ODP Object. Type: string (or Expression with resultType string). * * @param context the context value to set. * @return the SapOdpResourceDataset object itself. 
@@ -130,8 +146,7 @@ public SapOdpResourceDataset withContext(Object context) { } /** - * Get the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Get the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType string). * * @return the objectName value. */ @@ -140,8 +155,7 @@ public Object objectName() { } /** - * Set the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType - * string). + * Set the objectName property: The name of the SAP ODP Object. Type: string (or Expression with resultType string). * * @param objectName the objectName value to set. * @return the SapOdpResourceDataset object itself. @@ -163,8 +177,9 @@ public SapOdpResourceDataset withObjectName(Object objectName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapOdpResourceDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapOdpResourceDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java index 49c5f7708b5ae..8104b35a4c822 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP ODP source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpSource.class, visible = true) @JsonTypeName("SapOdpSource") @Fluent public final class SapOdpSource extends TabularSource { /* - * The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string - * (or Expression with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOdpSource"; + + /* + * The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). */ @JsonProperty(value = "extractionMode") private Object extractionMode; @@ -30,15 +37,13 @@ public final class SapOdpSource extends TabularSource { private Object subscriberProcess; /* - * Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with - * resultType array of objects). + * Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). */ @JsonProperty(value = "selection") private Object selection; /* - * Specifies the columns to be selected from source data. 
Type: array of objects(projection) (or Expression with - * resultType array of objects). + * Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). */ @JsonProperty(value = "projection") private Object projection; @@ -49,6 +54,16 @@ public final class SapOdpSource extends TabularSource { public SapOdpSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the extractionMode property: The extraction mode. Allowed value include: Full, Delta and Recovery. The * default value is Full. Type: string (or Expression with resultType string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java index 364ab0a6fd560..e501f216fbc07 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * SAP Business Warehouse Open Hub Destination Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapOpenHubLinkedService.class, + visible = true) @JsonTypeName("SapOpenHub") @Fluent public final class SapOpenHubLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOpenHub"; + /* * Properties specific to SAP Business Warehouse Open Hub Destination linked service type. */ @@ -32,6 +44,16 @@ public final class SapOpenHubLinkedService extends LinkedService { public SapOpenHubLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to SAP Business Warehouse Open Hub Destination linked * service type. @@ -79,8 +101,8 @@ public SapOpenHubLinkedService withAnnotations(List annotations) { } /** - * Get the server property: Host name of the SAP BW instance where the open hub destination is located. Type: - * string (or Expression with resultType string). + * Get the server property: Host name of the SAP BW instance where the open hub destination is located. Type: string + * (or Expression with resultType string). * * @return the server value. */ @@ -89,8 +111,8 @@ public Object server() { } /** - * Set the server property: Host name of the SAP BW instance where the open hub destination is located. Type: - * string (or Expression with resultType string). 
+ * Set the server property: Host name of the SAP BW instance where the open hub destination is located. Type: string + * (or Expression with resultType string). * * @param server the server value to set. * @return the SapOpenHubLinkedService object itself. @@ -104,9 +126,8 @@ public SapOpenHubLinkedService withServer(Object server) { } /** - * Get the systemNumber property: System number of the BW system where the open hub destination is located. - * (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType - * string). + * Get the systemNumber property: System number of the BW system where the open hub destination is located. (Usually + * a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). * * @return the systemNumber value. */ @@ -115,9 +136,8 @@ public Object systemNumber() { } /** - * Set the systemNumber property: System number of the BW system where the open hub destination is located. - * (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType - * string). + * Set the systemNumber property: System number of the BW system where the open hub destination is located. (Usually + * a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). * * @param systemNumber the systemNumber value to set. * @return the SapOpenHubLinkedService object itself. @@ -158,8 +178,8 @@ public SapOpenHubLinkedService withClientId(Object clientId) { } /** - * Get the language property: Language of the BW system where the open hub destination is located. The default - * value is EN. Type: string (or Expression with resultType string). + * Get the language property: Language of the BW system where the open hub destination is located. The default value + * is EN. Type: string (or Expression with resultType string). * * @return the language value. */ @@ -168,8 +188,8 @@ public Object language() { } /** - * Set the language property: Language of the BW system where the open hub destination is located. The default - * value is EN. Type: string (or Expression with resultType string). + * Set the language property: Language of the BW system where the open hub destination is located. The default value + * is EN. Type: string (or Expression with resultType string). * * @param language the language value to set. * @return the SapOpenHubLinkedService object itself. @@ -331,8 +351,8 @@ public SapOpenHubLinkedService withLogonGroup(Object logonGroup) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -341,8 +361,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapOpenHubLinkedService object itself. 
@@ -364,8 +384,9 @@ public SapOpenHubLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapOpenHubLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapOpenHubLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java index 3df1feeb700e1..de766e29e7420 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java @@ -6,40 +6,44 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP Business Warehouse Open Hub Destination source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOpenHubSource.class, visible = true) @JsonTypeName("SapOpenHubSource") @Fluent public final class SapOpenHubSource extends TabularSource { /* - * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with - * resultType boolean). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOpenHubSource"; + + /* + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "excludeLastRequest") private Object excludeLastRequest; /* - * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this - * property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). + * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */ @JsonProperty(value = "baseRequestId") private Object baseRequestId; /* - * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or - * Expression with resultType string). + * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */ @JsonProperty(value = "customRfcReadTableFunctionModule") private Object customRfcReadTableFunctionModule; /* - * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data - * retrieved. Type: string (or Expression with resultType string). + * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. 
Type: string (or Expression with resultType string). */ @JsonProperty(value = "sapDataColumnDelimiter") private Object sapDataColumnDelimiter; @@ -50,6 +54,16 @@ public final class SapOpenHubSource extends TabularSource { public SapOpenHubSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the excludeLastRequest property: Whether to exclude the records of the last request. The default value is * true. Type: boolean (or Expression with resultType boolean). @@ -119,8 +133,8 @@ public SapOpenHubSource withCustomRfcReadTableFunctionModule(Object customRfcRea } /** - * Get the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC - * as well as splitting the output data retrieved. Type: string (or Expression with resultType string). + * Get the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC as + * well as splitting the output data retrieved. Type: string (or Expression with resultType string). * * @return the sapDataColumnDelimiter value. */ @@ -129,8 +143,8 @@ public Object sapDataColumnDelimiter() { } /** - * Set the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC - * as well as splitting the output data retrieved. Type: string (or Expression with resultType string). + * Set the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC as + * well as splitting the output data retrieved. Type: string (or Expression with resultType string). * * @param sapDataColumnDelimiter the sapDataColumnDelimiter value to set. * @return the SapOpenHubSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java index 4f0441697f3d2..7b6f254359be8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Sap Business Warehouse Open Hub Destination Table properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOpenHubTableDataset.class, visible = true) @JsonTypeName("SapOpenHubTable") @Fluent public final class SapOpenHubTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapOpenHubTable"; + /* * Sap Business Warehouse Open Hub Destination Table properties. 
*/ @@ -32,6 +40,16 @@ public final class SapOpenHubTableDataset extends Dataset { public SapOpenHubTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Sap Business Warehouse Open Hub Destination Table properties. * @@ -190,8 +208,9 @@ public SapOpenHubTableDataset withBaseRequestId(Object baseRequestId) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapOpenHubTableDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapOpenHubTableDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java index 6f9a474c08ce4..97bcd51695b10 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapTableLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SAP Table Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapTableLinkedService.class, visible = true) @JsonTypeName("SapTable") @Fluent public final class SapTableLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapTable"; + /* * Properties specific to this linked service type. */ @@ -32,6 +40,16 @@ public final class SapTableLinkedService extends LinkedService { public SapTableLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -451,8 +469,8 @@ public SapTableLinkedService withLogonGroup(Object logonGroup) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -461,8 +479,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SapTableLinkedService object itself. @@ -484,8 +502,9 @@ public SapTableLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapTableLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapTableLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java index db1b941052588..20cc7a92f6fe6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java @@ -13,29 +13,25 @@ @Fluent public final class SapTablePartitionSettings { /* - * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with - * resultType string). + * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /* - * The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType - * string). + * The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). */ @JsonProperty(value = "maxPartitionsNumber") private Object maxPartitionsNumber; @@ -47,8 +43,8 @@ public SapTablePartitionSettings() { } /** - * Get the partitionColumnName property: The name of the column that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * Get the partitionColumnName property: The name of the column that will be used for proceeding range partitioning. 
+ * Type: string (or Expression with resultType string). * * @return the partitionColumnName value. */ @@ -57,8 +53,8 @@ public Object partitionColumnName() { } /** - * Set the partitionColumnName property: The name of the column that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * Set the partitionColumnName property: The name of the column that will be used for proceeding range partitioning. + * Type: string (or Expression with resultType string). * * @param partitionColumnName the partitionColumnName value to set. * @return the SapTablePartitionSettings object itself. @@ -113,8 +109,8 @@ public SapTablePartitionSettings withPartitionLowerBound(Object partitionLowerBo } /** - * Get the maxPartitionsNumber property: The maximum value of partitions the table will be split into. Type: - * integer (or Expression with resultType string). + * Get the maxPartitionsNumber property: The maximum value of partitions the table will be split into. Type: integer + * (or Expression with resultType string). * * @return the maxPartitionsNumber value. */ @@ -123,8 +119,8 @@ public Object maxPartitionsNumber() { } /** - * Set the maxPartitionsNumber property: The maximum value of partitions the table will be split into. Type: - * integer (or Expression with resultType string). + * Set the maxPartitionsNumber property: The maximum value of partitions the table will be split into. Type: integer + * (or Expression with resultType string). * * @param maxPartitionsNumber the maxPartitionsNumber value to set. * @return the SapTablePartitionSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java index 351ce58f26e48..c6dfc089f81cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SapTableResourceDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * SAP Table Resource properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SapTableResourceDataset.class, + visible = true) @JsonTypeName("SapTableResource") @Fluent public final class SapTableResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapTableResource"; + /* * SAP Table Resource properties. */ @@ -32,6 +44,16 @@ public final class SapTableResourceDataset extends Dataset { public SapTableResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SAP Table Resource properties. * @@ -136,8 +158,9 @@ public SapTableResourceDataset withTableName(Object tableName) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SapTableResourceDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SapTableResourceDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java index b5d8595e52223..56848ec9ec16c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for SAP Table source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapTableSource.class, visible = true) @JsonTypeName("SapTableSource") @Fluent public final class SapTableSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SapTableSource"; + /* * The number of rows to be retrieved. Type: integer(or Expression with resultType integer). */ @@ -29,44 +37,37 @@ public final class SapTableSource extends TabularSource { private Object rowSkips; /* - * The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression - * with resultType string). + * The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). */ @JsonProperty(value = "rfcTableFields") private Object rfcTableFields; /* - * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression - * with resultType string). + * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). */ @JsonProperty(value = "rfcTableOptions") private Object rfcTableOptions; /* - * Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: - * integer (or Expression with resultType integer). + * Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "batchSize") private Object batchSize; /* - * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or - * Expression with resultType string). 
+ * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */ @JsonProperty(value = "customRfcReadTableFunctionModule") private Object customRfcReadTableFunctionModule; /* - * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data - * retrieved. Type: string (or Expression with resultType string). + * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sapDataColumnDelimiter") private Object sapDataColumnDelimiter; /* - * The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", - * "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", - * "PartitionOnTime". + * The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -83,6 +84,16 @@ public final class SapTableSource extends TabularSource { public SapTableSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the rowCount property: The number of rows to be retrieved. Type: integer(or Expression with resultType * integer). @@ -172,8 +183,8 @@ public SapTableSource withRfcTableOptions(Object rfcTableOptions) { } /** - * Get the batchSize property: Specifies the maximum number of rows that will be retrieved at a time when - * retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + * Get the batchSize property: Specifies the maximum number of rows that will be retrieved at a time when retrieving + * data from SAP Table. Type: integer (or Expression with resultType integer). * * @return the batchSize value. */ @@ -182,8 +193,8 @@ public Object batchSize() { } /** - * Set the batchSize property: Specifies the maximum number of rows that will be retrieved at a time when - * retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + * Set the batchSize property: Specifies the maximum number of rows that will be retrieved at a time when retrieving + * data from SAP Table. Type: integer (or Expression with resultType integer). * * @param batchSize the batchSize value to set. * @return the SapTableSource object itself. @@ -216,8 +227,8 @@ public SapTableSource withCustomRfcReadTableFunctionModule(Object customRfcReadT } /** - * Get the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC - * as well as splitting the output data retrieved. Type: string (or Expression with resultType string). + * Get the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC as + * well as splitting the output data retrieved. Type: string (or Expression with resultType string). * * @return the sapDataColumnDelimiter value. */ @@ -226,8 +237,8 @@ public Object sapDataColumnDelimiter() { } /** - * Set the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC - * as well as splitting the output data retrieved. 
Type: string (or Expression with resultType string). + * Set the sapDataColumnDelimiter property: The single character that will be used as delimiter passed to SAP RFC as + * well as splitting the output data retrieved. Type: string (or Expression with resultType string). * * @param sapDataColumnDelimiter the sapDataColumnDelimiter value to set. * @return the SapTableSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java index 9c56985b228b1..ab79d1a47eac6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ScheduleTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Trigger that creates pipeline runs periodically, on schedule. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ScheduleTrigger.class, visible = true) @JsonTypeName("ScheduleTrigger") @Fluent public final class ScheduleTrigger extends MultiplePipelineTrigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ScheduleTrigger"; + /* * Schedule Trigger properties. */ @@ -31,6 +39,16 @@ public final class ScheduleTrigger extends MultiplePipelineTrigger { public ScheduleTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Schedule Trigger properties. 
* @@ -99,8 +117,9 @@ public ScheduleTrigger withRecurrence(ScheduleTriggerRecurrence recurrence) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model ScheduleTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ScheduleTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java index f5b535bd710ee..29f7089230dad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java @@ -130,16 +130,16 @@ public ScriptAction withParameters(String parameters) { */ public void validate() { if (name() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property name in model ScriptAction")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property name in model ScriptAction")); } if (uri() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property uri in model ScriptAction")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property uri in model ScriptAction")); } if (roles() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property roles in model ScriptAction")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property roles in model ScriptAction")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java index f0cb592a1a5ed..47b37309f0296 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ScriptActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Script activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ScriptActivity.class, visible = true) @JsonTypeName("Script") @Fluent public final class ScriptActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Script"; + /* * Script activity properties. 
*/ @@ -31,6 +39,16 @@ public final class ScriptActivity extends ExecutionActivity { public ScriptActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Script activity properties. * @@ -192,8 +210,9 @@ public ScriptActivity withLogSettings(ScriptActivityTypePropertiesLogSettings lo public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model ScriptActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ScriptActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java index bcc176e32e27c..4147f9e37d852 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java @@ -21,10 +21,10 @@ public final class ScriptActivityScriptBlock { private Object text; /* - * The type of the query. Type: string. + * The type of the query. Please refer to the ScriptType for valid options. Type: string (or Expression with resultType string). */ @JsonProperty(value = "type", required = true) - private ScriptType type; + private Object type; /* * Array of script parameters. Type: array. @@ -59,21 +59,23 @@ public ScriptActivityScriptBlock withText(Object text) { } /** - * Get the type property: The type of the query. Type: string. + * Get the type property: The type of the query. Please refer to the ScriptType for valid options. Type: string (or + * Expression with resultType string). * * @return the type value. */ - public ScriptType type() { + public Object type() { return this.type; } /** - * Set the type property: The type of the query. Type: string. + * Set the type property: The type of the query. Please refer to the ScriptType for valid options. Type: string (or + * Expression with resultType string). * * @param type the type value to set. * @return the ScriptActivityScriptBlock object itself. 
*/ - public ScriptActivityScriptBlock withType(ScriptType type) { + public ScriptActivityScriptBlock withType(Object type) { this.type = type; return this; } @@ -105,12 +107,12 @@ public ScriptActivityScriptBlock withParameters(List pa */ public void validate() { if (text() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property text in model ScriptActivityScriptBlock")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property text in model ScriptActivityScriptBlock")); } if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model ScriptActivityScriptBlock")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model ScriptActivityScriptBlock")); } if (parameters() != null) { parameters().forEach(e -> e.validate()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java index 9a4c2e5995433..e5e3f313b283e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java @@ -78,8 +78,9 @@ public ScriptActivityTypePropertiesLogSettings withLogLocationSettings(LogLocati */ public void validate() { if (logDestination() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property logDestination in model ScriptActivityTypePropertiesLogSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property logDestination in model ScriptActivityTypePropertiesLogSettings")); } if (logLocationSettings() != null) { logLocationSettings().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptType.java deleted file mode 100644 index 55e483c3117cd..0000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptType.java +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.models; - -import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import java.util.Collection; - -/** - * The type of the query. Type: string. - */ -public final class ScriptType extends ExpandableStringEnum { - /** - * Static value Query for ScriptType. - */ - public static final ScriptType QUERY = fromString("Query"); - - /** - * Static value NonQuery for ScriptType. - */ - public static final ScriptType NON_QUERY = fromString("NonQuery"); - - /** - * Creates a new instance of ScriptType value. - * - * @deprecated Use the {@link #fromString(String)} factory method. 
- */ - @Deprecated - public ScriptType() { - } - - /** - * Creates or finds a ScriptType from its string representation. - * - * @param name a name to look for. - * @return the corresponding ScriptType. - */ - @JsonCreator - public static ScriptType fromString(String name) { - return fromString(name, ScriptType.class); - } - - /** - * Gets known ScriptType values. - * - * @return known ScriptType values. - */ - public static Collection values() { - return values(ScriptType.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java index 1fe71bc501ee8..73d84910f9cbd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java @@ -5,28 +5,43 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The base definition of a secret type. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = SecretBase.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SecretBase.class, visible = true) @JsonTypeName("SecretBase") @JsonSubTypes({ @JsonSubTypes.Type(name = "SecureString", value = SecureString.class), @JsonSubTypes.Type(name = "AzureKeyVaultSecret", value = AzureKeyVaultSecretReference.class) }) @Immutable public class SecretBase { + /* + * Type of the secret. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /** * Creates an instance of SecretBase class. */ public SecretBase() { + this.type = "SecretBase"; + } + + /** + * Get the type property: Type of the secret. + * + * @return the type value. + */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java index 581000d11f605..72a96d361ccb6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; @@ -14,10 +15,17 @@ * Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List * API calls. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SecureString.class, visible = true) @JsonTypeName("SecureString") @Fluent public final class SecureString extends SecretBase { + /* + * Type of the secret. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SecureString"; + /* * Value of secure string. */ @@ -30,6 +38,16 @@ public final class SecureString extends SecretBase { public SecureString() { } + /** + * Get the type property: Type of the secret. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the value property: Value of secure string. * @@ -59,8 +77,8 @@ public SecureString withValue(String value) { public void validate() { super.validate(); if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model SecureString")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model SecureString")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java index 03d05345dc5e9..394a55f597430 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Self referenced tumbling window trigger dependency. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SelfDependencyTumblingWindowTriggerReference.class, + visible = true) @JsonTypeName("SelfDependencyTumblingWindowTriggerReference") @Fluent public final class SelfDependencyTumblingWindowTriggerReference extends DependencyReference { + /* + * The type of dependency reference. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SelfDependencyTumblingWindowTriggerReference"; + /* * Timespan applied to the start time of a tumbling window when evaluating dependency. */ @@ -24,8 +36,7 @@ public final class SelfDependencyTumblingWindowTriggerReference extends Dependen private String offset; /* - * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be - * used. + * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. 
*/ @JsonProperty(value = "size") private String size; @@ -36,6 +47,16 @@ public final class SelfDependencyTumblingWindowTriggerReference extends Dependen public SelfDependencyTumblingWindowTriggerReference() { } + /** + * Get the type property: The type of dependency reference. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the offset property: Timespan applied to the start time of a tumbling window when evaluating dependency. * @@ -87,8 +108,9 @@ public SelfDependencyTumblingWindowTriggerReference withSize(String size) { public void validate() { super.validate(); if (offset() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property offset in model SelfDependencyTumblingWindowTriggerReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property offset in model SelfDependencyTumblingWindowTriggerReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java index 86b0f101c87a9..70f8255627817 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java @@ -7,19 +7,30 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Self-hosted integration runtime. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SelfHostedIntegrationRuntime.class, + visible = true) @JsonTypeName("SelfHosted") @Fluent public final class SelfHostedIntegrationRuntime extends IntegrationRuntime { /* - * When this property is not null, means this is a linked integration runtime. The property is used to access - * original integration runtime. + * Type of integration runtime. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type = IntegrationRuntimeType.SELF_HOSTED; + + /* + * When this property is not null, means this is a linked integration runtime. The property is used to access original integration runtime. */ @JsonProperty(value = "typeProperties") private SelfHostedIntegrationRuntimeTypeProperties innerTypeProperties; @@ -31,8 +42,18 @@ public SelfHostedIntegrationRuntime() { } /** - * Get the innerTypeProperties property: When this property is not null, means this is a linked integration - * runtime. The property is used to access original integration runtime. + * Get the type property: Type of integration runtime. + * + * @return the type value. + */ + @Override + public IntegrationRuntimeType type() { + return this.type; + } + + /** + * Get the innerTypeProperties property: When this property is not null, means this is a linked integration runtime. 
+ * The property is used to access original integration runtime. * * @return the innerTypeProperties value. */ @@ -79,7 +100,8 @@ public SelfHostedIntegrationRuntime withLinkedInfo(LinkedIntegrationRuntimeType * @return the selfContainedInteractiveAuthoringEnabled value. */ public Boolean selfContainedInteractiveAuthoringEnabled() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? null : this.innerTypeProperties().selfContainedInteractiveAuthoringEnabled(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java index 9225cf38756d5..4f98a75fef139 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java @@ -9,6 +9,7 @@ import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner; import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeStatusTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.time.OffsetDateTime; @@ -18,10 +19,21 @@ /** * Self-hosted integration runtime status. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SelfHostedIntegrationRuntimeStatus.class, + visible = true) @JsonTypeName("SelfHosted") @Fluent public final class SelfHostedIntegrationRuntimeStatus extends IntegrationRuntimeStatus { + /* + * Type of integration runtime. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private IntegrationRuntimeType type = IntegrationRuntimeType.SELF_HOSTED; + /* * Self-hosted integration runtime status type properties. */ @@ -35,6 +47,16 @@ public final class SelfHostedIntegrationRuntimeStatus extends IntegrationRuntime public SelfHostedIntegrationRuntimeStatus() { } + /** + * Get the type property: Type of integration runtime. + * + * @return the type value. + */ + @Override + public IntegrationRuntimeType type() { + return this.type; + } + /** * Get the innerTypeProperties property: Self-hosted integration runtime status type properties. * @@ -228,7 +250,8 @@ public OffsetDateTime autoUpdateEta() { * @return the selfContainedInteractiveAuthoringEnabled value. */ public Boolean selfContainedInteractiveAuthoringEnabled() { - return this.innerTypeProperties() == null ? null + return this.innerTypeProperties() == null + ? 
null : this.innerTypeProperties().selfContainedInteractiveAuthoringEnabled(); } @@ -241,8 +264,9 @@ public Boolean selfContainedInteractiveAuthoringEnabled() { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SelfHostedIntegrationRuntimeStatus")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SelfHostedIntegrationRuntimeStatus")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java index 714d86fdc8993..fc33c71581393 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ServiceNowLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * ServiceNow server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ServiceNowLinkedService.class, + visible = true) @JsonTypeName("ServiceNow") @Fluent public final class ServiceNowLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNow"; + /* * ServiceNow server linked service properties. */ @@ -32,6 +44,16 @@ public final class ServiceNowLinkedService extends LinkedService { public ServiceNowLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ServiceNow server linked service properties. * @@ -268,8 +290,8 @@ public ServiceNowLinkedService withUseHostVerification(Object useHostVerificatio } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -278,8 +300,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ServiceNowLinkedService object itself. 
@@ -293,8 +315,8 @@ public ServiceNowLinkedService withUsePeerVerification(Object usePeerVerificatio } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -303,8 +325,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ServiceNowLinkedService object itself. @@ -326,8 +348,9 @@ public ServiceNowLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ServiceNowLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ServiceNowLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java index 8a900677c81c9..929566ea44594 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * ServiceNow server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ServiceNowObjectDataset.class, + visible = true) @JsonTypeName("ServiceNowObject") @Fluent public final class ServiceNowObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNowObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class ServiceNowObjectDataset extends Dataset { public ServiceNowObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java index 0f3d19afabd4a..c476bc819291d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity ServiceNow server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ServiceNowSource.class, visible = true) @JsonTypeName("ServiceNowSource") @Fluent public final class ServiceNowSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNowSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ServiceNowSource extends TabularSource { public ServiceNowSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java index 6c9e08fbdb3ec..deab516f99b70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ServiceNowV2LinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * ServiceNowV2 server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ServiceNowV2LinkedService.class, + visible = true) @JsonTypeName("ServiceNowV2") @Fluent public final class ServiceNowV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNowV2"; + /* * ServiceNowV2 server linked service properties. 
*/ @@ -32,6 +44,16 @@ public final class ServiceNowV2LinkedService extends LinkedService { public ServiceNowV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: ServiceNowV2 server linked service properties. * @@ -241,8 +263,8 @@ public ServiceNowV2LinkedService withGrantType(Object grantType) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -251,8 +273,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ServiceNowV2LinkedService object itself. @@ -274,8 +296,9 @@ public ServiceNowV2LinkedService withEncryptedCredential(String encryptedCredent public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ServiceNowV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ServiceNowV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java index b13ebd44cb027..e357a2890f039 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * ServiceNowV2 server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ServiceNowV2ObjectDataset.class, + visible = true) @JsonTypeName("ServiceNowV2Object") @Fluent public final class ServiceNowV2ObjectDataset extends Dataset { + /* + * Type of dataset. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNowV2Object"; + /* * Properties specific to this dataset type. */ @@ -31,6 +43,16 @@ public final class ServiceNowV2ObjectDataset extends Dataset { public ServiceNowV2ObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java index 48b2b95b68322..99f5478cf8137 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity ServiceNowV2 server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ServiceNowV2Source.class, visible = true) @JsonTypeName("ServiceNowV2Source") @Fluent public final class ServiceNowV2Source extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServiceNowV2Source"; + /* * Expression to filter data from source. */ @@ -28,6 +36,16 @@ public final class ServiceNowV2Source extends TabularSource { public ServiceNowV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the expression property: Expression to filter data from source. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java index 766f880cd7075..23e31b0174e4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ServicePrincipalCredentialTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * Service principal credential. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = ServicePrincipalCredential.class, + visible = true) @JsonTypeName("ServicePrincipal") @Fluent public final class ServicePrincipalCredential extends Credential { + /* + * Type of credential. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ServicePrincipal"; + /* * Service Principal credential properties. */ @@ -32,6 +44,16 @@ public final class ServicePrincipalCredential extends Credential { public ServicePrincipalCredential() { } + /** + * Get the type property: Type of credential. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Service Principal credential properties. * @@ -137,8 +159,9 @@ public ServicePrincipalCredential withTenant(Object tenant) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ServicePrincipalCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ServicePrincipalCredential")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredentialResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredentialResource.java new file mode 100644 index 0000000000000..3c32056ece78d --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredentialResource.java @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Credential resource type. + */ +@Fluent +public final class ServicePrincipalCredentialResource extends CredentialResourceInner { + /* + * Service Principal Credential properties. + */ + @JsonProperty(value = "properties", required = true) + private ServicePrincipalCredential properties; + + /** + * Creates an instance of ServicePrincipalCredentialResource class. + */ + public ServicePrincipalCredentialResource() { + } + + /** + * Get the properties property: Service Principal Credential properties. + * + * @return the properties value. + */ + public ServicePrincipalCredential properties() { + return this.properties; + } + + /** + * Set the properties property: Service Principal Credential properties. + * + * @param properties the properties value to set. + * @return the ServicePrincipalCredentialResource object itself. 
+ */ + public ServicePrincipalCredentialResource withProperties(ServicePrincipalCredential properties) { + this.properties = properties; + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public ServicePrincipalCredentialResource withId(String id) { + super.withId(id); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + super.validate(); + if (properties() == null) { + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property properties in model ServicePrincipalCredentialResource")); + } else { + properties().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(ServicePrincipalCredentialResource.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java index 93c409922d6e1..9499d79bce5ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SetVariableActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Set value for a Variable. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SetVariableActivity.class, visible = true) @JsonTypeName("SetVariable") @Fluent public final class SetVariableActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SetVariable"; + /* * Set Variable activity properties. */ @@ -37,6 +45,16 @@ public final class SetVariableActivity extends ControlActivity { public SetVariableActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Set Variable activity properties. 
* @@ -198,8 +216,9 @@ public SetVariableActivity withSetSystemVariable(Boolean setSystemVariable) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SetVariableActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SetVariableActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java index bfaf5fcb28227..3a472f5c10e97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The location of SFTP dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpLocation.class, visible = true) @JsonTypeName("SftpLocation") @Fluent public final class SftpLocation extends DatasetLocation { + /* + * Type of dataset storage location. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SftpLocation"; + /** * Creates an instance of SftpLocation class. */ public SftpLocation() { } + /** + * Get the type property: Type of dataset storage location. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java index 50844be7ca6be..2f11d2e07a81b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Sftp read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpReadSettings.class, visible = true) @JsonTypeName("SftpReadSettings") @Fluent public final class SftpReadSettings extends StoreReadSettings { /* - * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression - * with resultType boolean). 
+ * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SftpReadSettings"; + + /* + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "recursive") private Object recursive; @@ -42,22 +49,19 @@ public final class SftpReadSettings extends StoreReadSettings { private Object enablePartitionDiscovery; /* - * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType - * string). + * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* - * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to - * copy. Type: string (or Expression with resultType string). + * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ @JsonProperty(value = "fileListPath") private Object fileListPath; /* - * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or - * Expression with resultType boolean). + * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; @@ -75,8 +79,7 @@ public final class SftpReadSettings extends StoreReadSettings { private Object modifiedDatetimeEnd; /* - * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with - * resultType boolean). + * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableChunking") private Object disableChunking; @@ -88,8 +91,18 @@ public SftpReadSettings() { } /** - * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Get the type property: The read setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @return the recursive value. */ @@ -98,8 +111,8 @@ public Object recursive() { } /** - * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. - * Type: boolean (or Expression with resultType boolean). + * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type: + * boolean (or Expression with resultType boolean). * * @param recursive the recursive value to set. * @return the SftpReadSettings object itself. @@ -174,8 +187,8 @@ public SftpReadSettings withEnablePartitionDiscovery(Object enablePartitionDisco } /** - * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Get the partitionRootPath property: Specify the root path where partition discovery starts from. 
Type: string (or + * Expression with resultType string). * * @return the partitionRootPath value. */ @@ -184,8 +197,8 @@ public Object partitionRootPath() { } /** - * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string - * (or Expression with resultType string). + * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or + * Expression with resultType string). * * @param partitionRootPath the partitionRootPath value to set. * @return the SftpReadSettings object itself. @@ -196,8 +209,8 @@ public SftpReadSettings withPartitionRootPath(Object partitionRootPath) { } /** - * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @return the fileListPath value. */ @@ -206,8 +219,8 @@ public Object fileListPath() { } /** - * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured - * in the dataset) that you want to copy. Type: string (or Expression with resultType string). + * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in + * the dataset) that you want to copy. Type: string (or Expression with resultType string). * * @param fileListPath the fileListPath value to set. * @return the SftpReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java index 631918dcfaac9..57a342a721cf9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SftpServerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * A linked service for an SSH File Transfer Protocol (SFTP) server. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SftpServerLinkedService.class, + visible = true) @JsonTypeName("Sftp") @Fluent public final class SftpServerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Sftp"; + /* * Properties specific to this linked service type. */ @@ -32,6 +44,16 @@ public final class SftpServerLinkedService extends LinkedService { public SftpServerLinkedService() { } + /** + * Get the type property: Type of linked service. 
+ * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this linked service type. * @@ -197,8 +219,8 @@ public SftpServerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -207,8 +229,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SftpServerLinkedService object itself. @@ -224,8 +246,8 @@ public SftpServerLinkedService withEncryptedCredential(String encryptedCredentia /** * Get the privateKeyPath property: The SSH private key file path for SshPublicKey authentication. Only valid for * on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression - * with resultType string). + * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with + * resultType string). * * @return the privateKeyPath value. */ @@ -236,8 +258,8 @@ public Object privateKeyPath() { /** * Set the privateKeyPath property: The SSH private key file path for SshPublicKey authentication. Only valid for * on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression - * with resultType string). + * PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with + * resultType string). * * @param privateKeyPath the privateKeyPath value to set. * @return the SftpServerLinkedService object itself. 
@@ -359,8 +381,9 @@ public SftpServerLinkedService withHostKeyFingerprint(Object hostKeyFingerprint) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SftpServerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SftpServerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java index 15482d3a4c1f6..91420d291b2c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,20 +14,25 @@ /** * Sftp write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpWriteSettings.class, visible = true) @JsonTypeName("SftpWriteSettings") @Fluent public final class SftpWriteSettings extends StoreWriteSettings { /* - * Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string - * (or Expression with resultType string). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SftpWriteSettings"; + + /* + * Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). */ @JsonProperty(value = "operationTimeout") private Object operationTimeout; /* - * Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename - * operation. Type: boolean (or Expression with resultType boolean). + * Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useTempFileRename") private Object useTempFileRename; @@ -37,6 +43,16 @@ public final class SftpWriteSettings extends StoreWriteSettings { public SftpWriteSettings() { } + /** + * Get the type property: The write setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the operationTimeout property: Specifies the timeout for writing each chunk to SFTP server. Default value: * 01:00:00 (one hour). Type: string (or Expression with resultType string). 
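The hunks above replace the old include = JsonTypeInfo.As.PROPERTY discriminator with a visible "type" property bound to a @JsonTypeId field that carries a fixed default. A minimal round-trip sketch of that pattern, assuming a plain Jackson ObjectMapper rather than the SDK's own serializer adapter (the wrapper class name is illustrative only, not generated code):

    import com.azure.resourcemanager.datafactory.models.SftpWriteSettings;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class VisibleTypeIdSketch {
        public static void main(String[] args) throws Exception {
            // Plain mapper for illustration; the SDK configures its own serializer internally.
            ObjectMapper mapper = new ObjectMapper();

            // The @JsonTypeId field doubles as the NAME-based discriminator, so the payload
            // should carry "type":"SftpWriteSettings" exactly once rather than a duplicate property.
            String json = mapper.writeValueAsString(new SftpWriteSettings());
            System.out.println(json);

            // visible = true lets the discriminator value be bound back onto the field, and
            // defaultImpl = SftpWriteSettings.class acts as the fallback when the id is absent.
            SftpWriteSettings roundTripped = mapper.readValue(json, SftpWriteSettings.class);
            System.out.println(roundTripped.type()); // SftpWriteSettings
        }
    }
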
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java index 9b64aba64265f..420ed487b908b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * SharePoint Online List linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SharePointOnlineListLinkedService.class, + visible = true) @JsonTypeName("SharePointOnlineList") @Fluent public final class SharePointOnlineListLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SharePointOnlineList"; + /* * SharePoint Online List linked service properties. */ @@ -33,6 +45,16 @@ public final class SharePointOnlineListLinkedService extends LinkedService { public SharePointOnlineListLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SharePoint Online List linked service properties. * @@ -156,8 +178,8 @@ public SharePointOnlineListLinkedService withServicePrincipalId(Object servicePr } /** - * Get the servicePrincipalKey property: The client secret of your application registered in Azure Active - * Directory. Type: string (or Expression with resultType string). + * Get the servicePrincipalKey property: The client secret of your application registered in Azure Active Directory. + * Type: string (or Expression with resultType string). * * @return the servicePrincipalKey value. */ @@ -166,8 +188,8 @@ public SecretBase servicePrincipalKey() { } /** - * Set the servicePrincipalKey property: The client secret of your application registered in Azure Active - * Directory. Type: string (or Expression with resultType string). + * Set the servicePrincipalKey property: The client secret of your application registered in Azure Active Directory. + * Type: string (or Expression with resultType string). * * @param servicePrincipalKey the servicePrincipalKey value to set. * @return the SharePointOnlineListLinkedService object itself. @@ -181,8 +203,8 @@ public SharePointOnlineListLinkedService withServicePrincipalKey(SecretBase serv } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -191,8 +213,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SharePointOnlineListLinkedService object itself. @@ -214,8 +236,9 @@ public SharePointOnlineListLinkedService withEncryptedCredential(String encrypte public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SharePointOnlineListLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SharePointOnlineListLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java index 95fb843dc9791..c103ff1efdad1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * The sharepoint online list resource dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SharePointOnlineListResourceDataset.class, + visible = true) @JsonTypeName("SharePointOnlineListResource") @Fluent public final class SharePointOnlineListResourceDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SharePointOnlineListResource"; + /* * Sharepoint online list dataset properties. */ @@ -31,6 +43,16 @@ public final class SharePointOnlineListResourceDataset extends Dataset { public SharePointOnlineListResourceDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Sharepoint online list dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java index e7cf75cf04131..1a34ed495b80f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java @@ -6,26 +6,36 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for sharePoint online list source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SharePointOnlineListSource.class, + visible = true) @JsonTypeName("SharePointOnlineListSource") @Fluent public final class SharePointOnlineListSource extends CopySource { /* - * The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression - * with resultType string). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SharePointOnlineListSource"; + + /* + * The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). */ @JsonProperty(value = "query") private Object query; /* - * The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or - * Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; @@ -36,6 +46,16 @@ public final class SharePointOnlineListSource extends CopySource { public SharePointOnlineListSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: The OData query to filter the data in SharePoint Online list. For example, "$top=1". * Type: string (or Expression with resultType string). 
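The validate() hunks in these files swap LOGGER.logExceptionAsError(...) for LOGGER.atError().log(...); in both forms the exception is logged and then thrown, so callers still observe an IllegalArgumentException when a required nested property is missing. A caller-side sketch, assuming only the generated no-arg constructor (which leaves innerTypeProperties unset) and an illustrative wrapper class name:

    import com.azure.resourcemanager.datafactory.models.SharePointOnlineListLinkedService;

    public class ValidateSketch {
        public static void main(String[] args) {
            SharePointOnlineListLinkedService service = new SharePointOnlineListLinkedService();
            try {
                // innerTypeProperties was never populated, so validation is expected to fail.
                service.validate();
            } catch (IllegalArgumentException e) {
                // Message text as emitted by the generated model:
                // "Missing required property innerTypeProperties in model SharePointOnlineListLinkedService"
                System.out.println(e.getMessage());
            }
        }
    }
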
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java index 3e602d92b6e1c..f6eff44d1c06a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ShopifyLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Shopify Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifyLinkedService.class, visible = true) @JsonTypeName("Shopify") @Fluent public final class ShopifyLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Shopify"; + /* * Shopify Service linked service properties. */ @@ -32,6 +40,16 @@ public final class ShopifyLinkedService extends LinkedService { public ShopifyLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Shopify Service linked service properties. * @@ -176,8 +194,8 @@ public ShopifyLinkedService withUseHostVerification(Object useHostVerification) } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -186,8 +204,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ShopifyLinkedService object itself. @@ -201,8 +219,8 @@ public ShopifyLinkedService withUsePeerVerification(Object usePeerVerification) } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. 
*/ @@ -211,8 +229,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ShopifyLinkedService object itself. @@ -234,8 +252,9 @@ public ShopifyLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ShopifyLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ShopifyLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java index a78d81d5a9276..efb2611d16ae1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Shopify Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifyObjectDataset.class, visible = true) @JsonTypeName("ShopifyObject") @Fluent public final class ShopifyObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ShopifyObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class ShopifyObjectDataset extends Dataset { public ShopifyObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java index b3ef0dc6239bc..99c45c609ab2b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Shopify Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifySource.class, visible = true) @JsonTypeName("ShopifySource") @Fluent public final class ShopifySource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ShopifySource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ShopifySource extends TabularSource { public ShopifySource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java index d3292134b6010..3f0a9c8beedb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java @@ -13,15 +13,13 @@ @Fluent public final class SkipErrorFile { /* - * Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with - * resultType boolean). + * Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "fileMissing") private Object fileMissing; /* - * Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with - * resultType boolean). + * Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "dataInconsistency") private Object dataInconsistency; @@ -33,8 +31,8 @@ public SkipErrorFile() { } /** - * Get the fileMissing property: Skip if file is deleted by other client during copy. Default is true. Type: - * boolean (or Expression with resultType boolean). + * Get the fileMissing property: Skip if file is deleted by other client during copy. Default is true. Type: boolean + * (or Expression with resultType boolean). 
* * @return the fileMissing value. */ @@ -43,8 +41,8 @@ public Object fileMissing() { } /** - * Set the fileMissing property: Skip if file is deleted by other client during copy. Default is true. Type: - * boolean (or Expression with resultType boolean). + * Set the fileMissing property: Skip if file is deleted by other client during copy. Default is true. Type: boolean + * (or Expression with resultType boolean). * * @param fileMissing the fileMissing value to set. * @return the SkipErrorFile object itself. @@ -55,8 +53,8 @@ public SkipErrorFile withFileMissing(Object fileMissing) { } /** - * Get the dataInconsistency property: Skip if source/sink file changed by other concurrent write. Default is - * false. Type: boolean (or Expression with resultType boolean). + * Get the dataInconsistency property: Skip if source/sink file changed by other concurrent write. Default is false. + * Type: boolean (or Expression with resultType boolean). * * @return the dataInconsistency value. */ @@ -65,8 +63,8 @@ public Object dataInconsistency() { } /** - * Set the dataInconsistency property: Skip if source/sink file changed by other concurrent write. Default is - * false. Type: boolean (or Expression with resultType boolean). + * Set the dataInconsistency property: Skip if source/sink file changed by other concurrent write. Default is false. + * Type: boolean (or Expression with resultType boolean). * * @param dataInconsistency the dataInconsistency value to set. * @return the SkipErrorFile object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java index cbb5c1063c4b5..91dace1e278ff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SmartsheetLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Linked service for Smartsheet. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SmartsheetLinkedService.class, + visible = true) @JsonTypeName("Smartsheet") @Fluent public final class SmartsheetLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Smartsheet"; + /* * Smartsheet linked service properties. */ @@ -32,6 +44,16 @@ public final class SmartsheetLinkedService extends LinkedService { public SmartsheetLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Smartsheet linked service properties. 
* @@ -101,8 +123,8 @@ public SmartsheetLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -111,8 +133,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SmartsheetLinkedService object itself. @@ -134,8 +156,9 @@ public SmartsheetLinkedService withEncryptedCredential(String encryptedCredentia public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SmartsheetLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SmartsheetLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java index 36e2ab19b3a54..d848b7e19d44e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The snowflake dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeDataset.class, visible = true) @JsonTypeName("SnowflakeTable") @Fluent public final class SnowflakeDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeTable"; + /* * Snowflake dataset properties. */ @@ -32,6 +40,16 @@ public final class SnowflakeDataset extends Dataset { public SnowflakeDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Snowflake dataset properties. 
* @@ -163,8 +181,9 @@ public SnowflakeDataset withTable(Object table) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SnowflakeDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SnowflakeDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java index 223b3051f9d05..db88e43b1ebb9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.Map; @@ -14,23 +15,30 @@ /** * Snowflake export command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SnowflakeExportCopyCommand.class, + visible = true) @JsonTypeName("SnowflakeExportCopyCommand") @Fluent public final class SnowflakeExportCopyCommand extends ExportSettings { /* - * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string - * type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", - * "TIME_FORMAT": "'HH24:MI:SS.FF'" } + * The export setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeExportCopyCommand"; + + /* + * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ @JsonProperty(value = "additionalCopyOptions") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalCopyOptions; /* - * Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be - * string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", - * "MAX_FILE_SIZE": "'FALSE'" } + * Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). 
Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" } */ @JsonProperty(value = "additionalFormatOptions") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) @@ -42,6 +50,16 @@ public final class SnowflakeExportCopyCommand extends ExportSettings { public SnowflakeExportCopyCommand() { } + /** + * Get the type property: The export setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the additionalCopyOptions property: Additional copy options directly passed to snowflake Copy Command. Type: * key value pairs (value should be string type) (or Expression with resultType object). Example: diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java index adb5518fdd791..fb03877faf08c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.Map; @@ -14,23 +15,30 @@ /** * Snowflake import command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SnowflakeImportCopyCommand.class, + visible = true) @JsonTypeName("SnowflakeImportCopyCommand") @Fluent public final class SnowflakeImportCopyCommand extends ImportSettings { /* - * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string - * type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", - * "TIME_FORMAT": "'HH24:MI:SS.FF'" } + * The import setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeImportCopyCommand"; + + /* + * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ @JsonProperty(value = "additionalCopyOptions") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalCopyOptions; /* - * Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be - * string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", - * "LOAD_UNCERTAIN_FILES": "'FALSE'" } + * Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). 
Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" } */ @JsonProperty(value = "additionalFormatOptions") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) @@ -42,6 +50,16 @@ public final class SnowflakeImportCopyCommand extends ImportSettings { public SnowflakeImportCopyCommand() { } + /** + * Get the type property: The import setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the additionalCopyOptions property: Additional copy options directly passed to snowflake Copy Command. Type: * key value pairs (value should be string type) (or Expression with resultType object). Example: diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java index f4197c6700667..c82cf68e032ba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Snowflake linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeLinkedService.class, visible = true) @JsonTypeName("Snowflake") @Fluent public final class SnowflakeLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Snowflake"; + /* * Snowflake linked service properties. */ @@ -32,6 +40,16 @@ public final class SnowflakeLinkedService extends LinkedService { public SnowflakeLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Snowflake linked service properties. * @@ -124,8 +142,8 @@ public SnowflakeLinkedService withPassword(AzureKeyVaultSecretReference password } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -134,8 +152,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SnowflakeLinkedService object itself. @@ -157,8 +175,9 @@ public SnowflakeLinkedService withEncryptedCredential(String encryptedCredential public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SnowflakeLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SnowflakeLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java index 8a2a70589731f..317d88dd5cb65 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity snowflake sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeSink.class, visible = true) @JsonTypeName("SnowflakeSink") @Fluent public final class SnowflakeSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeSink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -34,6 +42,16 @@ public final class SnowflakeSink extends CopySink { public SnowflakeSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java index dca7a5eebdabf..067540e0defb1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java @@ -7,16 +7,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity snowflake source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeSource.class, visible = true) @JsonTypeName("SnowflakeSource") @Fluent public final class SnowflakeSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeSource"; + /* * Snowflake Sql query. Type: string (or Expression with resultType string). */ @@ -35,6 +43,16 @@ public final class SnowflakeSource extends CopySource { public SnowflakeSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Snowflake Sql query. Type: string (or Expression with resultType string). * @@ -120,8 +138,8 @@ public SnowflakeSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); if (exportSettings() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property exportSettings in model SnowflakeSource")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property exportSettings in model SnowflakeSource")); } else { exportSettings().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java index 98e825baf6aa6..8f1982f510b0f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The snowflake dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Dataset.class, visible = true) @JsonTypeName("SnowflakeV2Table") @Fluent public final class SnowflakeV2Dataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeV2Table"; + /* * Snowflake dataset properties. */ @@ -32,6 +40,16 @@ public final class SnowflakeV2Dataset extends Dataset { public SnowflakeV2Dataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Snowflake dataset properties. 
* @@ -163,8 +181,9 @@ public SnowflakeV2Dataset withTable(Object table) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SnowflakeV2Dataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SnowflakeV2Dataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java index 1d265a7806844..32f34fa22eeea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedV2ServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Snowflake linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SnowflakeV2LinkedService.class, + visible = true) @JsonTypeName("SnowflakeV2") @Fluent public final class SnowflakeV2LinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeV2"; + /* * Snowflake linked service properties. */ @@ -32,6 +44,16 @@ public final class SnowflakeV2LinkedService extends LinkedService { public SnowflakeV2LinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Snowflake linked service properties. * @@ -293,8 +315,8 @@ public SnowflakeV2LinkedService withTenantId(Object tenantId) { } /** - * Get the scope property: The scope of the application registered in Azure Active Directory for - * AADServicePrincipal authentication. + * Get the scope property: The scope of the application registered in Azure Active Directory for AADServicePrincipal + * authentication. * * @return the scope value. */ @@ -303,8 +325,8 @@ public Object scope() { } /** - * Set the scope property: The scope of the application registered in Azure Active Directory for - * AADServicePrincipal authentication. + * Set the scope property: The scope of the application registered in Azure Active Directory for AADServicePrincipal + * authentication. * * @param scope the scope value to set. * @return the SnowflakeV2LinkedService object itself. @@ -366,8 +388,8 @@ public SnowflakeV2LinkedService withPrivateKeyPassphrase(SecretBase privateKeyPa } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -376,8 +398,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SnowflakeV2LinkedService object itself. @@ -399,8 +421,9 @@ public SnowflakeV2LinkedService withEncryptedCredential(String encryptedCredenti public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SnowflakeV2LinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SnowflakeV2LinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java index d6ff885563f2f..f879e9d7d47fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity snowflake sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Sink.class, visible = true) @JsonTypeName("SnowflakeV2Sink") @Fluent public final class SnowflakeV2Sink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeV2Sink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -34,6 +42,16 @@ public final class SnowflakeV2Sink extends CopySink { public SnowflakeV2Sink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. Type: string (or Expression with resultType string). 
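The annotation change repeated across these models (dropping `include = JsonTypeInfo.As.PROPERTY` in favour of `visible = true`, a `defaultImpl`, and a concrete `type` field carrying `@JsonTypeId`) keeps the discriminator readable as an ordinary property after deserialization instead of letting Jackson consume it. A minimal, self-contained sketch of the same annotation combination, using hypothetical `Animal`/`Dog` classes rather than the generated models:

```java
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class VisibleTypeIdSketch {
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Dog.class, visible = true)
    @JsonSubTypes(@JsonSubTypes.Type(value = Dog.class, name = "Dog"))
    public static class Animal {
    }

    @JsonTypeName("Dog")
    public static class Dog extends Animal {
        // visible = true binds the discriminator to this field instead of discarding it,
        // and @JsonTypeId makes the field's value the type id on serialization.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        private String type = "Dog";

        public String type() {
            return this.type;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Dog dog = (Dog) mapper.readValue("{\"type\":\"Dog\"}", Animal.class);
        System.out.println(dog.type());                      // Dog
        System.out.println(mapper.writeValueAsString(dog));   // {"type":"Dog"}
    }
}
```

With `defaultImpl` set, a payload that omits `type` still resolves to the annotated class rather than failing, which is presumably why the generated classes set it.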
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java index 23d0a3d1d748c..1139a32014c41 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java @@ -7,16 +7,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity snowflake source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Source.class, visible = true) @JsonTypeName("SnowflakeV2Source") @Fluent public final class SnowflakeV2Source extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SnowflakeV2Source"; + /* * Snowflake Sql query. Type: string (or Expression with resultType string). */ @@ -35,6 +43,16 @@ public final class SnowflakeV2Source extends CopySource { public SnowflakeV2Source() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Snowflake Sql query. Type: string (or Expression with resultType string). 
* @@ -120,8 +138,9 @@ public SnowflakeV2Source withDisableMetricsCollection(Object disableMetricsColle public void validate() { super.validate(); if (exportSettings() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property exportSettings in model SnowflakeV2Source")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property exportSettings in model SnowflakeV2Source")); } else { exportSettings().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java index 691e165b444f6..dd4bc92ff0e70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java @@ -80,12 +80,14 @@ public SparkConfigurationParametrizationReference withReferenceName(Object refer */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property type in model SparkConfigurationParametrizationReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property type in model SparkConfigurationParametrizationReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model SparkConfigurationParametrizationReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model SparkConfigurationParametrizationReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java index 61eb975f4cd2d..b8838b9a7dff6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SparkLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Spark Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkLinkedService.class, visible = true) @JsonTypeName("Spark") @Fluent public final class SparkLinkedService extends LinkedService { + /* + * Type of linked service. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Spark"; + /* * Spark Server linked service properties. */ @@ -32,6 +40,16 @@ public final class SparkLinkedService extends LinkedService { public SparkLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Spark Server linked service properties. * @@ -287,9 +305,9 @@ public SparkLinkedService withEnableSsl(Object enableSsl) { } /** - * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Get the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @return the trustedCertPath value. */ @@ -298,9 +316,9 @@ public Object trustedCertPath() { } /** - * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for - * verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. - * The default value is the cacerts.pem file installed with the IR. + * Set the trustedCertPath property: The full path of the .pem file containing trusted CA certificates for verifying + * the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default + * value is the cacerts.pem file installed with the IR. * * @param trustedCertPath the trustedCertPath value to set. * @return the SparkLinkedService object itself. @@ -389,8 +407,8 @@ public SparkLinkedService withAllowSelfSignedServerCert(Object allowSelfSignedSe } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -399,8 +417,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SparkLinkedService object itself. 
@@ -422,8 +440,9 @@ public SparkLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SparkLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SparkLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java index 643ecc32fcacf..860bb4031aa8a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SparkDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Spark Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkObjectDataset.class, visible = true) @JsonTypeName("SparkObject") @Fluent public final class SparkObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SparkObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class SparkObjectDataset extends Dataset { public SparkObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java index 79314a24ba0f9..6a64833daf852 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Spark Server source. 
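The other change repeated in these `validate()` methods swaps `LOGGER.logExceptionAsError(...)` for the builder-style `LOGGER.atError().log(...)`. A small sketch of the same guard outside generated code, assuming the azure-core `LoggingEventBuilder.log(RuntimeException)` overload that the generated methods above rely on; `ExportGuard` and its `settings` field are hypothetical:

```java
import com.azure.core.util.logging.ClientLogger;

final class ExportGuard {
    private static final ClientLogger LOGGER = new ClientLogger(ExportGuard.class);

    private Object settings; // hypothetical required property

    void validate() {
        if (settings == null) {
            // log(RuntimeException) records the error and returns the exception,
            // so it can be thrown in the same expression, as in the generated code.
            throw LOGGER.atError()
                .log(new IllegalArgumentException("Missing required property settings in model ExportGuard"));
        }
    }
}
```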
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkSource.class, visible = true) @JsonTypeName("SparkSource") @Fluent public final class SparkSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SparkSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class SparkSource extends TabularSource { public SparkSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java index 00940382c8843..fd5654f1f8b10 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java @@ -20,8 +20,7 @@ public final class SqlAlwaysEncryptedProperties { private SqlAlwaysEncryptedAkvAuthType alwaysEncryptedAkvAuthType; /* - * The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string - * (or Expression with resultType string). + * The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string (or Expression with resultType string). */ @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; @@ -136,8 +135,9 @@ public SqlAlwaysEncryptedProperties withCredential(CredentialReference credentia */ public void validate() { if (alwaysEncryptedAkvAuthType() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property alwaysEncryptedAkvAuthType in model SqlAlwaysEncryptedProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property alwaysEncryptedAkvAuthType in model SqlAlwaysEncryptedProperties")); } if (servicePrincipalKey() != null) { servicePrincipalKey().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java index 21801816c9093..e977e3c30c4de 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL Data Warehouse sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlDWSink.class, visible = true) @JsonTypeName("SqlDWSink") @Fluent public final class SqlDWSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlDWSink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class SqlDWSink extends CopySink { private Object preCopyScript; /* - * Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression - * with resultType boolean). + * Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "allowPolyBase") private Object allowPolyBase; @@ -36,8 +43,7 @@ public final class SqlDWSink extends CopySink { private PolybaseSettings polyBaseSettings; /* - * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType - * boolean). + * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "allowCopyCommand") private Object allowCopyCommand; @@ -49,8 +55,7 @@ public final class SqlDWSink extends CopySink { private DWCopyCommandSettings copyCommandSettings; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; @@ -62,8 +67,7 @@ public final class SqlDWSink extends CopySink { private Object sqlWriterUseTableLock; /* - * Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType - * SqlDWWriteBehaviorEnum) + * Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum) */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -80,6 +84,16 @@ public final class SqlDWSink extends CopySink { public SqlDWSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. Type: string (or Expression with resultType string). 
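`SqlDWSink` exposes two bulk-loading switches, PolyBase (`allowPolyBase` with `polyBaseSettings`) and Copy Command (`allowCopyCommand` with `copyCommandSettings`), plus `tableOption`, which currently accepts only 'autoCreate'. A usage sketch under the assumption that the `with*` setters follow the fluent convention seen elsewhere in this patch; the setter names themselves are not quoted from it:

```java
import com.azure.resourcemanager.datafactory.models.PolybaseSettings;
import com.azure.resourcemanager.datafactory.models.SqlDWSink;

public final class SqlDwSinkSketch {
    public static SqlDWSink polyBaseSink() {
        // Assumed fluent setters mirroring the fields documented above.
        return new SqlDWSink()
            .withAllowPolyBase(true)                         // load via PolyBase when applicable
            .withPolyBaseSettings(new PolybaseSettings())    // default staging behaviour
            .withTableOption("autoCreate")                   // only supported value per the field docs
            .withPreCopyScript("TRUNCATE TABLE stage.Sales");
    }
}
```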
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java index 8f5750adeb194..2223c307dc0af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL Data Warehouse source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlDWSource.class, visible = true) @JsonTypeName("SqlDWSource") @Fluent public final class SqlDWSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlDWSource"; + /* * SQL Data Warehouse reader query. Type: string (or Expression with resultType string). */ @@ -23,30 +31,25 @@ public final class SqlDWSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as - * SqlReaderQuery. Type: string (or Expression with resultType string). + * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* - * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - * Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. */ @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). 
*/ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -63,6 +66,16 @@ public final class SqlDWSource extends TabularSource { public SqlDWSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL Data Warehouse reader query. Type: string (or Expression with resultType * string). @@ -86,8 +99,8 @@ public SqlDWSource withSqlReaderQuery(Object sqlReaderQuery) { } /** - * Get the sqlReaderStoredProcedureName property: Name of the stored procedure for a SQL Data Warehouse source. - * This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + * Get the sqlReaderStoredProcedureName property: Name of the stored procedure for a SQL Data Warehouse source. This + * cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). * * @return the sqlReaderStoredProcedureName value. */ @@ -96,8 +109,8 @@ public Object sqlReaderStoredProcedureName() { } /** - * Set the sqlReaderStoredProcedureName property: Name of the stored procedure for a SQL Data Warehouse source. - * This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + * Set the sqlReaderStoredProcedureName property: Name of the stored procedure for a SQL Data Warehouse source. This + * cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). * * @param sqlReaderStoredProcedureName the sqlReaderStoredProcedureName value to set. * @return the SqlDWSource object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java index a7b754042dbcb..b74467bfd0dd7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java @@ -19,8 +19,7 @@ public final class SqlDWUpsertSettings { private Object interimSchemaName; /* - * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of - * strings). + * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). */ @JsonProperty(value = "keys") private Object keys; @@ -54,8 +53,8 @@ public SqlDWUpsertSettings withInterimSchemaName(Object interimSchemaName) { } /** - * Get the keys property: Key column names for unique row identification. Type: array of strings (or Expression - * with resultType array of strings). + * Get the keys property: Key column names for unique row identification. Type: array of strings (or Expression with + * resultType array of strings). * * @return the keys value. */ @@ -64,8 +63,8 @@ public Object keys() { } /** - * Set the keys property: Key column names for unique row identification. Type: array of strings (or Expression - * with resultType array of strings). + * Set the keys property: Key column names for unique row identification. Type: array of strings (or Expression with + * resultType array of strings). * * @param keys the keys value to set. 
* @return the SqlDWUpsertSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java index ff1ae12653459..bad22325d22d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure SQL Managed Instance sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlMISink.class, visible = true) @JsonTypeName("SqlMISink") @Fluent public final class SqlMISink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlMISink"; + /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ @@ -47,8 +55,7 @@ public final class SqlMISink extends CopySink { private Object storedProcedureTableTypeParameterName; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; @@ -78,8 +85,18 @@ public SqlMISink() { } /** - * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @return the sqlWriterStoredProcedureName value. */ @@ -88,8 +105,8 @@ public Object sqlWriterStoredProcedureName() { } /** - * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @param sqlWriterStoredProcedureName the sqlWriterStoredProcedureName value to set. * @return the SqlMISink object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java index 66309a643289f..f843319f350ed 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Azure SQL Managed Instance source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlMISource.class, visible = true) @JsonTypeName("SqlMISource") @Fluent public final class SqlMISource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlMISource"; + /* * SQL reader query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class SqlMISource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as - * SqlReaderQuery. Type: string (or Expression with resultType string). + * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; @@ -36,9 +43,7 @@ public final class SqlMISource extends TabularSource { private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; @@ -50,8 +55,7 @@ public final class SqlMISource extends TabularSource { private Object produceAdditionalTypes; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -68,6 +72,16 @@ public final class SqlMISource extends TabularSource { public SqlMISource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL reader query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java index 5047336c5a31f..9e67707ebb964 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java @@ -13,25 +13,19 @@ @Fluent public final class SqlPartitionSettings { /* - * The name of the column in integer or datetime type that will be used for proceeding partitioning. If not - * specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or - * Expression with resultType string). + * The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of the partition column for partition range splitting. This value is used to decide the - * partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned - * and copied. Type: string (or Expression with resultType string). + * The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of the partition column for partition range splitting. This value is used to decide the - * partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned - * and copied. Type: string (or Expression with resultType string). + * The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). 
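`SqlPartitionSettings` holds the three inputs for parallel reads with the 'DynamicRange' partition option: the partition column plus the upper and lower bounds that fix the partition stride (the bounds do not filter rows). A wiring sketch; the `with*` setters here, and `withPartitionSettings` on the source, are assumed from the fluent convention, since only the fields appear in the patch:

```java
import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings;
import com.azure.resourcemanager.datafactory.models.SqlServerSource;

public final class DynamicRangeSketch {
    public static SqlServerSource partitionedSource() {
        // Bounds decide the stride; rows outside the range are still copied.
        SqlPartitionSettings partition = new SqlPartitionSettings()
            .withPartitionColumnName("OrderId")
            .withPartitionLowerBound("1")
            .withPartitionUpperBound("1000000");

        return new SqlServerSource()
            .withSqlReaderQuery("SELECT * FROM dbo.Orders")
            .withPartitionOption("DynamicRange")  // "None", "PhysicalPartitionsOfTable" or "DynamicRange"
            .withPartitionSettings(partition);
    }
}
```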
*/ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java index 807ac86594c18..879ac3bb4ebdb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * SQL Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerLinkedService.class, visible = true) @JsonTypeName("SqlServer") @Fluent public final class SqlServerLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlServer"; + /* * SQL Server linked service properties. */ @@ -32,6 +40,16 @@ public final class SqlServerLinkedService extends LinkedService { public SqlServerLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SQL Server linked service properties. * @@ -151,8 +169,8 @@ public SqlServerLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -161,8 +179,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SqlServerLinkedService object itself. 
@@ -207,8 +225,9 @@ public SqlServerLinkedService withAlwaysEncryptedSettings(SqlAlwaysEncryptedProp public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SqlServerLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SqlServerLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java index d4e9ad6e22605..4e66a556031aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL server sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerSink.class, visible = true) @JsonTypeName("SqlServerSink") @Fluent public final class SqlServerSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlServerSink"; + /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ @@ -47,8 +55,7 @@ public final class SqlServerSink extends CopySink { private Object storedProcedureTableTypeParameterName; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; @@ -78,8 +85,18 @@ public SqlServerSink() { } /** - * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @return the sqlWriterStoredProcedureName value. */ @@ -88,8 +105,8 @@ public Object sqlWriterStoredProcedureName() { } /** - * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @param sqlWriterStoredProcedureName the sqlWriterStoredProcedureName value to set. * @return the SqlServerSink object itself. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java index 6850fd69d71ce..52f58c7a94911 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerSource.class, visible = true) @JsonTypeName("SqlServerSource") @Fluent public final class SqlServerSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlServerSource"; + /* * SQL reader query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class SqlServerSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. - * Type: string (or Expression with resultType string). + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; @@ -36,9 +43,7 @@ public final class SqlServerSource extends TabularSource { private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; @@ -50,8 +55,7 @@ public final class SqlServerSource extends TabularSource { private Object produceAdditionalTypes; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -68,6 +72,16 @@ public final class SqlServerSource extends TabularSource { public SqlServerSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL reader query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java index c7d8123560018..0f28fe4cbb014 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerStoredProcedureActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,21 @@ /** * SQL stored procedure activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SqlServerStoredProcedureActivity.class, + visible = true) @JsonTypeName("SqlServerStoredProcedure") @Fluent public final class SqlServerStoredProcedureActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlServerStoredProcedure"; + /* * SQL stored procedure activity properties. */ @@ -32,6 +44,16 @@ public final class SqlServerStoredProcedureActivity extends ExecutionActivity { public SqlServerStoredProcedureActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: SQL stored procedure activity properties. * @@ -114,8 +136,7 @@ public SqlServerStoredProcedureActivity withUserProperties(List us } /** - * Get the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType - * string). + * Get the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType string). * * @return the storedProcedureName value. */ @@ -124,8 +145,7 @@ public Object storedProcedureName() { } /** - * Set the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType - * string). + * Set the storedProcedureName property: Stored procedure name. Type: string (or Expression with resultType string). * * @param storedProcedureName the storedProcedureName value to set. * @return the SqlServerStoredProcedureActivity object itself. 
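`SqlServerStoredProcedureActivity` pairs a required `storedProcedureName` with an optional `storedProcedureParameters` bag, both typed as `Object` so expressions are allowed. A construction sketch; `withName` is the inherited activity setter and the stored-procedure setters are assumed from the Javadoc above rather than quoted:

```java
import com.azure.resourcemanager.datafactory.models.SqlServerStoredProcedureActivity;
import java.util.Collections;

public final class StoredProcedureActivitySketch {
    public static SqlServerStoredProcedureActivity refreshActivity() {
        return new SqlServerStoredProcedureActivity()
            .withName("RefreshDimensions")
            .withStoredProcedureName("dbo.usp_RefreshDimensions")
            // Parameters follow the "{name: {value, type}}" shape described for these models.
            .withStoredProcedureParameters(
                Collections.singletonMap("RunDate", Collections.singletonMap("value", "2024-04-18")));
    }
}
```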
@@ -172,8 +192,9 @@ public SqlServerStoredProcedureActivity withStoredProcedureParameters(Object sto public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SqlServerStoredProcedureActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SqlServerStoredProcedureActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java index 8593f817fc850..393faeaf8b87a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The on-premises SQL Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerTableDataset.class, visible = true) @JsonTypeName("SqlServerTable") @Fluent public final class SqlServerTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlServerTable"; + /* * On-premises SQL Server dataset properties. */ @@ -31,6 +39,16 @@ public final class SqlServerTableDataset extends Dataset { public SqlServerTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: On-premises SQL Server dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java index b18b0431f5343..7e4c73c1ed820 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlSink.class, visible = true) @JsonTypeName("SqlSink") @Fluent public final class SqlSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlSink"; + /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ @@ -47,8 +55,7 @@ public final class SqlSink extends CopySink { private Object storedProcedureTableTypeParameterName; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; @@ -78,8 +85,18 @@ public SqlSink() { } /** - * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @return the sqlWriterStoredProcedureName value. */ @@ -88,8 +105,8 @@ public Object sqlWriterStoredProcedureName() { } /** - * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression - * with resultType string). + * Set the sqlWriterStoredProcedureName property: SQL writer stored procedure name. Type: string (or Expression with + * resultType string). * * @param sqlWriterStoredProcedureName the sqlWriterStoredProcedureName value to set. * @return the SqlSink object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java index 748903d1ff37c..0a8a8f438833e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity SQL source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlSource.class, visible = true) @JsonTypeName("SqlSource") @Fluent public final class SqlSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SqlSource"; + /* * SQL reader query. Type: string (or Expression with resultType string). 
*/ @@ -23,8 +31,7 @@ public final class SqlSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. - * Type: string (or Expression with resultType string). + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; @@ -36,16 +43,13 @@ public final class SqlSource extends TabularSource { private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the SQL source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -62,6 +66,16 @@ public final class SqlSource extends TabularSource { public SqlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: SQL reader query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java index fc1de6baedd9f..c91e3ddcf5785 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java @@ -13,8 +13,7 @@ @Fluent public final class SqlUpsertSettings { /* - * Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType - * boolean). + * Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "useTempDB") private Object useTempDB; @@ -26,8 +25,7 @@ public final class SqlUpsertSettings { private Object interimSchemaName; /* - * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of - * strings). + * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). 
*/ @JsonProperty(value = "keys") private Object keys; @@ -83,8 +81,8 @@ public SqlUpsertSettings withInterimSchemaName(Object interimSchemaName) { } /** - * Get the keys property: Key column names for unique row identification. Type: array of strings (or Expression - * with resultType array of strings). + * Get the keys property: Key column names for unique row identification. Type: array of strings (or Expression with + * resultType array of strings). * * @return the keys value. */ @@ -93,8 +91,8 @@ public Object keys() { } /** - * Set the keys property: Key column names for unique row identification. Type: array of strings (or Expression - * with resultType array of strings). + * Set the keys property: Key column names for unique row identification. Type: array of strings (or Expression with + * resultType array of strings). * * @param keys the keys value to set. * @return the SqlUpsertSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java index 37426cf53b043..b34f1af7d3002 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SquareLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Square Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareLinkedService.class, visible = true) @JsonTypeName("Square") @Fluent public final class SquareLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Square"; + /* * Square Service linked service properties. */ @@ -32,6 +40,16 @@ public final class SquareLinkedService extends LinkedService { public SquareLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Square Service linked service properties. * @@ -247,8 +265,8 @@ public SquareLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -257,8 +275,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. 
+ * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the SquareLinkedService object itself. @@ -272,8 +290,8 @@ public SquareLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -282,8 +300,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SquareLinkedService object itself. @@ -305,8 +323,9 @@ public SquareLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SquareLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SquareLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java index 097472e3caa56..46d8e9b0711b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Square Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareObjectDataset.class, visible = true) @JsonTypeName("SquareObject") @Fluent public final class SquareObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SquareObject"; + /* * Properties specific to this dataset type. 
*/ @@ -31,6 +39,16 @@ public final class SquareObjectDataset extends Dataset { public SquareObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java index 8d076434c0349..4662f5f3f61b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Square Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareSource.class, visible = true) @JsonTypeName("SquareSource") @Fluent public final class SquareSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SquareSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class SquareSource extends TabularSource { public SquareSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java index 96b4ba12ec334..b8b1a1f6b14a4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java @@ -106,16 +106,16 @@ public SsisAccessCredential withPassword(SecretBase password) { */ public void validate() { if (domain() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property domain in model SsisAccessCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property domain in model SsisAccessCredential")); } if (username() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property username in model SsisAccessCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property username in model SsisAccessCredential")); } if (password() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property password in model SsisAccessCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property password in model SsisAccessCredential")); } else { password().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java index b23122704a7e1..6b6ccecf8a44a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java @@ -134,12 +134,13 @@ public SsisChildPackage withPackageLastModifiedDate(String packageLastModifiedDa */ public void validate() { if (packagePath() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property packagePath in model SsisChildPackage")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property packagePath in model SsisChildPackage")); } if (packageContent() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property packageContent in model SsisChildPackage")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property packageContent in model SsisChildPackage")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java index cc94aee8d1843..95ab4daeba050 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,17 @@ /** * Ssis environment. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisEnvironment.class, visible = true) @JsonTypeName("Environment") @Fluent public final class SsisEnvironment extends SsisObjectMetadata { + /* + * Type of metadata. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private SsisObjectMetadataType type = SsisObjectMetadataType.ENVIRONMENT; + /* * Folder id which contains environment. */ @@ -35,6 +43,16 @@ public final class SsisEnvironment extends SsisObjectMetadata { public SsisEnvironment() { } + /** + * Get the type property: Type of metadata. + * + * @return the type value. + */ + @Override + public SsisObjectMetadataType type() { + return this.type; + } + /** * Get the folderId property: Folder id which contains environment. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java index f717d2e6a5cac..90ff35911f6b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java @@ -106,16 +106,18 @@ public SsisExecutionCredential withPassword(SecureString password) { */ public void validate() { if (domain() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property domain in model SsisExecutionCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property domain in model SsisExecutionCredential")); } if (username() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property username in model SsisExecutionCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property username in model SsisExecutionCredential")); } if (password() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property password in model SsisExecutionCredential")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property password in model SsisExecutionCredential")); } else { password().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java index a579bf5e5cb78..39f799c5fadfe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java @@ -54,8 +54,8 @@ public SsisExecutionParameter withValue(Object value) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model SsisExecutionParameter")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model SsisExecutionParameter")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java index e136845962268..931d6cf457a22 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java @@ -5,22 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Ssis folder. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisFolder.class, visible = true) @JsonTypeName("Folder") @Fluent public final class SsisFolder extends SsisObjectMetadata { + /* + * Type of metadata. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private SsisObjectMetadataType type = SsisObjectMetadataType.FOLDER; + /** * Creates an instance of SsisFolder class. */ public SsisFolder() { } + /** + * Get the type property: Type of metadata. + * + * @return the type value. 
+ */ + @Override + public SsisObjectMetadataType type() { + return this.type; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java index a83715767b5b6..6db88c9d12742 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java @@ -146,16 +146,17 @@ public SsisLogLocation withLogRefreshInterval(Object logRefreshInterval) { */ public void validate() { if (logPath() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property logPath in model SsisLogLocation")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property logPath in model SsisLogLocation")); } if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model SsisLogLocation")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model SsisLogLocation")); } if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model SsisLogLocation")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SsisLogLocation")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java index 0158417990a84..015c3e8f71e48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java @@ -7,17 +7,14 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * SSIS object metadata. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = SsisObjectMetadata.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisObjectMetadata.class, visible = true) @JsonTypeName("SsisObjectMetadata") @JsonSubTypes({ @JsonSubTypes.Type(name = "Folder", value = SsisFolder.class), @@ -26,6 +23,13 @@ @JsonSubTypes.Type(name = "Environment", value = SsisEnvironment.class) }) @Fluent public class SsisObjectMetadata { + /* + * Type of metadata. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private SsisObjectMetadataType type; + /* * Metadata id. */ @@ -48,6 +52,16 @@ public class SsisObjectMetadata { * Creates an instance of SsisObjectMetadata class. 
*/ public SsisObjectMetadata() { + this.type = SsisObjectMetadataType.fromString("SsisObjectMetadata"); + } + + /** + * Get the type property: Type of metadata. + * + * @return the type value. + */ + public SsisObjectMetadataType type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java index d8907690bc43f..3f6a65a4ea6d2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,17 @@ /** * Ssis Package. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisPackage.class, visible = true) @JsonTypeName("Package") @Fluent public final class SsisPackage extends SsisObjectMetadata { + /* + * Type of metadata. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private SsisObjectMetadataType type = SsisObjectMetadataType.PACKAGE; + /* * Folder id which contains package. */ @@ -47,6 +55,16 @@ public final class SsisPackage extends SsisObjectMetadata { public SsisPackage() { } + /** + * Get the type property: Type of metadata. + * + * @return the type value. + */ + @Override + public SsisObjectMetadataType type() { + return this.type; + } + /** * Get the folderId property: Folder id which contains package. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java index b2dd6cadbed5d..c523f6011a80b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java @@ -6,6 +6,7 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -13,10 +14,17 @@ /** * Ssis project. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisProject.class, visible = true) @JsonTypeName("Project") @Fluent public final class SsisProject extends SsisObjectMetadata { + /* + * Type of metadata. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private SsisObjectMetadataType type = SsisObjectMetadataType.PROJECT; + /* * Folder id which contains project. 
*/ @@ -47,6 +55,16 @@ public final class SsisProject extends SsisObjectMetadata { public SsisProject() { } + /** + * Get the type property: Type of metadata. + * + * @return the type value. + */ + @Override + public SsisObjectMetadataType type() { + return this.type; + } + /** * Get the folderId property: Folder id which contains project. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java index c01a76e10b043..e7bba79d4cf9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java @@ -82,8 +82,8 @@ public SsisPropertyOverride withIsSensitive(Boolean isSensitive) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model SsisPropertyOverride")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model SsisPropertyOverride")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java index 4298f1706fecb..14929635984ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java @@ -31,8 +31,7 @@ public final class StagingSettings { private Object path; /* - * Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: - * boolean (or Expression with resultType boolean). + * Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). 
*/ @JsonProperty(value = "enableCompression") private Object enableCompression; @@ -149,8 +148,9 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { if (linkedServiceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property linkedServiceName in model StagingSettings")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property linkedServiceName in model StagingSettings")); } else { linkedServiceName().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java index 9d3b57c6374ba..b716bf87ce854 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -18,11 +19,7 @@ /** * Connector read setting. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = StoreReadSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StoreReadSettings.class, visible = true) @JsonTypeName("StoreReadSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "AzureBlobStorageReadSettings", value = AzureBlobStorageReadSettings.class), @@ -42,15 +39,20 @@ @Fluent public class StoreReadSettings { /* - * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType - * integer). + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + + /* + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* - * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType - * boolean). + * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; @@ -65,6 +67,16 @@ public class StoreReadSettings { * Creates an instance of StoreReadSettings class. */ public StoreReadSettings() { + this.type = "StoreReadSettings"; + } + + /** + * Get the type property: The read setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java index 3bb6c40acb62b..5cb45e5b3c477 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -19,11 +20,7 @@ /** * Connector write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = StoreWriteSettings.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StoreWriteSettings.class, visible = true) @JsonTypeName("StoreWriteSettings") @JsonSubTypes({ @JsonSubTypes.Type(name = "SftpWriteSettings", value = SftpWriteSettings.class), @@ -36,15 +33,20 @@ @Fluent public class StoreWriteSettings { /* - * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType - * integer). + * The write setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + + /* + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* - * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType - * boolean). + * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; @@ -56,8 +58,7 @@ public class StoreWriteSettings { private Object copyBehavior; /* - * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType - * array of objects). + * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). */ @JsonProperty(value = "metadata") private List metadata; @@ -72,6 +73,16 @@ public class StoreWriteSettings { * Creates an instance of StoreWriteSettings class. */ public StoreWriteSettings() { + this.type = "StoreWriteSettings"; + } + + /** + * Get the type property: The write setting type. + * + * @return the type value. 
+ */ + public String type() { + return this.type; } /** diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java index b0e2110e45579..f44b3cfe76c39 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SwitchActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ * This activity evaluates an expression and executes activities under the cases property that correspond to the * expression evaluation expected in the equals property. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SwitchActivity.class, visible = true) @JsonTypeName("Switch") @Fluent public final class SwitchActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Switch"; + /* * Switch activity properties. */ @@ -32,6 +40,16 @@ public final class SwitchActivity extends ControlActivity { public SwitchActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Switch activity properties. 
* @@ -179,8 +197,9 @@ public SwitchActivity withDefaultActivities(List defaultActivities) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model SwitchActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SwitchActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java index 7c72930311162..6709031c34cae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SybaseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Sybase data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseLinkedService.class, visible = true) @JsonTypeName("Sybase") @Fluent public final class SybaseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Sybase"; + /* * Sybase linked service properties. */ @@ -32,6 +40,16 @@ public final class SybaseLinkedService extends LinkedService { public SybaseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Sybase linked service properties. * @@ -216,8 +234,8 @@ public SybaseLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -226,8 +244,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the SybaseLinkedService object itself. 
@@ -249,8 +267,9 @@ public SybaseLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SybaseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SybaseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java index 63282ea2ad9c8..070ce7cb7f59d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for Sybase databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseSource.class, visible = true) @JsonTypeName("SybaseSource") @Fluent public final class SybaseSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SybaseSource"; + /* * Database query. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class SybaseSource extends TabularSource { public SybaseSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Database query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java index a5b335585cc92..c5649d5c26203 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.SybaseTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Sybase table dataset. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseTableDataset.class, visible = true) @JsonTypeName("SybaseTable") @Fluent public final class SybaseTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SybaseTable"; + /* * Sybase table dataset properties. */ @@ -31,6 +39,16 @@ public final class SybaseTableDataset extends Dataset { public SybaseTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Sybase table dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java index 818ef68796c48..384ef6bd149d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SynapseNotebookActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Execute Synapse notebook activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SynapseNotebookActivity.class, + visible = true) @JsonTypeName("SynapseNotebook") @Fluent public final class SynapseNotebookActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SynapseNotebook"; + /* * Execute Synapse notebook activity properties. */ @@ -32,6 +44,16 @@ public final class SynapseNotebookActivity extends ExecutionActivity { public SynapseNotebookActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Execute Synapse notebook activity properties. 
* @@ -365,8 +387,9 @@ public SynapseNotebookActivity withSparkConfig(Map sparkConfig) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SynapseNotebookActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SynapseNotebookActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java index 16b8a97ece141..aefa62cb71b32 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java @@ -78,12 +78,13 @@ public SynapseNotebookReference withReferenceName(Object referenceName) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model SynapseNotebookReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model SynapseNotebookReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model SynapseNotebookReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model SynapseNotebookReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java index 79e4fa6e333d5..930f696f979e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.SynapseSparkJobActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,21 @@ /** * Execute spark job activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = SynapseSparkJobDefinitionActivity.class, + visible = true) @JsonTypeName("SparkJob") @Fluent public final class SynapseSparkJobDefinitionActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "SparkJob"; + /* * Execute spark job activity properties. 
*/ @@ -32,6 +44,16 @@ public final class SynapseSparkJobDefinitionActivity extends ExecutionActivity { public SynapseSparkJobDefinitionActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Execute spark job activity properties. * @@ -160,8 +182,8 @@ public SynapseSparkJobDefinitionActivity withArguments(List arguments) { } /** - * Get the file property: The main file used for the job, which will override the 'file' of the spark job - * definition you provide. Type: string (or Expression with resultType string). + * Get the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). * * @return the file value. */ @@ -170,8 +192,8 @@ public Object file() { } /** - * Set the file property: The main file used for the job, which will override the 'file' of the spark job - * definition you provide. Type: string (or Expression with resultType string). + * Set the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). * * @param file the file value to set. * @return the SynapseSparkJobDefinitionActivity object itself. @@ -186,8 +208,8 @@ public SynapseSparkJobDefinitionActivity withFile(Object file) { /** * Get the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files - * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, - * and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). + * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and + * the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). * * @return the scanFolder value. */ @@ -197,8 +219,8 @@ public Object scanFolder() { /** * Set the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files - * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, - * and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). + * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and + * the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). * * @param scanFolder the scanFolder value to set. * @return the SynapseSparkJobDefinitionActivity object itself. @@ -212,9 +234,9 @@ public SynapseSparkJobDefinitionActivity withScanFolder(Object scanFolder) { } /** - * Get the className property: The fully-qualified identifier or the main class that is in the main definition - * file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression - * with resultType string). + * Get the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). * * @return the className value. 
*/ @@ -223,9 +245,9 @@ public Object className() { } /** - * Set the className property: The fully-qualified identifier or the main class that is in the main definition - * file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression - * with resultType string). + * Set the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). * * @param className the className value to set. * @return the SynapseSparkJobDefinitionActivity object itself. @@ -367,8 +389,8 @@ public SynapseSparkJobDefinitionActivity withExecutorSize(Object executorSize) { } /** - * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job - * definition you provide. + * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. * * @return the conf value. */ @@ -377,8 +399,8 @@ public Object conf() { } /** - * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job - * definition you provide. + * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. * * @param conf the conf value to set. * @return the SynapseSparkJobDefinitionActivity object itself. @@ -419,8 +441,8 @@ public SynapseSparkJobDefinitionActivity withDriverSize(Object driverSize) { } /** - * Get the numExecutors property: Number of executors to launch for this job, which will override the - * 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). + * Get the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. Type: integer (or Expression with resultType integer). * * @return the numExecutors value. */ @@ -429,8 +451,8 @@ public Object numExecutors() { } /** - * Set the numExecutors property: Number of executors to launch for this job, which will override the - * 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). + * Set the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. Type: integer (or Expression with resultType integer). * * @param numExecutors the numExecutors value to set. * @return the SynapseSparkJobDefinitionActivity object itself. 
@@ -522,8 +544,9 @@ public SynapseSparkJobDefinitionActivity withSparkConfig(Map spa public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model SynapseSparkJobDefinitionActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model SynapseSparkJobDefinitionActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java index 1704af457b3c1..a95ec756afa58 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java @@ -78,12 +78,13 @@ public SynapseSparkJobReference withReferenceName(Object referenceName) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model SynapseSparkJobReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model SynapseSparkJobReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceName in model SynapseSparkJobReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceName in model SynapseSparkJobReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java index 4dd18c2e1090a..a943085dc6dd3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java @@ -7,17 +7,14 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Copy activity sources of tabular type. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = TabularSource.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TabularSource.class, visible = true) @JsonTypeName("TabularSource") @JsonSubTypes({ @JsonSubTypes.Type(name = "AzureTableSource", value = AzureTableSource.class), @@ -87,15 +84,20 @@ @Fluent public class TabularSource extends CopySource { /* - * Query timeout. Type: string (or Expression with resultType string), pattern: - * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TabularSource"; + + /* + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -106,6 +108,16 @@ public class TabularSource extends CopySource { public TabularSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the queryTimeout property: Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java index 044b4c6d0a483..ce57d4eef72b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java @@ -6,60 +6,56 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity tabular translator. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TabularTranslator.class, visible = true) @JsonTypeName("TabularTranslator") @Fluent public final class TabularTranslator extends CopyTranslator { /* - * Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with - * resultType string). This property will be retired. Please use mappings property. + * Copy translator type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TabularTranslator"; + + /* + * Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. */ @JsonProperty(value = "columnMappings") private Object columnMappings; /* - * The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", - * "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType - * object). This property will be retired. Please use mappings property. + * The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. 
Please use mappings property. */ @JsonProperty(value = "schemaMapping") private Object schemaMapping; /* - * The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType - * object). + * The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). */ @JsonProperty(value = "collectionReference") private Object collectionReference; /* - * Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression - * with resultType boolean). + * Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "mapComplexValuesToString") private Object mapComplexValuesToString; /* - * Column mappings with logical types. Tabular->tabular example: - * [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{ - * "name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. - * Hierarchical->tabular example: - * [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{ - * "path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. - * Type: object (or Expression with resultType object). + * Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). */ @JsonProperty(value = "mappings") private Object mappings; /* - * Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with - * resultType boolean). + * Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "typeConversion") private Object typeConversion; @@ -77,9 +73,18 @@ public TabularTranslator() { } /** - * Get the columnMappings property: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" - * Type: string (or Expression with resultType string). This property will be retired. Please use mappings - * property. + * Get the type property: Copy translator type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + + /** + * Get the columnMappings property: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: + * string (or Expression with resultType string). This property will be retired. Please use mappings property. * * @return the columnMappings value. */ @@ -88,9 +93,8 @@ public Object columnMappings() { } /** - * Set the columnMappings property: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" - * Type: string (or Expression with resultType string). This property will be retired. Please use mappings - * property. + * Set the columnMappings property: Column mappings. 
Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: + * string (or Expression with resultType string). This property will be retired. Please use mappings property. * * @param columnMappings the columnMappings value to set. * @return the TabularTranslator object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java index db3c661dcd000..9750e1392f9be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The TarGZip compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TarGZipReadSettings.class, visible = true) @JsonTypeName("TarGZipReadSettings") @Fluent public final class TarGZipReadSettings extends CompressionReadSettings { + /* + * The Compression setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TarGZipReadSettings"; + /* * Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */ @@ -28,6 +36,16 @@ public final class TarGZipReadSettings extends CompressionReadSettings { public TarGZipReadSettings() { } + /** + * Get the type property: The Compression setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preserveCompressionFileNameAsFolder property: Preserve the compression file name as folder path. Type: * boolean (or Expression with resultType boolean). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java index 2e893c7a3940c..c78a2241fce28 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The Tar compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TarReadSettings.class, visible = true) @JsonTypeName("TarReadSettings") @Fluent public final class TarReadSettings extends CompressionReadSettings { + /* + * The Compression setting type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TarReadSettings"; + /* * Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */ @@ -28,6 +36,16 @@ public final class TarReadSettings extends CompressionReadSettings { public TarReadSettings() { } + /** + * Get the type property: The Compression setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preserveCompressionFileNameAsFolder property: Preserve the compression file name as folder path. Type: * boolean (or Expression with resultType boolean). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java index b9379d6e03c35..9efde035249d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.TeamDeskLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for TeamDesk. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeamDeskLinkedService.class, visible = true) @JsonTypeName("TeamDesk") @Fluent public final class TeamDeskLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TeamDesk"; + /* * TeamDesk linked service properties. */ @@ -32,6 +40,16 @@ public final class TeamDeskLinkedService extends LinkedService { public TeamDeskLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: TeamDesk linked service properties. * @@ -195,8 +213,8 @@ public TeamDeskLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -205,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the TeamDeskLinkedService object itself. @@ -228,8 +246,9 @@ public TeamDeskLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model TeamDeskLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model TeamDeskLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java index 8ffda58e79366..1d3cc30923cfc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.TeradataLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Teradata data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataLinkedService.class, visible = true) @JsonTypeName("Teradata") @Fluent public final class TeradataLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Teradata"; + /* * Teradata linked service properties. */ @@ -32,6 +40,16 @@ public final class TeradataLinkedService extends LinkedService { public TeradataLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Teradata linked service properties. * @@ -195,8 +213,8 @@ public TeradataLinkedService withPassword(SecretBase password) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -205,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. 
* * @param encryptedCredential the encryptedCredential value to set. * @return the TeradataLinkedService object itself. @@ -228,8 +246,9 @@ public TeradataLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model TeradataLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model TeradataLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java index 8ec5a372bb911..0652490218e74 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java @@ -13,22 +13,19 @@ @Fluent public final class TeradataPartitionSettings { /* - * The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression - * with resultType string). + * The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* - * The maximum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* - * The minimum value of column specified in partitionColumnName that will be used for proceeding range - * partitioning. Type: string (or Expression with resultType string). + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */ @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java index 5ac8c34df2d75..d1a9a14e4b7a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Teradata source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataSource.class, visible = true) @JsonTypeName("TeradataSource") @Fluent public final class TeradataSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TeradataSource"; + /* * Teradata query. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class TeradataSource extends TabularSource { private Object query; /* - * The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", - * "Hash", "DynamicRange". + * The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -41,6 +48,16 @@ public final class TeradataSource extends TabularSource { public TeradataSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: Teradata query. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java index 31dc0e433540e..773f68d4712e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.TeradataTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * The Teradata database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataTableDataset.class, visible = true) @JsonTypeName("TeradataTable") @Fluent public final class TeradataTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TeradataTable"; + /* * Teradata dataset properties. */ @@ -31,6 +39,16 @@ public final class TeradataTableDataset extends Dataset { public TeradataTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Teradata dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java index de0e1bd2f491c..6ff9077d1424f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The data stored in text format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TextFormat.class, visible = true) @JsonTypeName("TextFormat") @Fluent public final class TextFormat extends DatasetStorageFormat { + /* + * Type of dataset storage format. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TextFormat"; + /* * The column delimiter. Type: string (or Expression with resultType string). */ @@ -47,32 +55,25 @@ public final class TextFormat extends DatasetStorageFormat { private Object nullValue; /* - * The code page name of the preferred encoding. If miss, the default value is ΓÇ£utf-8ΓÇ¥, unless BOM denotes - * another Unicode encoding. Refer to the ΓÇ£NameΓÇ¥ column of the table in the following link to set supported - * values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - * resultType string). + * The code page name of the preferred encoding. If miss, the default value is ΓÇ£utf-8ΓÇ¥, unless BOM denotes another Unicode encoding. Refer to the ΓÇ£NameΓÇ¥ column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */ @JsonProperty(value = "encodingName") private Object encodingName; /* - * Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with - * resultType boolean). + * Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "treatEmptyAsNull") private Object treatEmptyAsNull; /* - * The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or - * Expression with resultType integer). + * The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). */ @JsonProperty(value = "skipLineCount") private Object skipLineCount; /* - * When used as input, treat the first row of data as headers. When used as output,write the headers into the - * output as the first row of data. The default value is false. Type: boolean (or Expression with resultType - * boolean). + * When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). 
*/ @JsonProperty(value = "firstRowAsHeader") private Object firstRowAsHeader; @@ -83,6 +84,16 @@ public final class TextFormat extends DatasetStorageFormat { public TextFormat() { } + /** + * Get the type property: Type of dataset storage format. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the columnDelimiter property: The column delimiter. Type: string (or Expression with resultType string). * @@ -210,8 +221,8 @@ public TextFormat withEncodingName(Object encodingName) { } /** - * Get the treatEmptyAsNull property: Treat empty column values in the text file as null. The default value is - * true. Type: boolean (or Expression with resultType boolean). + * Get the treatEmptyAsNull property: Treat empty column values in the text file as null. The default value is true. + * Type: boolean (or Expression with resultType boolean). * * @return the treatEmptyAsNull value. */ @@ -220,8 +231,8 @@ public Object treatEmptyAsNull() { } /** - * Set the treatEmptyAsNull property: Treat empty column values in the text file as null. The default value is - * true. Type: boolean (or Expression with resultType boolean). + * Set the treatEmptyAsNull property: Treat empty column values in the text file as null. The default value is true. + * Type: boolean (or Expression with resultType boolean). * * @param treatEmptyAsNull the treatEmptyAsNull value to set. * @return the TextFormat object itself. @@ -232,8 +243,8 @@ public TextFormat withTreatEmptyAsNull(Object treatEmptyAsNull) { } /** - * Get the skipLineCount property: The number of lines/rows to be skipped when parsing text files. The default - * value is 0. Type: integer (or Expression with resultType integer). + * Get the skipLineCount property: The number of lines/rows to be skipped when parsing text files. The default value + * is 0. Type: integer (or Expression with resultType integer). * * @return the skipLineCount value. */ @@ -242,8 +253,8 @@ public Object skipLineCount() { } /** - * Set the skipLineCount property: The number of lines/rows to be skipped when parsing text files. The default - * value is 0. Type: integer (or Expression with resultType integer). + * Set the skipLineCount property: The number of lines/rows to be skipped when parsing text files. The default value + * is 0. Type: integer (or Expression with resultType integer). * * @param skipLineCount the skipLineCount value to set. * @return the TextFormat object itself. 
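// The validate() hunks in this patch replace LOGGER.logExceptionAsError(...) with LOGGER.atError().log(...),
// which returns the supplied throwable so it can be thrown directly. Below is a minimal sketch of that
// required-property check, mirroring only the calls shown in the diff; HypotheticalReference and its
// referenceName property are illustrative, not SDK members.
import com.azure.core.util.logging.ClientLogger;

final class HypotheticalReference {
    private static final ClientLogger LOGGER = new ClientLogger(HypotheticalReference.class);

    private String referenceName;

    public String referenceName() {
        return this.referenceName;
    }

    public HypotheticalReference withReferenceName(String referenceName) {
        this.referenceName = referenceName;
        return this;
    }

    // Required-property check: the builder-style log(...) hands back the exception,
    // so the generated code can log and throw in a single statement.
    public void validate() {
        if (referenceName() == null) {
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property referenceName in model HypotheticalReference"));
        }
    }
}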
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java index 419c335de7de6..8bf803148e01f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java @@ -156,8 +156,8 @@ public Transformation withFlowlet(DataFlowReference flowlet) { */ public void validate() { if (name() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property name in model Transformation")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property name in model Transformation")); } if (dataset() != null) { dataset().validate(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java index 8d8c1caed14cb..c40dfebf7a05b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.HashMap; @@ -19,11 +20,7 @@ /** * Azure data factory nested object which contains information about creating pipeline run. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, - property = "type", - defaultImpl = Trigger.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Trigger.class, visible = true) @JsonTypeName("Trigger") @JsonSubTypes({ @JsonSubTypes.Type(name = "MultiplePipelineTrigger", value = MultiplePipelineTrigger.class), @@ -32,6 +29,13 @@ @JsonSubTypes.Type(name = "ChainingTrigger", value = ChainingTrigger.class) }) @Fluent public class Trigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type; + /* * Trigger description. */ @@ -60,6 +64,16 @@ public class Trigger { * Creates an instance of Trigger class. */ public Trigger() { + this.type = "Trigger"; + } + + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + public String type() { + return this.type; } /** @@ -83,8 +97,8 @@ public Trigger withDescription(String description) { } /** - * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called - * on the Trigger. + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. * * @return the runtimeState value. 
*/ @@ -113,8 +127,8 @@ public Trigger withAnnotations(List annotations) { } /** - * Get the additionalProperties property: Azure data factory nested object which contains information about - * creating pipeline run. + * Get the additionalProperties property: Azure data factory nested object which contains information about creating + * pipeline run. * * @return the additionalProperties value. */ @@ -124,8 +138,8 @@ public Map additionalProperties() { } /** - * Set the additionalProperties property: Azure data factory nested object which contains information about - * creating pipeline run. + * Set the additionalProperties property: Azure data factory nested object which contains information about creating + * pipeline run. * * @param additionalProperties the additionalProperties value to set. * @return the Trigger object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java index d44d37f22d81d..9bc7451be39ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; @@ -16,9 +17,9 @@ */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "type", - defaultImpl = TriggerDependencyReference.class) + defaultImpl = TriggerDependencyReference.class, + visible = true) @JsonTypeName("TriggerDependencyReference") @JsonSubTypes({ @JsonSubTypes.Type( @@ -26,6 +27,13 @@ value = TumblingWindowTriggerDependencyReference.class) }) @Fluent public class TriggerDependencyReference extends DependencyReference { + /* + * The type of dependency reference. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TriggerDependencyReference"; + /* * Referenced trigger. */ @@ -38,6 +46,16 @@ public class TriggerDependencyReference extends DependencyReference { public TriggerDependencyReference() { } + /** + * Get the type property: The type of dependency reference. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the referenceTrigger property: Referenced trigger. 
* @@ -67,8 +85,9 @@ public TriggerDependencyReference withReferenceTrigger(TriggerReference referenc public void validate() { super.validate(); if (referenceTrigger() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property referenceTrigger in model TriggerDependencyReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property referenceTrigger in model TriggerDependencyReference")); } else { referenceTrigger().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java index b6ded370945a1..43e6bb5828d40 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java @@ -53,8 +53,7 @@ public TriggerFilterParameters withContinuationToken(String continuationToken) { } /** - * Get the parentTriggerName property: The name of the parent TumblingWindowTrigger to get the child rerun - * triggers. + * Get the parentTriggerName property: The name of the parent TumblingWindowTrigger to get the child rerun triggers. * * @return the parentTriggerName value. */ @@ -63,8 +62,7 @@ public String parentTriggerName() { } /** - * Set the parentTriggerName property: The name of the parent TumblingWindowTrigger to get the child rerun - * triggers. + * Set the parentTriggerName property: The name of the parent TumblingWindowTrigger to get the child rerun triggers. * * @param parentTriggerName the parentTriggerName value to set. * @return the TriggerFilterParameters object itself. 
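// Base types above, such as Trigger and TriggerDependencyReference, keep @JsonSubTypes together with a
// defaultImpl: a registered discriminator resolves to the concrete subtype, while an unregistered one falls
// back to the base class rather than failing. A small hypothetical illustration; BaseTrigger and
// WindowTrigger are stand-ins, not SDK types.
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BaseTrigger.class)
@JsonTypeName("BaseTrigger")
@JsonSubTypes({ @JsonSubTypes.Type(name = "WindowTrigger", value = WindowTrigger.class) })
class BaseTrigger {
}

@JsonTypeName("WindowTrigger")
class WindowTrigger extends BaseTrigger {
}

final class DefaultImplDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Registered discriminator -> concrete subtype.
        BaseTrigger known = mapper.readValue("{\"type\":\"WindowTrigger\"}", BaseTrigger.class);
        // Unregistered discriminator -> defaultImpl (the base class), not an exception.
        BaseTrigger unknown = mapper.readValue("{\"type\":\"SomeFutureTrigger\"}", BaseTrigger.class);
        System.out.println(known.getClass().getSimpleName());   // WindowTrigger
        System.out.println(unknown.getClass().getSimpleName()); // BaseTrigger
    }
}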
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java index e1eaf01a7becb..659ab5fd3514f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java @@ -80,8 +80,8 @@ public TriggerListResponse withNextLink(String nextLink) { */ public void validate() { if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model TriggerListResponse")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model TriggerListResponse")); } else { value().forEach(e -> e.validate()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java index a46e1a23b92ad..9d7a076ccaa8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java @@ -78,12 +78,12 @@ public TriggerReference withReferenceName(String referenceName) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model TriggerReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model TriggerReference")); } if (referenceName() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property referenceName in model TriggerReference")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property referenceName in model TriggerReference")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java index 6f0e99df5dc1d..a9e2754892c11 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.TumblingWindowTriggerTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.time.OffsetDateTime; @@ -17,10 +18,17 @@ * Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also * supports backfill scenarios (when start time is in the past). 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TumblingWindowTrigger.class, visible = true) @JsonTypeName("TumblingWindowTrigger") @Fluent public final class TumblingWindowTrigger extends Trigger { + /* + * Trigger type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TumblingWindowTrigger"; + /* * Pipeline for which runs are created when an event is fired for trigger window that is ready. */ @@ -39,6 +47,16 @@ public final class TumblingWindowTrigger extends Trigger { public TumblingWindowTrigger() { } + /** + * Get the type property: Trigger type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the pipeline property: Pipeline for which runs are created when an event is fired for trigger window that is * ready. @@ -291,14 +309,15 @@ public TumblingWindowTrigger withDependsOn(List dependsOn) public void validate() { super.validate(); if (pipeline() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property pipeline in model TumblingWindowTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property pipeline in model TumblingWindowTrigger")); } else { pipeline().validate(); } if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model TumblingWindowTrigger")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model TumblingWindowTrigger")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java index 8d759f4434c23..820747233e25b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java @@ -6,16 +6,28 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Referenced tumbling window trigger dependency. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "type", + defaultImpl = TumblingWindowTriggerDependencyReference.class, + visible = true) @JsonTypeName("TumblingWindowTriggerDependencyReference") @Fluent public final class TumblingWindowTriggerDependencyReference extends TriggerDependencyReference { + /* + * The type of dependency reference. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "TumblingWindowTriggerDependencyReference"; + /* * Timespan applied to the start time of a tumbling window when evaluating dependency. 
*/ @@ -23,8 +35,7 @@ public final class TumblingWindowTriggerDependencyReference extends TriggerDepen private String offset; /* - * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be - * used. + * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. */ @JsonProperty(value = "size") private String size; @@ -35,6 +46,16 @@ public final class TumblingWindowTriggerDependencyReference extends TriggerDepen public TumblingWindowTriggerDependencyReference() { } + /** + * Get the type property: The type of dependency reference. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the offset property: Timespan applied to the start time of a tumbling window when evaluating dependency. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java index 859b56f8d3ef4..a53bd200e0bfe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.TwilioLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Twilio. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TwilioLinkedService.class, visible = true) @JsonTypeName("Twilio") @Fluent public final class TwilioLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Twilio"; + /* * Twilio linked service properties. */ @@ -32,6 +40,16 @@ public final class TwilioLinkedService extends LinkedService { public TwilioLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Twilio linked service properties. 
* @@ -134,8 +152,9 @@ public TwilioLinkedService withPassword(SecretBase password) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model TwilioLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model TwilioLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java index 69763fc3c4da2..099889599fb87 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java @@ -13,8 +13,7 @@ @Fluent public final class TypeConversionSettings { /* - * Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType - * boolean). + * Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "allowDataTruncation") private Object allowDataTruncation; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java index cb1f36166dbc7..03a52e8fe14be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java @@ -8,18 +8,26 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.UntilActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; /** - * This activity executes inner activities until the specified boolean expression results to true or timeout is - * reached, whichever is earlier. + * This activity executes inner activities until the specified boolean expression results to true or timeout is reached, + * whichever is earlier. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = UntilActivity.class, visible = true) @JsonTypeName("Until") @Fluent public final class UntilActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Until"; + /* * Until activity properties. */ @@ -32,6 +40,16 @@ public final class UntilActivity extends ControlActivity { public UntilActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Until activity properties. * @@ -121,8 +139,8 @@ public UntilActivity withExpression(Expression expression) { } /** - * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @return the timeout value. @@ -132,8 +150,8 @@ public Object timeout() { } /** - * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @param timeout the timeout value to set. @@ -179,8 +197,9 @@ public UntilActivity withActivities(List activities) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model UntilActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model UntilActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java index edaea3f24e2e9..8b8a62be702f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java @@ -13,8 +13,7 @@ @Fluent public final class UpdateIntegrationRuntimeNodeRequest { /* - * The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and - * maxConcurrentJobs(inclusive) are allowed. + * The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
*/ @JsonProperty(value = "concurrentJobsLimit") private Integer concurrentJobsLimit; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java index ca1525b008c82..8b51bee62999e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java @@ -13,15 +13,13 @@ @Fluent public final class UpdateIntegrationRuntimeRequest { /* - * Enables or disables the auto-update feature of the self-hosted integration runtime. See - * https://go.microsoft.com/fwlink/?linkid=854189. + * Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. */ @JsonProperty(value = "autoUpdate") private IntegrationRuntimeAutoUpdate autoUpdate; /* - * The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen - * on that time. + * The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. */ @JsonProperty(value = "updateDelayOffset") private String updateDelayOffset; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java index 1356cb8a736d4..7bbeef7b73976 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java @@ -13,15 +13,13 @@ @Fluent public final class UserAccessPolicy { /* - * The string with permissions for Data Plane access. Currently only 'r' is supported which grants read only - * access. + * The string with permissions for Data Plane access. Currently only 'r' is supported which grants read only access. */ @JsonProperty(value = "permissions") private String permissions; /* - * The resource path to get access relative to factory. Currently only empty string is supported which corresponds - * to the factory resource. + * The resource path to get access relative to factory. Currently only empty string is supported which corresponds to the factory resource. */ @JsonProperty(value = "accessResourcePath") private String accessResourcePath; @@ -39,8 +37,7 @@ public final class UserAccessPolicy { private String startTime; /* - * Expiration time for the token. Maximum duration for the token is eight hours and by default the token will - * expire in eight hours. + * Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours. */ @JsonProperty(value = "expireTime") private String expireTime; @@ -96,8 +93,8 @@ public UserAccessPolicy withAccessResourcePath(String accessResourcePath) { } /** - * Get the profileName property: The name of the profile. Currently only the default is supported. The default - * value is DefaultProfile. 
+ * Get the profileName property: The name of the profile. Currently only the default is supported. The default value + * is DefaultProfile. * * @return the profileName value. */ @@ -106,8 +103,8 @@ public String profileName() { } /** - * Set the profileName property: The name of the profile. Currently only the default is supported. The default - * value is DefaultProfile. + * Set the profileName property: The name of the profile. Currently only the default is supported. The default value + * is DefaultProfile. * * @param profileName the profileName value to set. * @return the UserAccessPolicy object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java index 0058c48c4eac5..427adcec52e47 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java @@ -78,12 +78,12 @@ public UserProperty withValue(Object value) { */ public void validate() { if (name() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property name in model UserProperty")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property name in model UserProperty")); } if (value() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property value in model UserProperty")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property value in model UserProperty")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java index 1c6570daec4be..6e93f4b00f64d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ValidationActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * This activity verifies that an external resource exists. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ValidationActivity.class, visible = true) @JsonTypeName("Validation") @Fluent public final class ValidationActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Validation"; + /* * Validation activity properties. 
*/ @@ -31,6 +39,16 @@ public final class ValidationActivity extends ControlActivity { public ValidationActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Validation activity properties. * @@ -95,8 +113,8 @@ public ValidationActivity withUserProperties(List userProperties) } /** - * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Get the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @return the timeout value. @@ -106,8 +124,8 @@ public Object timeout() { } /** - * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it - * takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType + * Set the timeout property: Specifies the timeout for the activity to run. If there is no value specified, it takes + * the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType * string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). * * @param timeout the timeout value to set. @@ -122,8 +140,8 @@ public ValidationActivity withTimeout(Object timeout) { } /** - * Get the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds - * will be used as the default. Type: integer (or Expression with resultType integer). + * Get the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds will + * be used as the default. Type: integer (or Expression with resultType integer). * * @return the sleep value. */ @@ -132,8 +150,8 @@ public Object sleep() { } /** - * Set the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds - * will be used as the default. Type: integer (or Expression with resultType integer). + * Set the sleep property: A delay in seconds between validation attempts. If no value is specified, 10 seconds will + * be used as the default. Type: integer (or Expression with resultType integer). * * @param sleep the sleep value to set. * @return the ValidationActivity object itself. @@ -173,8 +191,7 @@ public ValidationActivity withMinimumSize(Object minimumSize) { /** * Get the childItems property: Can be used if dataset points to a folder. If set to true, the folder must have at - * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType - * boolean). + * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). * * @return the childItems value. */ @@ -184,8 +201,7 @@ public Object childItems() { /** * Set the childItems property: Can be used if dataset points to a folder. If set to true, the folder must have at - * least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType - * boolean). + * least one file. If set to false, the folder must be empty. 
Type: boolean (or Expression with resultType boolean). * * @param childItems the childItems value to set. * @return the ValidationActivity object itself. @@ -230,8 +246,9 @@ public ValidationActivity withDataset(DatasetReference dataset) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ValidationActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ValidationActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java index 6fec57a7adcc7..a16b90eaca06a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java @@ -78,8 +78,8 @@ public VariableSpecification withDefaultValue(Object defaultValue) { */ public void validate() { if (type() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property type in model VariableSpecification")); + throw LOGGER.atError() + .log(new IllegalArgumentException("Missing required property type in model VariableSpecification")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java index 7b22a89c1734f..858deb07b7867 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.VerticaLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Vertica linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaLinkedService.class, visible = true) @JsonTypeName("Vertica") @Fluent public final class VerticaLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Vertica"; + /* * Vertica linked service properties. */ @@ -32,6 +40,16 @@ public final class VerticaLinkedService extends LinkedService { public VerticaLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Vertica linked service properties. * @@ -126,8 +144,8 @@ public VerticaLinkedService withPwd(AzureKeyVaultSecretReference pwd) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -136,8 +154,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the VerticaLinkedService object itself. @@ -159,8 +177,9 @@ public VerticaLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model VerticaLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model VerticaLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java index 6f1dafb9bb2f9..af7d186a57432 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Vertica source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaSource.class, visible = true) @JsonTypeName("VerticaSource") @Fluent public final class VerticaSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "VerticaSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class VerticaSource extends TabularSource { public VerticaSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). 
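
The hunks above replace include = JsonTypeInfo.As.PROPERTY with a visible type id plus a concrete "type" field annotated @JsonTypeId. Below is a minimal, self-contained sketch of that pattern using hypothetical Activity/WaitLike classes (not the generated models): the discriminator is carried as a real field, populated on deserialization because visible = true, and written exactly once on serialization because @JsonTypeId suppresses the regular property.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class VisibleTypeIdSketch {
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Activity.class, visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "Wait", value = WaitLike.class) })
    public static class Activity {
        @JsonProperty("name")
        public String name;
    }

    @JsonTypeName("Wait")
    public static final class WaitLike extends Activity {
        // The discriminator is a real, visible field; @JsonTypeId makes Jackson use its value as the
        // type id on serialization instead of writing it a second time as a regular property.
        @JsonTypeId
        @JsonProperty(value = "type", required = true)
        public String type = "Wait";

        @JsonProperty("waitTimeInSeconds")
        public int waitTimeInSeconds;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Activity activity = mapper.readValue(
            "{\"type\":\"Wait\",\"name\":\"pause\",\"waitTimeInSeconds\":30}", Activity.class);
        System.out.println(activity.getClass().getSimpleName()); // WaitLike
        System.out.println(mapper.writeValueAsString(activity)); // "type" appears exactly once
    }
}
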
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java index 9113e064d47a1..21280c66e6042 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.VerticaDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Vertica dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaTableDataset.class, visible = true) @JsonTypeName("VerticaTable") @Fluent public final class VerticaTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "VerticaTable"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class VerticaTableDataset extends Dataset { public VerticaTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java index 42ab901f571db..925479493a45f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.WaitActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * This activity suspends pipeline execution for the specified interval. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WaitActivity.class, visible = true) @JsonTypeName("Wait") @Fluent public final class WaitActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Wait"; + /* * Wait activity properties. 
*/ @@ -31,6 +39,16 @@ public final class WaitActivity extends ControlActivity { public WaitActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Wait activity properties. * @@ -126,8 +144,9 @@ public WaitActivity withWaitTimeInSeconds(Object waitTimeInSeconds) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model WaitActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model WaitActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java index 4f1e858ad46aa..c1045e89f4ddf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.WarehouseLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Microsoft Fabric Warehouse linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseLinkedService.class, visible = true) @JsonTypeName("Warehouse") @Fluent public final class WarehouseLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Warehouse"; + /* * Microsoft Fabric Warehouse linked service properties. */ @@ -32,6 +40,16 @@ public final class WarehouseLinkedService extends LinkedService { public WarehouseLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Fabric Warehouse linked service properties. * @@ -228,8 +246,8 @@ public WarehouseLinkedService withTenant(Object tenant) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -238,8 +256,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. 
Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the WarehouseLinkedService object itself. @@ -253,9 +271,9 @@ public WarehouseLinkedService withEncryptedCredential(String encryptedCredential } /** - * Get the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Get the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @return the servicePrincipalCredentialType value. */ @@ -264,9 +282,9 @@ public Object servicePrincipalCredentialType() { } /** - * Set the servicePrincipalCredentialType property: The service principal credential type to use in - * Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. - * Type: string (or Expression with resultType string). + * Set the servicePrincipalCredentialType property: The service principal credential type to use in Server-To-Server + * authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + * Expression with resultType string). * * @param servicePrincipalCredentialType the servicePrincipalCredentialType value to set. * @return the WarehouseLinkedService object itself. @@ -317,8 +335,9 @@ public WarehouseLinkedService withServicePrincipalCredential(SecretBase serviceP public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model WarehouseLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model WarehouseLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java index 81322f0baee80..ae2979ba71e57 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Microsoft Fabric Warehouse sink. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseSink.class, visible = true) @JsonTypeName("WarehouseSink") @Fluent public final class WarehouseSink extends CopySink { + /* + * Copy sink type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WarehouseSink"; + /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ @@ -23,8 +31,7 @@ public final class WarehouseSink extends CopySink { private Object preCopyScript; /* - * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType - * boolean). + * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "allowCopyCommand") private Object allowCopyCommand; @@ -36,15 +43,13 @@ public final class WarehouseSink extends CopySink { private DWCopyCommandSettings copyCommandSettings; /* - * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string - * (or Expression with resultType string). + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */ @JsonProperty(value = "tableOption") private Object tableOption; /* - * Write behavior when copying data into azure Microsoft Fabric Data Warehouse. Type: DWWriteBehaviorEnum (or - * Expression with resultType DWWriteBehaviorEnum) + * Write behavior when copying data into azure Microsoft Fabric Data Warehouse. Type: DWWriteBehaviorEnum (or Expression with resultType DWWriteBehaviorEnum) */ @JsonProperty(value = "writeBehavior") private Object writeBehavior; @@ -55,6 +60,16 @@ public final class WarehouseSink extends CopySink { public WarehouseSink() { } + /** + * Get the type property: Copy sink type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preCopyScript property: SQL pre-copy script. Type: string (or Expression with resultType string). * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java index eb555a034c7f5..5ae602a7165e9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Microsoft Fabric Warehouse source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseSource.class, visible = true) @JsonTypeName("WarehouseSource") @Fluent public final class WarehouseSource extends TabularSource { + /* + * Copy source type. 
+ */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WarehouseSource"; + /* * Microsoft Fabric Warehouse reader query. Type: string (or Expression with resultType string). */ @@ -23,30 +31,25 @@ public final class WarehouseSource extends TabularSource { private Object sqlReaderQuery; /* - * Name of the stored procedure for a Microsoft Fabric Warehouse source. This cannot be used at the same time as - * SqlReaderQuery. Type: string (or Expression with resultType string). + * Name of the stored procedure for a Microsoft Fabric Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */ @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* - * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - * Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. */ @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* - * Specifies the transaction locking behavior for the Microsoft Fabric Warehouse source. Allowed values: - * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: - * string (or Expression with resultType string). + * Specifies the transaction locking behavior for the Microsoft Fabric Warehouse source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* - * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", - * "PhysicalPartitionsOfTable", "DynamicRange". + * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ @JsonProperty(value = "partitionOption") private Object partitionOption; @@ -63,6 +66,16 @@ public final class WarehouseSource extends TabularSource { public WarehouseSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the sqlReaderQuery property: Microsoft Fabric Warehouse reader query. Type: string (or Expression with * resultType string). 
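
The validate() hunks throughout these files swap LOGGER.logExceptionAsError(...) for LOGGER.atError().log(...), which logs the exception and hands it back so it can be thrown inline. A minimal sketch of that pattern against azure-core's ClientLogger, using a hypothetical SampleModel rather than a generated class:

import com.azure.core.util.logging.ClientLogger;

public final class SampleModel {
    private static final ClientLogger LOGGER = new ClientLogger(SampleModel.class);

    // Stand-in for the flattened typeProperties payload carried by the generated models.
    private Object innerTypeProperties;

    public Object innerTypeProperties() {
        return this.innerTypeProperties;
    }

    public void validate() {
        if (innerTypeProperties() == null) {
            // atError() returns a LoggingEventBuilder; log(RuntimeException) records the error
            // and returns the exception, so logging and throwing happen in one statement.
            throw LOGGER.atError()
                .log(new IllegalArgumentException(
                    "Missing required property innerTypeProperties in model SampleModel"));
        }
    }
}
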
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java index dc3b5495c7e7c..cc5c653b574cb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.WarehouseTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Microsoft Fabric Warehouse dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseTableDataset.class, visible = true) @JsonTypeName("WarehouseTable") @Fluent public final class WarehouseTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WarehouseTable"; + /* * Microsoft Fabric Warehouse dataset properties. */ @@ -31,6 +39,16 @@ public final class WarehouseTableDataset extends Dataset { public WarehouseTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Microsoft Fabric Warehouse dataset properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java index 8d70b2143970b..b82c6d630890c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.WebActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Web activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebActivity.class, visible = true) @JsonTypeName("WebActivity") @Fluent public final class WebActivity extends ExecutionActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WebActivity"; + /* * Web activity properties. 
*/ @@ -32,6 +40,16 @@ public final class WebActivity extends ExecutionActivity { public WebActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Web activity properties. * @@ -137,8 +155,7 @@ public WebActivity withMethod(WebActivityMethod method) { } /** - * Get the url property: Web activity target endpoint and path. Type: string (or Expression with resultType - * string). + * Get the url property: Web activity target endpoint and path. Type: string (or Expression with resultType string). * * @return the url value. */ @@ -147,8 +164,7 @@ public Object url() { } /** - * Set the url property: Web activity target endpoint and path. Type: string (or Expression with resultType - * string). + * Set the url property: Web activity target endpoint and path. Type: string (or Expression with resultType string). * * @param url the url value to set. * @return the WebActivity object itself. @@ -168,7 +184,7 @@ public WebActivity withUrl(Object url) { * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.innerTypeProperties() == null ? null : this.innerTypeProperties().headers(); } @@ -180,7 +196,7 @@ public Map headers() { * @param headers the headers value to set. * @return the WebActivity object itself. */ - public WebActivity withHeaders(Map headers) { + public WebActivity withHeaders(Map headers) { if (this.innerTypeProperties() == null) { this.innerTypeProperties = new WebActivityTypeProperties(); } @@ -287,9 +303,9 @@ public WebActivity withHttpRequestTimeout(Object httpRequestTimeout) { } /** - * Get the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a - * HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set - * false then continues to invoke HTTP GET call on location given in http response headers. + * Get the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a HTTP + * 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false + * then continues to invoke HTTP GET call on location given in http response headers. * * @return the turnOffAsync value. */ @@ -298,9 +314,9 @@ public Boolean turnOffAsync() { } /** - * Set the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a - * HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set - * false then continues to invoke HTTP GET call on location given in http response headers. + * Set the turnOffAsync property: Option to disable invoking HTTP GET on location given in response header of a HTTP + * 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false + * then continues to invoke HTTP GET call on location given in http response headers. * * @param turnOffAsync the turnOffAsync value to set. * @return the WebActivity object itself. 
@@ -391,8 +407,9 @@ public WebActivity withConnectVia(IntegrationRuntimeReference connectVia) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model WebActivity")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property innerTypeProperties in model WebActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java index 21a6304817ca5..0c5db8e7e06ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java @@ -25,8 +25,7 @@ public final class WebActivityAuthentication { private SecretBase pfx; /* - * Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: - * string (or Expression with resultType string). + * Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). */ @JsonProperty(value = "username") private Object username; @@ -38,15 +37,13 @@ public final class WebActivityAuthentication { private SecretBase password; /* - * Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression - * with resultType string). + * Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). */ @JsonProperty(value = "resource") private Object resource; /* - * TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string - * (or Expression with resultType string). + * TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). */ @JsonProperty(value = "userTenant") private Object userTenant; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java index d65bbb6cbc5a2..d37b395bfaa6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java @@ -5,22 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "authenticationType") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "authenticationType", + defaultImpl = WebAnonymousAuthentication.class, + visible = true) @JsonTypeName("Anonymous") @Fluent public final class WebAnonymousAuthentication extends WebLinkedServiceTypeProperties { + /* + * Type of authentication used to connect to the web table source. + */ + @JsonTypeId + @JsonProperty(value = "authenticationType", required = true) + private WebAuthenticationType authenticationType = WebAuthenticationType.ANONYMOUS; + /** * Creates an instance of WebAnonymousAuthentication class. */ public WebAnonymousAuthentication() { } + /** + * Get the authenticationType property: Type of authentication used to connect to the web table source. + * + * @return the authenticationType value. + */ + @Override + public WebAuthenticationType authenticationType() { + return this.authenticationType; + } + /** * {@inheritDoc} */ diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java index 3ff72128fa3aa..e454d42dff9f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java @@ -7,16 +7,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "authenticationType") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "authenticationType", + defaultImpl = WebBasicAuthentication.class, + visible = true) @JsonTypeName("Basic") @Fluent public final class WebBasicAuthentication extends WebLinkedServiceTypeProperties { + /* + * Type of authentication used to connect to the web table source. + */ + @JsonTypeId + @JsonProperty(value = "authenticationType", required = true) + private WebAuthenticationType authenticationType = WebAuthenticationType.BASIC; + /* * User name for Basic authentication. Type: string (or Expression with resultType string). */ @@ -35,6 +47,16 @@ public final class WebBasicAuthentication extends WebLinkedServiceTypeProperties public WebBasicAuthentication() { } + /** + * Get the authenticationType property: Type of authentication used to connect to the web table source. + * + * @return the authenticationType value. + */ + @Override + public WebAuthenticationType authenticationType() { + return this.authenticationType; + } + /** * Get the username property: User name for Basic authentication. Type: string (or Expression with resultType * string). 
@@ -95,12 +117,14 @@ public WebBasicAuthentication withUrl(Object url) { public void validate() { super.validate(); if (username() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property username in model WebBasicAuthentication")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property username in model WebBasicAuthentication")); } if (password() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property password in model WebBasicAuthentication")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property password in model WebBasicAuthentication")); } else { password().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java index ef18b3cad8808..0f6d75cdf03d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; @@ -14,10 +15,21 @@ * A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This * scheme follows mutual authentication; the server must also provide valid credentials to the client. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "authenticationType") +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + property = "authenticationType", + defaultImpl = WebClientCertificateAuthentication.class, + visible = true) @JsonTypeName("ClientCertificate") @Fluent public final class WebClientCertificateAuthentication extends WebLinkedServiceTypeProperties { + /* + * Type of authentication used to connect to the web table source. + */ + @JsonTypeId + @JsonProperty(value = "authenticationType", required = true) + private WebAuthenticationType authenticationType = WebAuthenticationType.CLIENT_CERTIFICATE; + /* * Base64-encoded contents of a PFX file. */ @@ -36,6 +48,16 @@ public final class WebClientCertificateAuthentication extends WebLinkedServiceTy public WebClientCertificateAuthentication() { } + /** + * Get the authenticationType property: Type of authentication used to connect to the web table source. + * + * @return the authenticationType value. + */ + @Override + public WebAuthenticationType authenticationType() { + return this.authenticationType; + } + /** * Get the pfx property: Base64-encoded contents of a PFX file. 
* @@ -94,14 +116,16 @@ public WebClientCertificateAuthentication withUrl(Object url) { public void validate() { super.validate(); if (pfx() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property pfx in model WebClientCertificateAuthentication")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property pfx in model WebClientCertificateAuthentication")); } else { pfx().validate(); } if (password() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property password in model WebClientCertificateAuthentication")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property password in model WebClientCertificateAuthentication")); } else { password().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java index e00758e97186f..3a2d1260d9599 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Web linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebLinkedService.class, visible = true) @JsonTypeName("Web") @Fluent public final class WebLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Web"; + /* * Web linked service properties. */ @@ -31,6 +39,16 @@ public final class WebLinkedService extends LinkedService { public WebLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the typeProperties property: Web linked service properties. 
* @@ -96,8 +114,9 @@ public WebLinkedService withAnnotations(List annotations) { public void validate() { super.validate(); if (typeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property typeProperties in model WebLinkedService")); + throw LOGGER.atError() + .log( + new IllegalArgumentException("Missing required property typeProperties in model WebLinkedService")); } else { typeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java index 05b9d19d7ae08..ef8da54742b56 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java @@ -8,18 +8,19 @@ import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** - * Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, - * so not flattened in SDK models. + * Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so + * not flattened in SDK models. */ @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.PROPERTY, property = "authenticationType", - defaultImpl = WebLinkedServiceTypeProperties.class) + defaultImpl = WebLinkedServiceTypeProperties.class, + visible = true) @JsonTypeName("WebLinkedServiceTypeProperties") @JsonSubTypes({ @JsonSubTypes.Type(name = "Anonymous", value = WebAnonymousAuthentication.class), @@ -28,8 +29,14 @@ @Fluent public class WebLinkedServiceTypeProperties { /* - * The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string (or Expression with - * resultType string). + * Type of authentication used to connect to the web table source. + */ + @JsonTypeId + @JsonProperty(value = "authenticationType", required = true) + private WebAuthenticationType authenticationType; + + /* + * The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; @@ -38,6 +45,16 @@ public class WebLinkedServiceTypeProperties { * Creates an instance of WebLinkedServiceTypeProperties class. */ public WebLinkedServiceTypeProperties() { + this.authenticationType = WebAuthenticationType.fromString("WebLinkedServiceTypeProperties"); + } + + /** + * Get the authenticationType property: Type of authentication used to connect to the web table source. + * + * @return the authenticationType value. 
+ */ + public WebAuthenticationType authenticationType() { + return this.authenticationType; } /** @@ -69,8 +86,9 @@ public WebLinkedServiceTypeProperties withUrl(Object url) { */ public void validate() { if (url() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property url in model WebLinkedServiceTypeProperties")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property url in model WebLinkedServiceTypeProperties")); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java index 15a0201796cf7..17048b151b379 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java @@ -6,19 +6,26 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity source for web page table. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebSource.class, visible = true) @JsonTypeName("WebSource") @Fluent public final class WebSource extends CopySource { /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WebSource"; + + /* + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -29,6 +36,16 @@ public final class WebSource extends CopySource { public WebSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the additionalColumns property: Specifies the additional columns to be added to source data. Type: array of * objects(AdditionalColumns) (or Expression with resultType array of objects). 
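
WebLinkedServiceTypeProperties above keeps its @JsonSubTypes registrations and now names itself as defaultImpl with a visible authenticationType field. One consequence, sketched below with hypothetical BaseAuth/BasicAuth classes (not the generated hierarchy): a recognized discriminator still resolves to the registered subtype, while an unrecognized one falls back to the default implementation, and visible = true preserves the raw discriminator value in the field either way.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class DefaultImplSketch {
    @JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        property = "authenticationType",
        defaultImpl = BaseAuth.class,
        visible = true)
    @JsonSubTypes({ @JsonSubTypes.Type(name = "Basic", value = BasicAuth.class) })
    public static class BaseAuth {
        // visible = true copies the discriminator into this field instead of discarding it.
        @JsonProperty("authenticationType")
        public String authenticationType;

        @JsonProperty("url")
        public String url;
    }

    public static final class BasicAuth extends BaseAuth {
        @JsonProperty("username")
        public String username;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        BaseAuth known = mapper.readValue(
            "{\"authenticationType\":\"Basic\",\"url\":\"https://example.org\",\"username\":\"u\"}",
            BaseAuth.class);
        System.out.println(known.getClass().getSimpleName()); // BasicAuth

        BaseAuth unknown = mapper.readValue(
            "{\"authenticationType\":\"SomethingElse\",\"url\":\"https://example.org\"}",
            BaseAuth.class);
        System.out.println(unknown.getClass().getSimpleName()); // BaseAuth
        System.out.println(unknown.authenticationType);         // SomethingElse
    }
}
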
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java index 8ef746ad8059b..0a86a00726c23 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.WebTableDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * The dataset points to a HTML table in the web page. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebTableDataset.class, visible = true) @JsonTypeName("WebTable") @Fluent public final class WebTableDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WebTable"; + /* * Web table dataset properties. */ @@ -32,6 +40,16 @@ public final class WebTableDataset extends Dataset { public WebTableDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Web table dataset properties. * @@ -163,8 +181,9 @@ public WebTableDataset withPath(Object path) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model WebTableDataset")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model WebTableDataset")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java index 3fa6437eae395..6e8a54d0c9a9d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.WebhookActivityTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * WebHook activity. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebhookActivity.class, visible = true) @JsonTypeName("WebHook") @Fluent public final class WebhookActivity extends ControlActivity { + /* + * Type of activity. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WebHook"; + /* * WebHook activity properties. */ @@ -38,6 +46,16 @@ public final class WebhookActivity extends ControlActivity { public WebhookActivity() { } + /** + * Get the type property: Type of activity. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: WebHook activity properties. * @@ -203,7 +221,7 @@ public WebhookActivity withTimeout(String timeout) { * * @return the headers value. */ - public Map headers() { + public Map headers() { return this.innerTypeProperties() == null ? null : this.innerTypeProperties().headers(); } @@ -215,7 +233,7 @@ public Map headers() { * @param headers the headers value to set. * @return the WebhookActivity object itself. */ - public WebhookActivity withHeaders(Map headers) { + public WebhookActivity withHeaders(Map headers) { if (this.innerTypeProperties() == null) { this.innerTypeProperties = new WebhookActivityTypeProperties(); } @@ -307,8 +325,9 @@ public WebhookActivity withReportStatusOnCallBack(Object reportStatusOnCallBack) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError( - new IllegalArgumentException("Missing required property innerTypeProperties in model WebhookActivity")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model WebhookActivity")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java index fb3793bb92d7b..8c43ec72a4899 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.PowerQueryTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -14,10 +15,17 @@ /** * Power Query data flow. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WranglingDataFlow.class, visible = true) @JsonTypeName("WranglingDataFlow") @Fluent public final class WranglingDataFlow extends DataFlow { + /* + * Type of data flow. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "WranglingDataFlow"; + /* * PowerQuery data flow type properties. 
*/ @@ -30,6 +38,16 @@ public final class WranglingDataFlow extends DataFlow { public WranglingDataFlow() { } + /** + * Get the type property: Type of data flow. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: PowerQuery data flow type properties. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java index 7e2a807b7798e..ae6f38a8a7f09 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.XeroLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Xero Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroLinkedService.class, visible = true) @JsonTypeName("Xero") @Fluent public final class XeroLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Xero"; + /* * Xero Service linked service properties. */ @@ -32,6 +40,16 @@ public final class XeroLinkedService extends LinkedService { public XeroLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Xero Service linked service properties. * @@ -226,8 +244,8 @@ public XeroLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -236,8 +254,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the XeroLinkedService object itself. @@ -251,8 +269,8 @@ public XeroLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. 
+ * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -261,8 +279,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the XeroLinkedService object itself. @@ -284,8 +302,9 @@ public XeroLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model XeroLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model XeroLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java index e33b5a612b073..203ea60129af9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Xero Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroObjectDataset.class, visible = true) @JsonTypeName("XeroObject") @Fluent public final class XeroObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "XeroObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class XeroObjectDataset extends Dataset { public XeroObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java index a3756e62e2ce5..f3ea47c84190a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Xero Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroSource.class, visible = true) @JsonTypeName("XeroSource") @Fluent public final class XeroSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "XeroSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class XeroSource extends TabularSource { public XeroSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java index 3d1181d2ca83c..671c03828ed3c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.XmlDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Xml dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlDataset.class, visible = true) @JsonTypeName("Xml") @Fluent public final class XmlDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Xml"; + /* * Xml dataset properties. */ @@ -31,6 +39,16 @@ public final class XmlDataset extends Dataset { public XmlDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Xml dataset properties. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java index f050b0ab0ecb2..e29b2804f1435 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * Xml read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlReadSettings.class, visible = true) @JsonTypeName("XmlReadSettings") @Fluent public final class XmlReadSettings extends FormatReadSettings { + /* + * The read setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "XmlReadSettings"; + /* * Compression settings. */ @@ -23,30 +31,25 @@ public final class XmlReadSettings extends FormatReadSettings { private CompressionReadSettings compressionProperties; /* - * Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. - * Type: string (or Expression with resultType string). + * Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). */ @JsonProperty(value = "validationMode") private Object validationMode; /* - * Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with - * resultType boolean). + * Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "detectDataType") private Object detectDataType; /* - * Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType - * boolean). + * Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */ @JsonProperty(value = "namespaces") private Object namespaces; /* - * Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no - * prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be - * used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). + * Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). */ @JsonProperty(value = "namespacePrefixes") private Object namespacePrefixes; @@ -57,6 +60,16 @@ public final class XmlReadSettings extends FormatReadSettings { public XmlReadSettings() { } + /** + * Get the type property: The read setting type. 
+ * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the compressionProperties property: Compression settings. * @@ -122,8 +135,8 @@ public XmlReadSettings withDetectDataType(Object detectDataType) { } /** - * Get the namespaces property: Indicates whether namespace is enabled when reading the xml files. Type: boolean - * (or Expression with resultType boolean). + * Get the namespaces property: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or + * Expression with resultType boolean). * * @return the namespaces value. */ @@ -132,8 +145,8 @@ public Object namespaces() { } /** - * Set the namespaces property: Indicates whether namespace is enabled when reading the xml files. Type: boolean - * (or Expression with resultType boolean). + * Set the namespaces property: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or + * Expression with resultType boolean). * * @param namespaces the namespaces value to set. * @return the XmlReadSettings object itself. @@ -146,8 +159,8 @@ public XmlReadSettings withNamespaces(Object namespaces) { /** * Get the namespacePrefixes property: Namespace uri to prefix mappings to override the prefixes in column names * when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name - * in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or - * Expression with resultType object). + * in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression + * with resultType object). * * @return the namespacePrefixes value. */ @@ -158,8 +171,8 @@ public Object namespacePrefixes() { /** * Set the namespacePrefixes property: Namespace uri to prefix mappings to override the prefixes in column names * when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name - * in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or - * Expression with resultType object). + * in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression + * with resultType object). * * @param namespacePrefixes the namespacePrefixes value to set. * @return the XmlReadSettings object itself. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java index 7d440966a613e..2bff432ce6a7d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Xml source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlSource.class, visible = true) @JsonTypeName("XmlSource") @Fluent public final class XmlSource extends CopySource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "XmlSource"; + /* * Xml store settings. */ @@ -29,8 +37,7 @@ public final class XmlSource extends CopySource { private XmlReadSettings formatSettings; /* - * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or - * Expression with resultType array of objects). + * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ @JsonProperty(value = "additionalColumns") private Object additionalColumns; @@ -41,6 +48,16 @@ public final class XmlSource extends CopySource { public XmlSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the storeSettings property: Xml store settings. * diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java index e8c2816611e36..2749adfa6abbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ZendeskLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Linked service for Zendesk. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZendeskLinkedService.class, visible = true) @JsonTypeName("Zendesk") @Fluent public final class ZendeskLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Zendesk"; + /* * Zendesk linked service properties. */ @@ -32,6 +40,16 @@ public final class ZendeskLinkedService extends LinkedService { public ZendeskLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Zendesk linked service properties. * @@ -195,8 +213,8 @@ public ZendeskLinkedService withApiToken(SecretBase apiToken) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -205,8 +223,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ZendeskLinkedService object itself. @@ -228,8 +246,9 @@ public ZendeskLinkedService withEncryptedCredential(String encryptedCredential) public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ZendeskLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ZendeskLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java index a36bb31790354..942b3a047cfcd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * The ZipDeflate compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZipDeflateReadSettings.class, visible = true) @JsonTypeName("ZipDeflateReadSettings") @Fluent public final class ZipDeflateReadSettings extends CompressionReadSettings { + /* + * The Compression setting type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ZipDeflateReadSettings"; + /* * Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). */ @@ -28,6 +36,16 @@ public final class ZipDeflateReadSettings extends CompressionReadSettings { public ZipDeflateReadSettings() { } + /** + * Get the type property: The Compression setting type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the preserveZipFileNameAsFolder property: Preserve the zip file name as folder path. Type: boolean (or * Expression with resultType boolean). 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java index fae78f4503d0c..3fb0de1b5c4ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java @@ -8,6 +8,7 @@ import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.datafactory.fluent.models.ZohoLinkedServiceTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -16,10 +17,17 @@ /** * Zoho server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoLinkedService.class, visible = true) @JsonTypeName("Zoho") @Fluent public final class ZohoLinkedService extends LinkedService { + /* + * Type of linked service. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "Zoho"; + /* * Zoho server linked service properties. */ @@ -32,6 +40,16 @@ public final class ZohoLinkedService extends LinkedService { public ZohoLinkedService() { } + /** + * Get the type property: Type of linked service. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Zoho server linked service properties. * @@ -199,8 +217,8 @@ public ZohoLinkedService withUseHostVerification(Object useHostVerification) { } /** - * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Get the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @return the usePeerVerification value. */ @@ -209,8 +227,8 @@ public Object usePeerVerification() { } /** - * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting - * over SSL. The default value is true. + * Set the usePeerVerification property: Specifies whether to verify the identity of the server when connecting over + * SSL. The default value is true. * * @param usePeerVerification the usePeerVerification value to set. * @return the ZohoLinkedService object itself. @@ -224,8 +242,8 @@ public ZohoLinkedService withUsePeerVerification(Object usePeerVerification) { } /** - * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @return the encryptedCredential value. */ @@ -234,8 +252,8 @@ public String encryptedCredential() { } /** - * Set the encryptedCredential property: The encrypted credential used for authentication. 
Credentials are - * encrypted using the integration runtime credential manager. Type: string. + * Set the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted + * using the integration runtime credential manager. Type: string. * * @param encryptedCredential the encryptedCredential value to set. * @return the ZohoLinkedService object itself. @@ -257,8 +275,9 @@ public ZohoLinkedService withEncryptedCredential(String encryptedCredential) { public void validate() { super.validate(); if (innerTypeProperties() == null) { - throw LOGGER.logExceptionAsError(new IllegalArgumentException( - "Missing required property innerTypeProperties in model ZohoLinkedService")); + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerTypeProperties in model ZohoLinkedService")); } else { innerTypeProperties().validate(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java index ad3e46aac670c..a798a635ed2ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java @@ -7,6 +7,7 @@ import com.azure.core.annotation.Fluent; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import java.util.List; @@ -15,10 +16,17 @@ /** * Zoho server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoObjectDataset.class, visible = true) @JsonTypeName("ZohoObject") @Fluent public final class ZohoObjectDataset extends Dataset { + /* + * Type of dataset. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ZohoObject"; + /* * Properties specific to this dataset type. */ @@ -31,6 +39,16 @@ public final class ZohoObjectDataset extends Dataset { public ZohoObjectDataset() { } + /** + * Get the type property: Type of dataset. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the innerTypeProperties property: Properties specific to this dataset type. 
* diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java index bfaa8dc75523f..54f35fd440dce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java @@ -6,16 +6,24 @@ import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeId; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** * A copy activity Zoho server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoSource.class, visible = true) @JsonTypeName("ZohoSource") @Fluent public final class ZohoSource extends TabularSource { + /* + * Copy source type. + */ + @JsonTypeId + @JsonProperty(value = "type", required = true) + private String type = "ZohoSource"; + /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ @@ -28,6 +36,16 @@ public final class ZohoSource extends TabularSource { public ZohoSource() { } + /** + * Get the type property: Copy source type. + * + * @return the type value. + */ + @Override + public String type() { + return this.type; + } + /** * Get the query property: A query to retrieve data from source. Type: string (or Expression with resultType * string). diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/package-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/package-info.java index 9b3163ee9f9c9..6a4809cc5d3b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/package-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/package-info.java @@ -4,7 +4,7 @@ /** * Package containing the data models for DataFactoryManagementClient. - * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data - * Factory V2 services. + * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory + * V2 services. */ package com.azure.resourcemanager.datafactory.models; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/package-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/package-info.java index afc4b48d043c6..7ce67aaec59eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/package-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/package-info.java @@ -4,7 +4,7 @@ /** * Package containing the classes for DataFactoryManagementClient. 
- * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data - * Factory V2 services. + * The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory + * V2 services. */ package com.azure.resourcemanager.datafactory; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java index cbdb53ef51f9a..8f1fc8bbd693f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java @@ -10,4 +10,4 @@ exports com.azure.resourcemanager.datafactory.models; opens com.azure.resourcemanager.datafactory.fluent.models to com.azure.core, com.fasterxml.jackson.databind; opens com.azure.resourcemanager.datafactory.models to com.azure.core, com.fasterxml.jackson.databind; -} +} \ No newline at end of file diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json index 51959e24378f7..a1227b27c8343 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json @@ -604,17 +604,7 @@ "allDeclaredFields" : true, "allDeclaredMethods" : true }, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties", + "name" : "com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner", "allDeclaredConstructors" : true, "allDeclaredFields" : true, "allDeclaredMethods" : true @@ -833,6 +823,36 @@ "allDeclaredConstructors" : true, "allDeclaredFields" : true, "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource", + "allDeclaredConstructors" : true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential", + "allDeclaredConstructors" : true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties", + "allDeclaredConstructors" : true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.models.ServicePrincipalCredentialResource", + "allDeclaredConstructors" : true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.models.ServicePrincipalCredential", + "allDeclaredConstructors" : 
true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true +}, { + "name" : "com.azure.resourcemanager.datafactory.fluent.models.ServicePrincipalCredentialTypeProperties", + "allDeclaredConstructors" : true, + "allDeclaredFields" : true, + "allDeclaredMethods" : true }, { "name" : "com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkReference", "allDeclaredConstructors" : true, @@ -5133,16 +5153,6 @@ "allDeclaredConstructors" : true, "allDeclaredFields" : true, "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServicePrincipalCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ServicePrincipalCredentialTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true }, { "name" : "com.azure.resourcemanager.datafactory.models.FactoryIdentityType", "allDeclaredConstructors" : true, @@ -5548,11 +5558,6 @@ "allDeclaredConstructors" : true, "allDeclaredFields" : true, "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true }, { "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType", "allDeclaredConstructors" : true, diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ActivityRunsQueryByPipelineRunSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ActivityRunsQueryByPipelineRunSamples.java index 7badf0a7847a0..85df856b9e10b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ActivityRunsQueryByPipelineRunSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ActivityRunsQueryByPipelineRunSamples.java @@ -12,8 +12,7 @@ */ public final class ActivityRunsQueryByPipelineRunSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ActivityRuns_QueryByPipelineRun.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ActivityRuns_QueryByPipelineRun.json */ /** * Sample code: ActivityRuns_QueryByPipelineRun. 
@@ -22,10 +21,11 @@ public final class ActivityRunsQueryByPipelineRunSamples { */ public static void activityRunsQueryByPipelineRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.activityRuns().queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", - new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) - .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")), - com.azure.core.util.Context.NONE); + manager.activityRuns() + .queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName", + "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", + new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) + .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureCreateOrUpdateSamples.java index 8c77093a681a7..476c96e80ff75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureCreateOrUpdateSamples.java @@ -15,8 +15,7 @@ */ public final class ChangeDataCaptureCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Create.json */ /** * Sample code: ChangeDataCapture_Create. @@ -24,18 +23,20 @@ public final class ChangeDataCaptureCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().define("exampleChangeDataCapture") + manager.changeDataCaptures() + .define("exampleChangeDataCapture") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withSourceConnectionsInfo((List) null) - .withTargetConnectionsInfo((List) null).withPolicy((MapperPolicy) null) + .withTargetConnectionsInfo((List) null) + .withPolicy((MapperPolicy) null) .withDescription( "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database with automapped and non-automapped mappings.") - .withAllowVNetOverride(false).create(); + .withAllowVNetOverride(false) + .create(); } /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Update.json */ /** * Sample code: ChangeDataCapture_Update. @@ -43,10 +44,15 @@ public static void changeDataCaptureCreate(com.azure.resourcemanager.datafactory * @param manager Entry point to DataFactoryManager. 
*/ public static void changeDataCaptureUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - ChangeDataCaptureResource resource = manager.changeDataCaptures().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleChangeDataCapture", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withDescription( - "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.") - .withAllowVNetOverride(false).withStatus("Stopped").apply(); + ChangeDataCaptureResource resource = manager.changeDataCaptures() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withDescription( + "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.") + .withAllowVNetOverride(false) + .withStatus("Stopped") + .apply(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureDeleteSamples.java index c8d3c98549127..e3f2c1b5cb30e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class ChangeDataCaptureDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Delete.json */ /** * Sample code: ChangeDataCapture_Delete. @@ -18,7 +17,8 @@ public final class ChangeDataCaptureDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void changeDataCaptureDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureGetSamples.java index 85979b8606507..13d24c6e51716 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureGetSamples.java @@ -9,9 +9,7 @@ */ public final class ChangeDataCaptureGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get.json */ /** * Sample code: ChangeDataCapture_Get. @@ -19,7 +17,8 @@ public final class ChangeDataCaptureGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", null, com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListByFactorySamples.java index 418b2dbf29897..dd9aeb3fc15fa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class ChangeDataCaptureListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_ListByFactory.json */ /** * Sample code: ChangeDataCapture_ListByFactory. 
@@ -19,7 +18,7 @@ public final class ChangeDataCaptureListByFactorySamples { */ public static void changeDataCaptureListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStartSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStartSamples.java index 2a4cef80a05a7..7bacb58833608 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStartSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStartSamples.java @@ -9,8 +9,7 @@ */ public final class ChangeDataCaptureStartSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Start.json */ /** * Sample code: ChangeDataCapture_Start. @@ -18,7 +17,8 @@ public final class ChangeDataCaptureStartSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().startWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .startWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStatusSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStatusSamples.java index 83132a49ab5b5..568884589229c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStatusSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStatusSamples.java @@ -9,8 +9,7 @@ */ public final class ChangeDataCaptureStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Status.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Status.json */ /** * Sample code: ChangeDataCapture_Start. @@ -18,7 +17,8 @@ public final class ChangeDataCaptureStatusSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().statusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .statusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStopSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStopSamples.java index 7c49c4fb8e547..0c2f865d23a05 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStopSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureStopSamples.java @@ -9,8 +9,7 @@ */ public final class ChangeDataCaptureStopSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ChangeDataCapture_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Stop.json */ /** * Sample code: ChangeDataCapture_Stop. @@ -18,7 +17,8 @@ public final class ChangeDataCaptureStopSamples { * @param manager Entry point to DataFactoryManager. */ public static void changeDataCaptureStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.changeDataCaptures().stopWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleChangeDataCapture", com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .stopWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateSamples.java index bb475097b716f..4792a544f6074 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateSamples.java @@ -4,6 +4,7 @@ package com.azure.resourcemanager.datafactory.generated; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; /** @@ -11,9 +12,7 @@ */ public final class CredentialOperationsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create.json */ /** * Sample code: Credentials_Create. 
@@ -21,10 +20,10 @@ public final class CredentialOperationsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void credentialsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().define("exampleCredential") - .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new ManagedIdentityCredential().withResourceId( - "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami")) - .create(); + manager.credentialOperations() + .createOrUpdateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", + new CredentialResourceInner().withProperties(new ManagedIdentityCredential().withResourceId( + "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami")), + null, com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteSamples.java index aa87c9a81fd60..7bbd0c81a458c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteSamples.java @@ -9,9 +9,7 @@ */ public final class CredentialOperationsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.json */ /** * Sample code: Credentials_Delete. @@ -19,7 +17,8 @@ public final class CredentialOperationsDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void credentialsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleCredential", com.azure.core.util.Context.NONE); + manager.credentialOperations() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetSamples.java index 026396083768c..6cb3798bae5b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetSamples.java @@ -9,8 +9,7 @@ */ public final class CredentialOperationsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json */ /** * Sample code: Credentials_Get. @@ -18,7 +17,8 @@ public final class CredentialOperationsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void credentialsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleCredential", null, com.azure.core.util.Context.NONE); + manager.credentialOperations() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactorySamples.java index a05dde59eb8b2..f5bec661965d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class CredentialOperationsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Credentials_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_ListByFactory.json */ /** * Sample code: Credentials_ListByFactory. @@ -18,7 +17,7 @@ public final class CredentialOperationsListByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void credentialsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.credentialOperations().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.credentialOperations() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionAddDataFlowSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionAddDataFlowSamples.java index bf975e76a79df..89b2ea6e73475 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionAddDataFlowSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionAddDataFlowSamples.java @@ -29,8 +29,7 @@ */ public final class DataFlowDebugSessionAddDataFlowSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_AddDataFlow.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_AddDataFlow.json */ /** * Sample code: DataFlowDebugSession_AddDataFlow. @@ -39,13 +38,17 @@ public final class DataFlowDebugSessionAddDataFlowSamples { */ public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.dataFlowDebugSessions().addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", - new DataFlowDebugPackage().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") - .withDataFlow(new DataFlowDebugResource().withName("dataflow1").withProperties(new MappingDataFlow() - .withSources(Arrays.asList(new DataFlowSource().withName("source1") - .withDataset(new DatasetReference().withReferenceName("DelimitedText2")))) - .withSinks(Arrays.asList()).withTransformations(Arrays.asList()).withScript( - "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1"))) + manager.dataFlowDebugSessions() + .addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", new DataFlowDebugPackage() + .withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") + .withDataFlow(new DataFlowDebugResource().withName("dataflow1") + .withProperties(new MappingDataFlow() + .withSources(Arrays.asList(new DataFlowSource().withName("source1") + .withDataset(new DatasetReference().withReferenceName("DelimitedText2")))) + .withSinks(Arrays.asList()) + .withTransformations(Arrays.asList()) + .withScript( + "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1"))) .withDatasets(Arrays.asList(new DatasetDebugResource().withName("dataset1") .withProperties(new DelimitedTextDataset() .withSchema(SerializerFactory.createDefaultManagementSerializerAdapter() @@ -54,7 +57,10 @@ public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.dat .withAnnotations(Arrays.asList()) .withLocation(new AzureBlobStorageLocation().withFileName("Ansiencoding.csv") .withContainer("dataflow-sample-data")) - 
.withColumnDelimiter(",").withQuoteChar("\"").withEscapeChar("\\").withFirstRowAsHeader(true)))) + .withColumnDelimiter(",") + .withQuoteChar("\"") + .withEscapeChar("\\") + .withFirstRowAsHeader(true)))) .withLinkedServices(Arrays.asList(new LinkedServiceDebugResource().withName("linkedService1") .withProperties(new AzureBlobStorageLinkedService().withAnnotations(Arrays.asList()) .withConnectionString( @@ -62,16 +68,17 @@ public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.dat .withEncryptedCredential("fakeTokenPlaceholder")))) .withDebugSettings(new DataFlowDebugPackageDebugSettings() .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("source1").withRowLimit(1000) + new DataFlowSourceSetting().withSourceName("source1") + .withRowLimit(1000) .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting() - .withSourceName("source2").withRowLimit(222).withAdditionalProperties(mapOf()))) + new DataFlowSourceSetting().withSourceName("source2") + .withRowLimit(222) + .withAdditionalProperties(mapOf()))) .withParameters(mapOf("sourcePath", "Toy")) - .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class, - SerializerEncoding.JSON))) - .withAdditionalProperties(mapOf()), - com.azure.core.util.Context.NONE); + .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class, + SerializerEncoding.JSON))) + .withAdditionalProperties(mapOf()), com.azure.core.util.Context.NONE); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionCreateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionCreateSamples.java index 44eb5cdc0dc71..98f0d253105b5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionCreateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionCreateSamples.java @@ -18,8 +18,7 @@ */ public final class DataFlowDebugSessionCreateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Create.json */ /** * Sample code: DataFlowDebugSession_Create. 
@@ -34,7 +33,9 @@ public static void dataFlowDebugSessionCreate(com.azure.resourcemanager.datafact .withProperties(new ManagedIntegrationRuntime() .withComputeProperties(new IntegrationRuntimeComputeProperties().withLocation("AutoResolve") .withDataFlowProperties(new IntegrationRuntimeDataFlowProperties() - .withComputeType(DataFlowComputeType.GENERAL).withCoreCount(48).withTimeToLive(10) + .withComputeType(DataFlowComputeType.GENERAL) + .withCoreCount(48) + .withTimeToLive(10) .withAdditionalProperties(mapOf())) .withAdditionalProperties(mapOf())))), com.azure.core.util.Context.NONE); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionDeleteSamples.java index 05332ce9c587c..d2a3302714e11 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionDeleteSamples.java @@ -11,8 +11,7 @@ */ public final class DataFlowDebugSessionDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Delete.json */ /** * Sample code: DataFlowDebugSession_Delete. @@ -20,8 +19,9 @@ public final class DataFlowDebugSessionDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void dataFlowDebugSessionDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"), - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", + new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionExecuteCommandSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionExecuteCommandSamples.java index 08665d7d58041..48e1b4376bcea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionExecuteCommandSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionExecuteCommandSamples.java @@ -13,8 +13,7 @@ */ public final class DataFlowDebugSessionExecuteCommandSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_ExecuteCommand.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_ExecuteCommand.json */ /** * Sample code: DataFlowDebugSession_ExecuteCommand. @@ -23,10 +22,11 @@ public final class DataFlowDebugSessionExecuteCommandSamples { */ public static void dataFlowDebugSessionExecuteCommand(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().executeCommand("exampleResourceGroup", "exampleFactoryName", - new DataFlowDebugCommandRequest().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") - .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY) - .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("source1").withRowLimits(100)), - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .executeCommand("exampleResourceGroup", "exampleFactoryName", + new DataFlowDebugCommandRequest().withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e") + .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY) + .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("source1").withRowLimits(100)), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionQueryByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionQueryByFactorySamples.java index cd7c85cc980ac..8ef81cb1275a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionQueryByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionQueryByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class 
DataFlowDebugSessionQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlowDebugSession_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_QueryByFactory.json */ /** * Sample code: DataFlowDebugSession_QueryByFactory. @@ -19,7 +18,7 @@ public final class DataFlowDebugSessionQueryByFactorySamples { */ public static void dataFlowDebugSessionQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlowDebugSessions().queryByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .queryByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateSamples.java index 9314d37ad28dd..7b63d9d556a30 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateSamples.java @@ -16,8 +16,7 @@ */ public final class DataFlowsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json */ /** * Sample code: DataFlows_Create. @@ -25,7 +24,9 @@ public final class DataFlowsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().define("exampleDataFlow").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.dataFlows() + .define("exampleDataFlow") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new MappingDataFlow().withDescription( "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") .withSources(Arrays.asList( @@ -53,8 +54,7 @@ public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFac } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json */ /** * Sample code: DataFlows_Update. @@ -62,31 +62,34 @@ public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFac * @param manager Entry point to DataFactoryManager. 
*/ public static void dataFlowsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - DataFlowResource resource = manager.dataFlows().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleDataFlow", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new MappingDataFlow().withDescription( - "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") - .withSources(Arrays.asList( - new DataFlowSource().withName("USDCurrency") - .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")), - new DataFlowSource().withName("CADSource") - .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD")))) - .withSinks(Arrays.asList( - new DataFlowSink().withName("USDSink") - .withDataset(new DatasetReference().withReferenceName("USDOutput")), - new DataFlowSink().withName("CADSink") - .withDataset(new DatasetReference().withReferenceName("CADOutput")))) - .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,", "Country as string,", - "DateTime1 as string,", "CurrentConversionRate as double", "),", "allowSchemaDrift: false,", - "validateSchema: false) ~> USDCurrency", "source(output(", "PreviousConversionRate as double,", - "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),", - "allowSchemaDrift: true,", "validateSchema: false) ~> CADSource", - "USDCurrency, CADSource union(byName: true)~> Union", - "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn", - "NewCurrencyColumn split(Country == 'USD',", - "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)", - "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink", - "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"))) + DataFlowResource resource = manager.dataFlows() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new MappingDataFlow().withDescription( + "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.") + .withSources(Arrays.asList( + new DataFlowSource().withName("USDCurrency") + .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")), + new DataFlowSource().withName("CADSource") + .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD")))) + .withSinks(Arrays.asList( + new DataFlowSink().withName("USDSink") + .withDataset(new DatasetReference().withReferenceName("USDOutput")), + new DataFlowSink().withName("CADSink") + .withDataset(new DatasetReference().withReferenceName("CADOutput")))) + .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,", + "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),", + "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(", + "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,", + "CurrentConversionRate as double", "),", "allowSchemaDrift: true,", + "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union", + "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn", + "NewCurrencyColumn split(Country == 'USD',", + "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)", + "ConditionalSplit1@USD 
sink(saveMode:'overwrite' ) ~> USDSink", + "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"))) .apply(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteSamples.java index 872c0e7817a2f..a5e2f8daeeb0c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class DataFlowsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json */ /** * Sample code: DataFlows_Delete. @@ -18,7 +17,8 @@ public final class DataFlowsDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", - com.azure.core.util.Context.NONE); + manager.dataFlows() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetSamples.java index a9f599a180828..65ffbd08f9b06 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetSamples.java @@ -9,8 +9,7 @@ */ public final class DataFlowsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json */ /** * Sample code: DataFlows_Get. @@ -18,7 +17,8 @@ public final class DataFlowsGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void dataFlowsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, - com.azure.core.util.Context.NONE); + manager.dataFlows() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactorySamples.java index d24e4be5c69f8..1a0ce1260ef20 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class DataFlowsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DataFlows_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_ListByFactory.json */ /** * Sample code: DataFlows_ListByFactory. @@ -18,7 +17,7 @@ public final class DataFlowsListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void dataFlowsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.dataFlows().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.dataFlows() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateSamples.java index b81666c651930..64ceb6ee0cc82 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateSamples.java @@ -21,8 +21,7 @@ */ public final class DatasetsCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json */ /** * Sample code: Datasets_Create. 
@@ -31,24 +30,25 @@ public final class DatasetsCreateOrUpdateSamples { */ public static void datasetsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.datasets().define("exampleDataset").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.datasets() + .define("exampleDataset") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new AzureBlobDataset() .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) - .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, - SerializerEncoding.JSON)) - .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, - SerializerEncoding.JSON)) + .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, + SerializerEncoding.JSON)) + .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, + SerializerEncoding.JSON)) .withFormat(new TextFormat())) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json */ /** * Sample code: Datasets_Update. 
@@ -57,18 +57,23 @@ public static void datasetsCreate(com.azure.resourcemanager.datafactory.DataFact */ public static void datasetsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - DatasetResource resource = manager.datasets().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleDataset", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new AzureBlobDataset().withDescription("Example description") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) - .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), - "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) - .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, - SerializerEncoding.JSON)) - .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, SerializerEncoding.JSON)) - .withFormat(new TextFormat())).apply(); + DatasetResource resource = manager.datasets() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new AzureBlobDataset().withDescription("Example description") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService")) + .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING), + "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING))) + .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class, + SerializerEncoding.JSON)) + .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class, + SerializerEncoding.JSON)) + .withFormat(new TextFormat())) + .apply(); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteSamples.java index db1f4b86aaeda..0b5e9503d90d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class DatasetsDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json */ /** * Sample code: Datasets_Delete. @@ -18,7 +17,8 @@ public final class DatasetsDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void datasetsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", - com.azure.core.util.Context.NONE); + manager.datasets() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetSamples.java index 887ecbd857a43..53fd0dc7dee7c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetSamples.java @@ -9,8 +9,7 @@ */ public final class DatasetsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json */ /** * Sample code: Datasets_Get. @@ -18,7 +17,8 @@ public final class DatasetsGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void datasetsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, - com.azure.core.util.Context.NONE); + manager.datasets() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactorySamples.java index 739944c49d029..f2b7f1e497f80 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class DatasetsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Datasets_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_ListByFactory.json */ /** * Sample code: Datasets_ListByFactory. @@ -18,7 +17,7 @@ public final class DatasetsListByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void datasetsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.datasets().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.datasets() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueByFactorySamples.java index ee117d8122c01..0d2a01244d9ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueByFactorySamples.java @@ -11,8 +11,7 @@ */ public final class ExposureControlGetFeatureValueByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_GetFeatureValueByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValueByFactory.json */ /** * Sample code: ExposureControl_GetFeatureValueByFactory. @@ -21,8 +20,10 @@ public final class ExposureControlGetFeatureValueByFactorySamples { */ public static void exposureControlGetFeatureValueByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().getFeatureValueByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac").withFeatureType("Feature"), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .getFeatureValueByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueSamples.java index ad6cac2142f67..fd363a1173433 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlGetFeatureValueSamples.java @@ -11,8 +11,7 @@ */ public final class ExposureControlGetFeatureValueSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_GetFeatureValue.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValue.json */ /** * Sample code: ExposureControl_GetFeatureValue. 
@@ -21,8 +20,10 @@ public final class ExposureControlGetFeatureValueSamples { */ public static void exposureControlGetFeatureValue(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().getFeatureValueWithResponse("WestEurope", - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac").withFeatureType("Feature"), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .getFeatureValueWithResponse("WestEurope", + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlQueryFeatureValuesByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlQueryFeatureValuesByFactorySamples.java index f6f6b2807ac72..326c2f21f8c43 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlQueryFeatureValuesByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ExposureControlQueryFeatureValuesByFactorySamples.java @@ -13,8 +13,7 @@ */ public final class ExposureControlQueryFeatureValuesByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ExposureControl_QueryFeatureValuesByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_QueryFeatureValuesByFactory.json */ /** * Sample code: ExposureControl_QueryFeatureValuesByFactory. 
@@ -23,11 +22,12 @@ public final class ExposureControlQueryFeatureValuesByFactorySamples { */ public static void exposureControlQueryFeatureValuesByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.exposureControls().queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( - new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") - .withFeatureType("Feature"), - new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))), - com.azure.core.util.Context.NONE); + manager.exposureControls() + .queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( + new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac") + .withFeatureType("Feature"), + new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesConfigureFactoryRepoSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesConfigureFactoryRepoSamples.java index e57f4515e55d5..d4c3340bebaa2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesConfigureFactoryRepoSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesConfigureFactoryRepoSamples.java @@ -12,8 +12,7 @@ */ public final class FactoriesConfigureFactoryRepoSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_ConfigureFactoryRepo.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ConfigureFactoryRepo.json */ /** * Sample code: Factories_ConfigureFactoryRepo. @@ -21,11 +20,16 @@ public final class FactoriesConfigureFactoryRepoSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void factoriesConfigureFactoryRepo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId( - "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName") - .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF").withRepositoryName("repo") - .withCollaborationBranch("master").withRootFolder("/").withLastCommitId("").withProjectName("project") - .withTenantId("")), - com.azure.core.util.Context.NONE); + manager.factories() + .configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId( + "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName") + .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF") + .withRepositoryName("repo") + .withCollaborationBranch("master") + .withRootFolder("/") + .withLastCommitId("") + .withProjectName("project") + .withTenantId("")), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesCreateOrUpdateSamples.java index d691bcd886970..44c7e4b8d0ec9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesCreateOrUpdateSamples.java @@ -9,8 +9,7 @@ */ public final class FactoriesCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_CreateOrUpdate.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_CreateOrUpdate.json */ /** * Sample code: Factories_CreateOrUpdate. @@ -18,7 +17,10 @@ public final class FactoriesCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void factoriesCreateOrUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().define("exampleFactoryName").withRegion("East US") - .withExistingResourceGroup("exampleResourceGroup").create(); + manager.factories() + .define("exampleFactoryName") + .withRegion("East US") + .withExistingResourceGroup("exampleResourceGroup") + .create(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteSamples.java index 20547e16aff17..98044f11f8116 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class FactoriesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json */ /** * Sample code: Factories_Delete. @@ -18,7 +17,8 @@ public final class FactoriesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.factories() + .deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetByResourceGroupSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetByResourceGroupSamples.java index 7a2a35dfd5ef7..e5d2a819bad43 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetByResourceGroupSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetByResourceGroupSamples.java @@ -9,8 +9,7 @@ */ public final class FactoriesGetByResourceGroupSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json */ /** * Sample code: Factories_Get. @@ -18,7 +17,8 @@ public final class FactoriesGetByResourceGroupSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void factoriesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, - com.azure.core.util.Context.NONE); + manager.factories() + .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetDataPlaneAccessSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetDataPlaneAccessSamples.java index 8a33bff102004..ea2e0eb8c946c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetDataPlaneAccessSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetDataPlaneAccessSamples.java @@ -11,8 +11,7 @@ */ public final class FactoriesGetDataPlaneAccessSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_GetDataPlaneAccess.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetDataPlaneAccess.json */ /** * Sample code: Factories_GetDataPlaneAccess. @@ -20,9 +19,13 @@ public final class FactoriesGetDataPlaneAccessSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesGetDataPlaneAccess(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName", - new UserAccessPolicy().withPermissions("r").withAccessResourcePath("").withProfileName("DefaultProfile") - .withStartTime("2018-11-10T02:46:20.2659347Z").withExpireTime("2018-11-10T09:46:20.2659347Z"), - com.azure.core.util.Context.NONE); + manager.factories() + .getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName", + new UserAccessPolicy().withPermissions("r") + .withAccessResourcePath("") + .withProfileName("DefaultProfile") + .withStartTime("2018-11-10T02:46:20.2659347Z") + .withExpireTime("2018-11-10T09:46:20.2659347Z"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetGitHubAccessTokenSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetGitHubAccessTokenSamples.java index 855ddcda4cca1..5a98dbc3592aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetGitHubAccessTokenSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesGetGitHubAccessTokenSamples.java @@ -11,8 +11,7 @@ */ public final class FactoriesGetGitHubAccessTokenSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_GetGitHubAccessToken.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetGitHubAccessToken.json */ 
/** * Sample code: Factories_GetGitHubAccessToken. @@ -20,9 +19,11 @@ public final class FactoriesGetGitHubAccessTokenSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesGetGitHubAccessToken(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.factories().getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName", - new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder").withGitHubClientId("some") - .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"), - com.azure.core.util.Context.NONE); + manager.factories() + .getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName", + new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder") + .withGitHubClientId("some") + .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListByResourceGroupSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListByResourceGroupSamples.java index 0b58377e193f0..77d34b0288eb9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListByResourceGroupSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListByResourceGroupSamples.java @@ -9,8 +9,7 @@ */ public final class FactoriesListByResourceGroupSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Factories_ListByResourceGroup.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ListByResourceGroup.json */ /** * Sample code: Factories_ListByResourceGroup. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListSamples.java index 4475f001b740f..ecad9caa4b566 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesListSamples.java @@ -9,8 +9,7 @@ */ public final class FactoriesListSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json */ /** * Sample code: Factories_List. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesUpdateSamples.java index 4c6ed6cca04ad..c59abd015e130 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/FactoriesUpdateSamples.java @@ -13,8 +13,7 @@ */ public final class FactoriesUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json */ /** * Sample code: Factories_Update. @@ -22,8 +21,10 @@ public final class FactoriesUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void factoriesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - Factory resource = manager.factories().getByResourceGroupWithResponse("exampleResourceGroup", - "exampleFactoryName", null, com.azure.core.util.Context.NONE).getValue(); + Factory resource = manager.factories() + .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update().withTags(mapOf("exampleTag", "exampleValue")).apply(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateSamples.java index 17025c7882950..63300b1fd4704 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateSamples.java @@ -13,8 +13,7 @@ */ public final class GlobalParametersCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Create.json */ /** * Sample code: GlobalParameters_Create. @@ -22,13 +21,15 @@ public final class GlobalParametersCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void globalParametersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().define("default").withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties((Map) null).create(); + manager.globalParameters() + .define("default") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") + .withProperties((Map) null) + .create(); } /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Update.json */ /** * Sample code: GlobalParameters_Update. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteSamples.java index 8b26117a454cf..f03140a985473 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class GlobalParametersDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Delete.json */ /** * Sample code: GlobalParameters_Delete. @@ -18,7 +17,8 @@ public final class GlobalParametersDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetSamples.java index f76cc9f13819a..a043ebedd95a2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetSamples.java @@ -9,9 +9,7 @@ */ public final class GlobalParametersGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get.json */ /** * Sample code: GlobalParameters_Get. 
@@ -19,7 +17,7 @@ public final class GlobalParametersGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactorySamples.java index 219c25286a8fd..5fd6b2a01936d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class GlobalParametersListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GlobalParameters_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_ListByFactory.json */ /** * Sample code: GlobalParameters_ListByFactory. @@ -18,7 +17,7 @@ public final class GlobalParametersListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void globalParametersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.globalParameters().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.globalParameters() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteSamples.java index 101568b1caae6..175265bc78c69 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimeNodesDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Delete.json */ /** * Sample code: IntegrationRuntimesNodes_Delete. 
@@ -19,7 +18,8 @@ public final class IntegrationRuntimeNodesDeleteSamples { */ public static void integrationRuntimesNodesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressSamples.java index 6ea79d517d5fb..156ae5302d1c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimeNodesGetIpAddressSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_GetIpAddress.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_GetIpAddress.json */ /** * Sample code: IntegrationRuntimeNodes_GetIpAddress. @@ -19,7 +18,8 @@ public final class IntegrationRuntimeNodesGetIpAddressSamples { */ public static void integrationRuntimeNodesGetIpAddress(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + "Node_1", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetSamples.java index a229e9559fa1d..d2c5ebc22212e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimeNodesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Get.json */ /** * Sample code: IntegrationRuntimeNodes_Get. 
@@ -18,7 +17,8 @@ public final class IntegrationRuntimeNodesGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimeNodesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateSamples.java index 20e9a3ee86f3c..d4f0f8fe11233 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateSamples.java @@ -11,8 +11,7 @@ */ public final class IntegrationRuntimeNodesUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeNodes_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Update.json */ /** * Sample code: IntegrationRuntimeNodes_Update. @@ -20,8 +19,8 @@ public final class IntegrationRuntimeNodesUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimeNodesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeNodes().updateWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", "Node_1", new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), - com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .updateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", + new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataGetSamples.java index 158e09fa27d7c..55d6826f1c98a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataGetSamples.java @@ -11,8 +11,7 @@ */ public final class IntegrationRuntimeObjectMetadataGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeObjectMetadata_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Get.json */ /** * Sample code: IntegrationRuntimeObjectMetadata_Get. 
@@ -21,8 +20,8 @@ public final class IntegrationRuntimeObjectMetadataGetSamples { */ public static void integrationRuntimeObjectMetadataGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeObjectMetadatas().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "testactivityv2", new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimeObjectMetadatas() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "testactivityv2", + new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataRefreshSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataRefreshSamples.java index cd8695c37dd64..8f38e5267377c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataRefreshSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadataRefreshSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimeObjectMetadataRefreshSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimeObjectMetadata_Refresh.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Refresh.json */ /** * Sample code: IntegrationRuntimeObjectMetadata_Refresh. 
@@ -19,7 +18,7 @@ public final class IntegrationRuntimeObjectMetadataRefreshSamples { */ public static void integrationRuntimeObjectMetadataRefresh(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimeObjectMetadatas().refresh("exampleResourceGroup", "exampleFactoryName", - "testactivityv2", com.azure.core.util.Context.NONE); + manager.integrationRuntimeObjectMetadatas() + .refresh("exampleResourceGroup", "exampleFactoryName", "testactivityv2", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples.java index 0977068680348..dcf61c1da43f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples.java @@ -11,8 +11,7 @@ */ public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_CreateLinkedIntegrationRuntime.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json */ /** * Sample code: IntegrationRuntimes_CreateLinkedIntegrationRuntime. 
@@ -21,11 +20,13 @@ public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples { */ public static void integrationRuntimesCreateLinkedIntegrationRuntime( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", - new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83") - .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283") - .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981").withDataFactoryLocation("West US"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", "exampleFactoryName", + "exampleIntegrationRuntime", + new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83") + .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283") + .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981") + .withDataFactoryLocation("West US"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateSamples.java index 5a835528b2d2a..2bfcc9eebb7c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateSamples.java @@ -11,8 +11,7 @@ */ public final class IntegrationRuntimesCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Create.json */ /** * Sample code: IntegrationRuntimes_Create. @@ -20,7 +19,8 @@ public final class IntegrationRuntimesCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().define("exampleIntegrationRuntime") + manager.integrationRuntimes() + .define("exampleIntegrationRuntime") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new SelfHostedIntegrationRuntime().withDescription("A selfhosted integration runtime")) .create(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteSamples.java index 4e6539aeaa855..d8a4448d4934c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Delete.json */ /** * Sample code: IntegrationRuntimes_Delete. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetConnectionInfoSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetConnectionInfoSamples.java index 12b167e5b9710..86b5267f5c3c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetConnectionInfoSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetConnectionInfoSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesGetConnectionInfoSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetConnectionInfo.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetConnectionInfo.json */ /** * Sample code: IntegrationRuntimes_GetConnectionInfo. 
@@ -19,7 +18,8 @@ public final class IntegrationRuntimesGetConnectionInfoSamples { */ public static void integrationRuntimesGetConnectionInfo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataSamples.java index ddafd1de7c905..9e2ce1c67e432 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesGetMonitoringDataSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetMonitoringData.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetMonitoringData.json */ /** * Sample code: IntegrationRuntimes_GetMonitoringData. @@ -19,7 +18,8 @@ public final class IntegrationRuntimesGetMonitoringDataSamples { */ public static void integrationRuntimesGetMonitoringData(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetSamples.java index 6889c1b897bd1..32ae630514093 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Get.json */ /** * Sample code: IntegrationRuntimes_Get. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", null, com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusSamples.java index d1e81fa8403ce..571f697710a3f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesGetStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_GetStatus.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetStatus.json */ /** * Sample code: IntegrationRuntimes_GetStatus. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesGetStatusSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesGetStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListAuthKeysSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListAuthKeysSamples.java index a9f4d6a12534b..48068386978d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListAuthKeysSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListAuthKeysSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesListAuthKeysSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListAuthKeys.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListAuthKeys.json */ /** * Sample code: IntegrationRuntimes_ListAuthKeys. 
@@ -19,7 +18,8 @@ public final class IntegrationRuntimesListAuthKeysSamples { */ public static void integrationRuntimesListAuthKeys(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactorySamples.java index 4be80c9eb6930..78e35962d8434 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListByFactory.json */ /** * Sample code: IntegrationRuntimes_ListByFactory. @@ -19,7 +18,7 @@ public final class IntegrationRuntimesListByFactorySamples { */ public static void integrationRuntimesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples.java index 14b7f0af0602a..999c5fc2c0433 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json */ /** * Sample code: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints. 
@@ -19,7 +18,8 @@ public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSa */ public static void integrationRuntimesOutboundNetworkDependenciesEndpoints( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", "exampleFactoryName", + "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRegenerateAuthKeySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRegenerateAuthKeySamples.java index 7adfe6a2b32a4..033f4dfdc979a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRegenerateAuthKeySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRegenerateAuthKeySamples.java @@ -12,8 +12,7 @@ */ public final class IntegrationRuntimesRegenerateAuthKeySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_RegenerateAuthKey.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RegenerateAuthKey.json */ /** * Sample code: IntegrationRuntimes_RegenerateAuthKey. 
@@ -22,9 +21,9 @@ public final class IntegrationRuntimesRegenerateAuthKeySamples { */ public static void integrationRuntimesRegenerateAuthKey(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", - new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksSamples.java index f37924edc3453..d485d020a7da3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksSamples.java @@ -11,8 +11,7 @@ */ public final class IntegrationRuntimesRemoveLinksSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_RemoveLinks.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RemoveLinks.json */ /** * Sample code: IntegrationRuntimes_Upgrade. @@ -20,9 +19,9 @@ public final class IntegrationRuntimesRemoveLinksSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", - new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"), - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartSamples.java index 5cfd6eb6dd39e..35f9eeb3509d6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesStartSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Start.json */ /** * Sample code: IntegrationRuntimes_Start. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesStartSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().start("exampleResourceGroup", "exampleFactoryName", - "exampleManagedIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .start("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopSamples.java index 8d81e941c9a06..bd15fb9fb472d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesStopSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Stop.json */ /** * Sample code: IntegrationRuntimes_Stop. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesStopSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().stop("exampleResourceGroup", "exampleFactoryName", - "exampleManagedIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .stop("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsSamples.java index 3970b751292d4..0a1a17c8d08cc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesSyncCredentialsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_SyncCredentials.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_SyncCredentials.json */ /** * Sample code: IntegrationRuntimes_SyncCredentials. @@ -19,7 +18,8 @@ public final class IntegrationRuntimesSyncCredentialsSamples { */ public static void integrationRuntimesSyncCredentials(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpdateSamples.java index 25ea920b1f72a..4d098374f75c5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpdateSamples.java @@ -12,8 +12,7 @@ */ public final class IntegrationRuntimesUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Update.json */ /** * Sample code: IntegrationRuntimes_Update. @@ -21,8 +20,10 @@ public final class IntegrationRuntimesUpdateSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void integrationRuntimesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - IntegrationRuntimeResource resource = manager.integrationRuntimes().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleIntegrationRuntime", null, com.azure.core.util.Context.NONE).getValue(); + IntegrationRuntimeResource resource = manager.integrationRuntimes() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update().withAutoUpdate(IntegrationRuntimeAutoUpdate.OFF).withUpdateDelayOffset("\"PT3H\"").apply(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeSamples.java index 3705f50eeda3a..869b47a3900c0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeSamples.java @@ -9,8 +9,7 @@ */ public final class IntegrationRuntimesUpgradeSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * IntegrationRuntimes_Upgrade.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Upgrade.json */ /** * Sample code: IntegrationRuntimes_Upgrade. @@ -18,7 +17,8 @@ public final class IntegrationRuntimesUpgradeSamples { * @param manager Entry point to DataFactoryManager. */ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.integrationRuntimes().upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleIntegrationRuntime", com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateSamples.java index 1bbab8b01bceb..cdebbaa619c26 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateSamples.java @@ -15,9 +15,7 @@ */ public final class LinkedServicesCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create.json */ /** * Sample code: LinkedServices_Create. 
@@ -26,19 +24,19 @@ public final class LinkedServicesCreateOrUpdateSamples { */ public static void linkedServicesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.linkedServices().define("exampleLinkedService") + manager.linkedServices() + .define("exampleLinkedService") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new AzureStorageLinkedService() - .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( + .withProperties(new AzureStorageLinkedService().withConnectionString(SerializerFactory + .createDefaultManagementSerializerAdapter() + .deserialize( "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", Object.class, SerializerEncoding.JSON))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update - * .json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update.json */ /** * Sample code: LinkedServices_Update. @@ -47,12 +45,16 @@ public static void linkedServicesCreate(com.azure.resourcemanager.datafactory.Da */ public static void linkedServicesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - LinkedServiceResource resource = manager.linkedServices().getWithResponse("exampleResourceGroup", - "exampleFactoryName", "exampleLinkedService", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withProperties(new AzureStorageLinkedService().withDescription("Example description") - .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", - Object.class, SerializerEncoding.JSON))) + LinkedServiceResource resource = manager.linkedServices() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withProperties(new AzureStorageLinkedService().withDescription("Example description") + .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize( + "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}", + Object.class, SerializerEncoding.JSON))) .apply(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteSamples.java index 8bd41b296556c..47295b94e913e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteSamples.java @@ -9,9 +9,7 @@ */ public final class LinkedServicesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete - * .json + * x-ms-original-file: 
specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete.json */ /** * Sample code: LinkedServices_Delete. @@ -19,7 +17,8 @@ public final class LinkedServicesDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void linkedServicesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleLinkedService", com.azure.core.util.Context.NONE); + manager.linkedServices() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetSamples.java index f34b6d95cfdfb..a3a62cbc60be9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetSamples.java @@ -9,9 +9,7 @@ */ public final class LinkedServicesGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get.json */ /** * Sample code: LinkedServices_Get. @@ -19,7 +17,8 @@ public final class LinkedServicesGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void linkedServicesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", - null, com.azure.core.util.Context.NONE); + manager.linkedServices() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactorySamples.java index 00e76e2e37493..9320163b269dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class LinkedServicesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * LinkedServices_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_ListByFactory.json */ /** * Sample code: LinkedServices_ListByFactory. @@ -18,7 +17,7 @@ public final class LinkedServicesListByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void linkedServicesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.linkedServices().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.linkedServices() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateSamples.java index 6cfa7d76ec68d..7306c7f57bdfc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateSamples.java @@ -14,8 +14,7 @@ */ public final class ManagedPrivateEndpointsCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Create.json */ /** * Sample code: ManagedVirtualNetworks_Create. @@ -23,10 +22,12 @@ public final class ManagedPrivateEndpointsCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().define("exampleManagedPrivateEndpointName") + manager.managedPrivateEndpoints() + .define("exampleManagedPrivateEndpointName") .withExistingManagedVirtualNetwork("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName") - .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList()).withGroupId("blob") + .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList()) + .withGroupId("blob") .withPrivateLinkResourceId( "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage") .withAdditionalProperties(mapOf())) diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteSamples.java index 53f92131fadb7..ab2c90a5f0427 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class ManagedPrivateEndpointsDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Delete.json + * x-ms-original-file: 
specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Delete.json */ /** * Sample code: ManagedVirtualNetworks_Delete. @@ -18,7 +17,8 @@ public final class ManagedPrivateEndpointsDeleteSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedVirtualNetworksDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetSamples.java index 3b0bd35d67836..31b1890aa6aca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetSamples.java @@ -9,8 +9,7 @@ */ public final class ManagedPrivateEndpointsGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Get.json */ /** * Sample code: ManagedPrivateEndpoints_Get. @@ -18,8 +17,8 @@ public final class ManagedPrivateEndpointsGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void managedPrivateEndpointsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", null, - com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + "exampleManagedPrivateEndpointName", null, com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactorySamples.java index 678b8aa4e6986..64afa500c35a6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class ManagedPrivateEndpointsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedPrivateEndpoints_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_ListByFactory.json */ /** * Sample code: ManagedPrivateEndpoints_ListByFactory. @@ -19,7 +18,8 @@ public final class ManagedPrivateEndpointsListByFactorySamples { */ public static void managedPrivateEndpointsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedPrivateEndpoints().listByFactory("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .listByFactory("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateSamples.java index fb47bfc56329e..b2598059eb1f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateSamples.java @@ -13,8 +13,7 @@ */ public final class ManagedVirtualNetworksCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Create.json */ /** * Sample code: ManagedVirtualNetworks_Create. 
@@ -22,9 +21,11 @@ public final class ManagedVirtualNetworksCreateOrUpdateSamples { * @param manager Entry point to DataFactoryManager. */ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().define("exampleManagedVirtualNetworkName") + manager.managedVirtualNetworks() + .define("exampleManagedVirtualNetworkName") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") - .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf())).create(); + .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf())) + .create(); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetSamples.java index 18ba58a5c8777..cfa6942410194 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetSamples.java @@ -9,8 +9,7 @@ */ public final class ManagedVirtualNetworksGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Get.json */ /** * Sample code: ManagedVirtualNetworks_Get. @@ -18,7 +17,8 @@ public final class ManagedVirtualNetworksGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void managedVirtualNetworksGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleManagedVirtualNetworkName", null, com.azure.core.util.Context.NONE); + manager.managedVirtualNetworks() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactorySamples.java index d857efd520e43..ae6f1362b89fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class ManagedVirtualNetworksListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ManagedVirtualNetworks_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_ListByFactory.json */ /** * Sample code: ManagedVirtualNetworks_ListByFactory. @@ -19,7 +18,7 @@ public final class ManagedVirtualNetworksListByFactorySamples { */ public static void managedVirtualNetworksListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.managedVirtualNetworks().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.managedVirtualNetworks() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/OperationsListSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/OperationsListSamples.java index a89fb1539aeab..9d2e79197593a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/OperationsListSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/OperationsListSamples.java @@ -9,8 +9,7 @@ */ public final class OperationsListSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json */ /** * Sample code: Operations_List. 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelSamples.java index 16555a76c9550..fb28a8b61a6b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelSamples.java @@ -9,9 +9,7 @@ */ public final class PipelineRunsCancelSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.json */ /** * Sample code: PipelineRuns_Cancel. @@ -19,7 +17,8 @@ public final class PipelineRunsCancelSamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelineRunsCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().cancelWithResponse("exampleResourceGroup", "exampleFactoryName", - "16ac5348-ff82-4f95-a80d-638c1d47b721", null, com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "16ac5348-ff82-4f95-a80d-638c1d47b721", + null, com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetSamples.java index bcdff7b01795b..88a5318d333c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetSamples.java @@ -9,8 +9,7 @@ */ public final class PipelineRunsGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json */ /** * Sample code: PipelineRuns_Get. @@ -18,7 +17,8 @@ public final class PipelineRunsGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void pipelineRunsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsQueryByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsQueryByFactorySamples.java index c9759b3ae076f..542e9315d4cb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsQueryByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsQueryByFactorySamples.java @@ -16,8 +16,7 @@ */ public final class PipelineRunsQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * PipelineRuns_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_QueryByFactory.json */ /** * Sample code: PipelineRuns_QueryByFactory. @@ -25,11 +24,13 @@ public final class PipelineRunsQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelineRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelineRuns().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) - .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) - .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME) - .withOperator(RunQueryFilterOperator.EQUALS).withValues(Arrays.asList("examplePipeline")))), - com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) + .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) + .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME) + .withOperator(RunQueryFilterOperator.EQUALS) + .withValues(Arrays.asList("examplePipeline")))), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateSamples.java index 2b6b2f1463340..6d21c0db68b0b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateSamples.java @@ -29,8 +29,7 @@ */ public final class 
PipelinesCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json */ /** * Sample code: Pipelines_Create. @@ -39,35 +38,39 @@ public final class PipelinesCreateOrUpdateSamples { */ public static void pipelinesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.pipelines().define("examplePipeline").withExistingFactory("exampleResourceGroup", "exampleFactoryName") + manager.pipelines() + .define("examplePipeline") + .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity") .withIsSequential(true) .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") - .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset").withParameters( - mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) + .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters( + mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") .withParameters(mapOf("MyFileName", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, - SerializerEncoding.JSON), + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, + SerializerEncoding.JSON), "MyFolderPath", "examplecontainer")))) - .withSource(new BlobSource()).withSink(new BlobSink()).withDataIntegrationUnits(32))))) + .withSource(new BlobSource()) + .withSink(new BlobSink()) + .withDataIntegrationUnits(32))))) .withParameters(mapOf("JobId", new ParameterSpecification().withType(ParameterType.STRING), "OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY))) .withVariables(mapOf("TestVariableArray", new VariableSpecification().withType(VariableType.ARRAY))) .withRunDimensions(mapOf("JobId", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class, - SerializerEncoding.JSON))) + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class, + SerializerEncoding.JSON))) .withPolicy(new PipelinePolicy() .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00"))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json */ /** * Sample code: Pipelines_Update. 
@@ -76,20 +79,28 @@ public static void pipelinesCreate(com.azure.resourcemanager.datafactory.DataFac */ public static void pipelinesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - PipelineResource resource = manager.pipelines().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "examplePipeline", null, com.azure.core.util.Context.NONE).getValue(); - resource.update().withDescription("Example description").withActivities(Arrays.asList(new ForEachActivity() - .withName("ExampleForeachActivity").withIsSequential(true) - .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) - .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") - .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") - .withParameters(mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) - .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") - .withParameters(mapOf("MyFileName", - SerializerFactory.createDefaultManagementSerializerAdapter().deserialize( - "{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, SerializerEncoding.JSON), - "MyFolderPath", "examplecontainer")))) - .withSource(new BlobSource()).withSink(new BlobSink()).withDataIntegrationUnits(32))))) + PipelineResource resource = manager.pipelines() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, + com.azure.core.util.Context.NONE) + .getValue(); + resource.update() + .withDescription("Example description") + .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity") + .withIsSequential(true) + .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList")) + .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity") + .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters( + mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer")))) + .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset") + .withParameters(mapOf("MyFileName", + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class, + SerializerEncoding.JSON), + "MyFolderPath", "examplecontainer")))) + .withSource(new BlobSource()) + .withSink(new BlobSink()) + .withDataIntegrationUnits(32))))) .withParameters(mapOf("OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY))) .withPolicy(new PipelinePolicy() .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00"))) diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunSamples.java index b2c02f4a8b88f..096b1d13ab390 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunSamples.java @@ -15,9 +15,7 @@ */ public final class PipelinesCreateRunSamples { /* - * x-ms-original-file: - * 
specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json */ /** * Sample code: Pipelines_CreateRun. @@ -26,11 +24,13 @@ public final class PipelinesCreateRunSamples { */ public static void pipelinesCreateRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager.pipelines().createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, - null, null, null, - mapOf("OutputBlobNameList", SerializerFactory.createDefaultManagementSerializerAdapter() - .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)), - com.azure.core.util.Context.NONE); + manager.pipelines() + .createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, null, null, + null, + mapOf("OutputBlobNameList", + SerializerFactory.createDefaultManagementSerializerAdapter() + .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)), + com.azure.core.util.Context.NONE); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteSamples.java index a5f6e4bc6993b..9270f26ee26c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class PipelinesDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json */ /** * Sample code: Pipelines_Delete. @@ -18,7 +17,8 @@ public final class PipelinesDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void pipelinesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", - com.azure.core.util.Context.NONE); + manager.pipelines() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetSamples.java index 8203b9831ff09..c0ce6b4a9f9a6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetSamples.java @@ -9,8 +9,7 @@ */ public final class PipelinesGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json */ /** * Sample code: Pipelines_Get. @@ -18,7 +17,8 @@ public final class PipelinesGetSamples { * @param manager Entry point to DataFactoryManager. */ public static void pipelinesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, - com.azure.core.util.Context.NONE); + manager.pipelines() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactorySamples.java index aeca938bb6199..765e55a4ff475 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class PipelinesListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Pipelines_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_ListByFactory.json */ /** * Sample code: Pipelines_ListByFactory. @@ -18,7 +17,7 @@ public final class PipelinesListByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void pipelinesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.pipelines().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.pipelines() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactorySamples.java index 9b6026a700399..13a9f10386edb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class PrivateEndPointConnectionsListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * PrivateEndPointConnections_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PrivateEndPointConnections_ListByFactory.json */ /** * Sample code: privateEndPointConnections_ListByFactory. @@ -19,7 +18,7 @@ public final class PrivateEndPointConnectionsListByFactorySamples { */ public static void privateEndPointConnectionsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndPointConnections().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.privateEndPointConnections() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationCreateOrUpdateSamples.java index f135d01b1dfed..df152506985ed 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationCreateOrUpdateSamples.java @@ -13,8 +13,7 @@ */ public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * ApproveRejectPrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ApproveRejectPrivateEndpointConnection.json */ /** * Sample code: Approves or rejects a private endpoint connection for a factory. 
@@ -23,11 +22,13 @@ public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples { */ public static void approvesOrRejectsAPrivateEndpointConnectionForAFactory( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().define("connection") + manager.privateEndpointConnectionOperations() + .define("connection") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new PrivateLinkConnectionApprovalRequest() .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("Approved") - .withDescription("Approved by admin.").withActionsRequired("")) + .withDescription("Approved by admin.") + .withActionsRequired("")) .withPrivateEndpoint(new PrivateEndpoint().withId( "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"))) .create(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationDeleteSamples.java index d87d1727779e4..49eec711c63af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class PrivateEndpointConnectionOperationDeleteSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * DeletePrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DeletePrivateEndpointConnection.json */ /** * Sample code: Delete a private endpoint connection for a datafactory. 
@@ -19,7 +18,8 @@ public final class PrivateEndpointConnectionOperationDeleteSamples { */ public static void deleteAPrivateEndpointConnectionForADatafactory( com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", - "connection", com.azure.core.util.Context.NONE); + manager.privateEndpointConnectionOperations() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationGetSamples.java index 2aff36cc31735..b8a4eb7984fce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationGetSamples.java @@ -9,8 +9,7 @@ */ public final class PrivateEndpointConnectionOperationGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GetPrivateEndpointConnection.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateEndpointConnection.json */ /** * Sample code: Get a private endpoint connection for a datafactory. @@ -19,7 +18,8 @@ public final class PrivateEndpointConnectionOperationGetSamples { */ public static void getAPrivateEndpointConnectionForADatafactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateEndpointConnectionOperations().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "connection", null, com.azure.core.util.Context.NONE); + manager.privateEndpointConnectionOperations() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetSamples.java index 6ed39ae67ada8..30e519b656de2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetSamples.java @@ -9,8 +9,7 @@ */ public final class PrivateLinkResourcesGetSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * GetPrivateLinkResources.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateLinkResources.json */ /** * Sample code: Get private link resources of a site. 
@@ -19,7 +18,7 @@ public final class PrivateLinkResourcesGetSamples { */ public static void getPrivateLinkResourcesOfASite(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.privateLinkResources().getWithResponse("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.privateLinkResources() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelSamples.java index 0451db85b6aa8..19128511304ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelSamples.java @@ -9,9 +9,7 @@ */ public final class TriggerRunsCancelSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.json */ /** * Sample code: Triggers_Cancel. @@ -19,7 +17,8 @@ public final class TriggerRunsCancelSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.triggerRuns() + .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsQueryByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsQueryByFactorySamples.java index 7d33f01881b47..b67c3e5645389 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsQueryByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsQueryByFactorySamples.java @@ -16,8 +16,7 @@ */ public final class TriggerRunsQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * TriggerRuns_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_QueryByFactory.json */ /** * Sample code: TriggerRuns_QueryByFactory. @@ -25,11 +24,13 @@ public final class TriggerRunsQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggerRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) - .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) - .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) - .withOperator(RunQueryFilterOperator.EQUALS).withValues(Arrays.asList("exampleTrigger")))), - com.azure.core.util.Context.NONE); + manager.triggerRuns() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z")) + .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")) + .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) + .withOperator(RunQueryFilterOperator.EQUALS) + .withValues(Arrays.asList("exampleTrigger")))), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunSamples.java index ef0c6f94e775d..34f23eea3913f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunSamples.java @@ -9,9 +9,7 @@ */ public final class TriggerRunsRerunSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun. - * json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun.json */ /** * Sample code: Triggers_Rerun. @@ -19,7 +17,8 @@ public final class TriggerRunsRerunSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersRerun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggerRuns().rerunWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); + manager.triggerRuns() + .rerunWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateSamples.java index 14d85ce55426e..8095ba3496a1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateSamples.java @@ -23,8 +23,7 @@ */ public final class TriggersCreateOrUpdateSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json */ /** * Sample code: Triggers_Create. @@ -33,9 +32,8 @@ public final class TriggersCreateOrUpdateSamples { */ public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - manager - .triggers().define( - "exampleTrigger") + manager.triggers() + .define("exampleTrigger") .withExistingFactory("exampleResourceGroup", "exampleFactoryName") .withProperties(new ScheduleTrigger() .withPipelines(Arrays.asList(new TriggerPipelineReference() @@ -44,15 +42,16 @@ public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFact SerializerFactory.createDefaultManagementSerializerAdapter() .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON))))) .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) - .withInterval(4).withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z")) - .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z")).withTimeZone("UTC") + .withInterval(4) + .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z")) + .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z")) + .withTimeZone("UTC") .withAdditionalProperties(mapOf()))) .create(); } /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json */ /** * Sample code: Triggers_Update. 
@@ -61,8 +60,10 @@ public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFact */ public static void triggersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) throws IOException { - TriggerResource resource = manager.triggers().getWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleTrigger", null, com.azure.core.util.Context.NONE).getValue(); + TriggerResource resource = manager.triggers() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, + com.azure.core.util.Context.NONE) + .getValue(); resource.update() .withProperties(new ScheduleTrigger().withDescription("Example description") .withPipelines(Arrays.asList(new TriggerPipelineReference() @@ -71,8 +72,10 @@ public static void triggersUpdate(com.azure.resourcemanager.datafactory.DataFact SerializerFactory.createDefaultManagementSerializerAdapter() .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON))))) .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) - .withInterval(4).withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z")) - .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z")).withTimeZone("UTC") + .withInterval(4) + .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z")) + .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z")) + .withTimeZone("UTC") .withAdditionalProperties(mapOf()))) .apply(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteSamples.java index 0cbb779ebc558..e325e52a2d6b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersDeleteSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json */ /** * Sample code: Triggers_Delete. @@ -18,7 +17,8 @@ public final class TriggersDeleteSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusSamples.java index 45878a952855e..30b299d5571e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersGetEventSubscriptionStatusSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_GetEventSubscriptionStatus.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_GetEventSubscriptionStatus.json */ /** * Sample code: Triggers_GetEventSubscriptionStatus. @@ -19,7 +18,8 @@ public final class TriggersGetEventSubscriptionStatusSamples { */ public static void triggersGetEventSubscriptionStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().getEventSubscriptionStatusWithResponse("exampleResourceGroup", "exampleFactoryName", - "exampleTrigger", com.azure.core.util.Context.NONE); + manager.triggers() + .getEventSubscriptionStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetSamples.java index 62a94d43cbd42..d328188c2ee5d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersGetSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersGetSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json */ /** * Sample code: Triggers_Get. @@ -18,7 +17,8 @@ public final class TriggersGetSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, - com.azure.core.util.Context.NONE); + manager.triggers() + .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactorySamples.java index 4542178674da8..ddcc6f7ba7068 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactorySamples.java @@ -9,8 +9,7 @@ */ public final class TriggersListByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_ListByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_ListByFactory.json */ /** * Sample code: Triggers_ListByFactory. @@ -18,7 +17,7 @@ public final class TriggersListByFactorySamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().listByFactory("exampleResourceGroup", "exampleFactoryName", - com.azure.core.util.Context.NONE); + manager.triggers() + .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersQueryByFactorySamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersQueryByFactorySamples.java index faf6caa719744..3748a47e10997 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersQueryByFactorySamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersQueryByFactorySamples.java @@ -11,8 +11,7 @@ */ public final class TriggersQueryByFactorySamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_QueryByFactory.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_QueryByFactory.json */ /** * Sample code: Triggers_QueryByFactory. @@ -20,7 +19,9 @@ public final class TriggersQueryByFactorySamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", - new TriggerFilterParameters().withParentTriggerName("exampleTrigger"), com.azure.core.util.Context.NONE); + manager.triggers() + .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName", + new TriggerFilterParameters().withParentTriggerName("exampleTrigger"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStartSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStartSamples.java index d74888b7a17eb..28fcbdc9e1de4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStartSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStartSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersStartSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json */ /** * Sample code: Triggers_Start. @@ -18,7 +17,7 @@ public final class TriggersStartSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStopSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStopSamples.java index ece7ac25c7376..daa6c4d2cfb84 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStopSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersStopSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersStopSamples { /* - * x-ms-original-file: - * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json */ /** * Sample code: Triggers_Stop. @@ -18,7 +17,7 @@ public final class TriggersStopSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsSamples.java index 3bd32b9cb5f89..1aa5229b07f5c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersSubscribeToEventsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_SubscribeToEvents.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_SubscribeToEvents.json */ /** * Sample code: Triggers_SubscribeToEvents. @@ -18,7 +17,8 @@ public final class TriggersSubscribeToEventsSamples { * @param manager Entry point to DataFactoryManager. */ public static void triggersSubscribeToEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsSamples.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsSamples.java index a80483243fe91..8b0a5892017a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsSamples.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/samples/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsSamples.java @@ -9,8 +9,7 @@ */ public final class TriggersUnsubscribeFromEventsSamples { /* - * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ - * Triggers_UnsubscribeFromEvents.json + * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_UnsubscribeFromEvents.json */ /** * Sample code: Triggers_UnsubscribeFromEvents. @@ -18,7 +17,8 @@ public final class TriggersUnsubscribeFromEventsSamples { * @param manager Entry point to DataFactoryManager. 
*/ public static void triggersUnsubscribeFromEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) { - manager.triggers().unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", - com.azure.core.util.Context.NONE); + manager.triggers() + .unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java index d16a90fcfadc4..96325df736729 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java @@ -16,19 +16,21 @@ public final class ActivityDependencyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ActivityDependency model = BinaryData.fromString( - "{\"activity\":\"wcoezbrhub\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"okkqfqjbvleo\":\"dataygo\"}}") + "{\"activity\":\"biorktal\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Skipped\"],\"\":{\"iloxggdufiq\":\"datahxmsivfo\",\"hvcyyysfg\":\"datadieuzaofj\"}}") .toObject(ActivityDependency.class); - Assertions.assertEquals("wcoezbrhub", model.activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependencyConditions().get(0)); + Assertions.assertEquals("biorktal", model.activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependencyConditions().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ActivityDependency model = new ActivityDependency().withActivity("wcoezbrhub") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)).withAdditionalProperties(mapOf()); + ActivityDependency model = new ActivityDependency().withActivity("biorktal") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(ActivityDependency.class); - Assertions.assertEquals("wcoezbrhub", model.activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependencyConditions().get(0)); + Assertions.assertEquals("biorktal", model.activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependencyConditions().get(0)); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java index f8673593b5520..657471c12653c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java @@ -14,22 +14,25 @@ public final class ActivityPolicyTests { @org.junit.jupiter.api.Test public void 
testDeserialize() throws Exception { ActivityPolicy model = BinaryData.fromString( - "{\"timeout\":\"datab\",\"retry\":\"dataqwwtevfeugc\",\"retryIntervalInSeconds\":67435770,\"secureInput\":true,\"secureOutput\":true,\"\":{\"dleohysdgkb\":\"datavfrymqqfksqfc\",\"jqztrpjme\":\"dataxygwvtkrqiyuqds\"}}") + "{\"timeout\":\"dataqrfywwzsip\",\"retry\":\"dataqhvktcztmqdkh\",\"retryIntervalInSeconds\":1446450607,\"secureInput\":true,\"secureOutput\":false,\"\":{\"xqfrntz\":\"datavlyaprjzbx\"}}") .toObject(ActivityPolicy.class); - Assertions.assertEquals(67435770, model.retryIntervalInSeconds()); + Assertions.assertEquals(1446450607, model.retryIntervalInSeconds()); Assertions.assertEquals(true, model.secureInput()); - Assertions.assertEquals(true, model.secureOutput()); + Assertions.assertEquals(false, model.secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ActivityPolicy model = new ActivityPolicy().withTimeout("datab").withRetry("dataqwwtevfeugc") - .withRetryIntervalInSeconds(67435770).withSecureInput(true).withSecureOutput(true) + ActivityPolicy model = new ActivityPolicy().withTimeout("dataqrfywwzsip") + .withRetry("dataqhvktcztmqdkh") + .withRetryIntervalInSeconds(1446450607) + .withSecureInput(true) + .withSecureOutput(false) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(ActivityPolicy.class); - Assertions.assertEquals(67435770, model.retryIntervalInSeconds()); + Assertions.assertEquals(1446450607, model.retryIntervalInSeconds()); Assertions.assertEquals(true, model.secureInput()); - Assertions.assertEquals(true, model.secureOutput()); + Assertions.assertEquals(false, model.secureOutput()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java index b6b17cbc4b111..dccb1481b5d06 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java @@ -13,17 +13,17 @@ public final class ActivityRunTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ActivityRun model = BinaryData.fromString( - "{\"pipelineName\":\"tj\",\"pipelineRunId\":\"ysdzhez\",\"activityName\":\"vaiqyuvvf\",\"activityType\":\"kphhq\",\"activityRunId\":\"kvylauyavl\",\"linkedServiceName\":\"mncsttijfybvp\",\"status\":\"krsgsgb\",\"activityRunStart\":\"2021-08-14T00:34:57Z\",\"activityRunEnd\":\"2021-08-27T04:01:57Z\",\"durationInMs\":1030277029,\"input\":\"datadgkynscliqhzvhxn\",\"output\":\"datamtk\",\"error\":\"dataotppnv\",\"\":{\"dhlfkqojpykvgt\":\"dataxhihfrbbcevqagtl\"}}") + "{\"pipelineName\":\"xvd\",\"pipelineRunId\":\"fwafqrouda\",\"activityName\":\"avehhrvkbunzo\",\"activityType\":\"dhcxgkmoy\",\"activityRunId\":\"dyuib\",\"linkedServiceName\":\"fdn\",\"status\":\"ydvfvfcjnae\",\"activityRunStart\":\"2020-12-25T22:41:16Z\",\"activityRunEnd\":\"2021-04-06T16:13:31Z\",\"durationInMs\":716191050,\"input\":\"dataorffukiscv\",\"output\":\"datazhwplefaxvxilc\",\"error\":\"datagnhnzeyq\",\"\":{\"dbeesmie\":\"datajfzqlqhycavodgg\",\"wqfbylyrfgiagt\":\"datanlrariaawiuagy\",\"zjvusfzldmo\":\"dataojocqwogf\",\"own\":\"datauxylfsbtkadpy\"}}") 
.toObject(ActivityRun.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ActivityRun model = new ActivityRun().withAdditionalProperties( - mapOf("durationInMs", 1030277029, "linkedServiceName", "mncsttijfybvp", "activityRunStart", - "2021-08-14T00:34:57Z", "activityRunEnd", "2021-08-27T04:01:57Z", "activityName", "vaiqyuvvf", "error", - "dataotppnv", "pipelineName", "tj", "output", "datamtk", "activityRunId", "kvylauyavl", "input", - "datadgkynscliqhzvhxn", "pipelineRunId", "ysdzhez", "activityType", "kphhq", "status", "krsgsgb")); + mapOf("durationInMs", 716191050, "linkedServiceName", "fdn", "activityRunStart", "2020-12-25T22:41:16Z", + "activityRunEnd", "2021-04-06T16:13:31Z", "activityName", "avehhrvkbunzo", "error", "datagnhnzeyq", + "pipelineName", "xvd", "output", "datazhwplefaxvxilc", "activityRunId", "dyuib", "input", + "dataorffukiscv", "pipelineRunId", "fwafqrouda", "activityType", "dhcxgkmoy", "status", "ydvfvfcjnae")); model = BinaryData.fromObject(model).toObject(ActivityRun.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java index 94f9d9d34ae07..0bc036ab7543d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java @@ -20,38 +20,48 @@ public final class ActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Activity model = BinaryData.fromString( - "{\"type\":\"Activity\",\"name\":\"volvtn\",\"description\":\"qfzgemjdftul\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"amtmcz\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"qioknssxmojm\":\"datawcw\"}},{\"activity\":\"vpkjpr\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"ydbsd\":\"datazqljyxgtczh\"}}],\"userProperties\":[{\"name\":\"kx\",\"value\":\"dataaehvbbxuri\"}],\"\":{\"ckpyklyhplu\":\"datafnhtbaxkgxyw\",\"gzibthostgktstv\":\"datadpvruud\",\"odqkdlwwqfb\":\"dataxeclzedqbcvhzlhp\",\"lmbtxhwgfwsrt\":\"datamlkxtrqjfs\"}}") + "{\"type\":\"hud\",\"name\":\"goo\",\"description\":\"kqfqjbvl\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"iqtqzfavyvnq\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"jkqa\":\"datayeua\"}},{\"activity\":\"qgzsles\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"vbquwr\":\"datatiewdj\",\"uffkmrqemvvh\":\"dataehwagoh\"}},{\"activity\":\"xtdr\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Failed\",\"Skipped\"],\"\":{\"ewzcjznmwcp\":\"databj\",\"v\":\"dataguaadraufactkahz\",\"eekulfgslqubkwd\":\"datajjziuxxpsh\"}}],\"userProperties\":[{\"name\":\"rds\",\"value\":\"datatujbazpju\"},{\"name\":\"hminyflnorwmduv\",\"value\":\"datapklvxw\"},{\"name\":\"ygdxpgpqchis\",\"value\":\"dataepn\"},{\"name\":\"bjcrxgibbdaxco\",\"value\":\"datafozauorsuk\"}],\"\":{\"vnuuepzl\":\"databqplh\",\"vmnnrw\":\"dataphwzsoldweyuqdu\"}}") .toObject(Activity.class); - Assertions.assertEquals("volvtn", model.name()); - Assertions.assertEquals("qfzgemjdftul", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("amtmcz", model.dependsOn().get(0).activity()); + Assertions.assertEquals("goo", model.name()); + Assertions.assertEquals("kqfqjbvl", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("iqtqzfavyvnq", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kx", model.userProperties().get(0).name()); + Assertions.assertEquals("rds", model.userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Activity model = new Activity().withName("volvtn").withDescription("qfzgemjdftul") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + Activity model = new Activity().withName("goo") + .withDescription("kqfqjbvl") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("amtmcz") + new ActivityDependency().withActivity("iqtqzfavyvnq") .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vpkjpr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + new ActivityDependency().withActivity("qgzsles") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xtdr") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, + DependencyCondition.FAILED, DependencyCondition.SKIPPED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("kx").withValue("dataaehvbbxuri"))) - .withAdditionalProperties(mapOf("type", "Activity")); + .withUserProperties(Arrays.asList(new UserProperty().withName("rds").withValue("datatujbazpju"), + new UserProperty().withName("hminyflnorwmduv").withValue("datapklvxw"), + new UserProperty().withName("ygdxpgpqchis").withValue("dataepn"), + new UserProperty().withName("bjcrxgibbdaxco").withValue("datafozauorsuk"))) + .withAdditionalProperties(mapOf("type", "hud")); model = BinaryData.fromObject(model).toObject(Activity.class); - Assertions.assertEquals("volvtn", model.name()); - Assertions.assertEquals("qfzgemjdftul", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("amtmcz", model.dependsOn().get(0).activity()); + Assertions.assertEquals("goo", model.name()); + Assertions.assertEquals("kqfqjbvl", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("iqtqzfavyvnq", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kx", model.userProperties().get(0).name()); + Assertions.assertEquals("rds", 
model.userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java index 61e54aa01de84..fa501b0264b22 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java @@ -11,16 +11,15 @@ public final class AddDataFlowToDebugSessionResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AddDataFlowToDebugSessionResponseInner model = BinaryData.fromString("{\"jobVersion\":\"fbcgwgcloxoebqin\"}") - .toObject(AddDataFlowToDebugSessionResponseInner.class); - Assertions.assertEquals("fbcgwgcloxoebqin", model.jobVersion()); + AddDataFlowToDebugSessionResponseInner model + = BinaryData.fromString("{\"jobVersion\":\"r\"}").toObject(AddDataFlowToDebugSessionResponseInner.class); + Assertions.assertEquals("r", model.jobVersion()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AddDataFlowToDebugSessionResponseInner model - = new AddDataFlowToDebugSessionResponseInner().withJobVersion("fbcgwgcloxoebqin"); + AddDataFlowToDebugSessionResponseInner model = new AddDataFlowToDebugSessionResponseInner().withJobVersion("r"); model = BinaryData.fromObject(model).toObject(AddDataFlowToDebugSessionResponseInner.class); - Assertions.assertEquals("fbcgwgcloxoebqin", model.jobVersion()); + Assertions.assertEquals("r", model.jobVersion()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java index 3bbb82691f33e..2ef6cf9101991 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java @@ -19,34 +19,35 @@ public final class AmazonMwsObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonMwsObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"AmazonMWSObject\",\"typeProperties\":{\"tableName\":\"datagirrpwnqtvuxeuj\"},\"description\":\"ssijuaxxfd\",\"structure\":\"datapkcpws\",\"schema\":\"datannmjun\",\"linkedServiceName\":{\"referenceName\":\"tl\",\"parameters\":{\"kcsihxvta\":\"datatjhbcycgq\",\"zqqgug\":\"datawwfopxpryxnsbubw\"}},\"parameters\":{\"mkdhwqcqweba\":{\"type\":\"SecureString\",\"defaultValue\":\"datahtq\"},\"rgvypa\":{\"type\":\"String\",\"defaultValue\":\"datapkephujeucosvkke\"},\"ueez\":{\"type\":\"String\",\"defaultValue\":\"datapyillg\"},\"zyojfch\":{\"type\":\"Float\",\"defaultValue\":\"datafbuqxknvmcgmb\"}},\"annotations\":[\"dataarex\",\"datao\"],\"folder\":{\"name\":\"qhboojuxilozbl\"},\"\":{\"b\":\"datafldfljwt\",\"gftshfgmuxuqiags\":\"datatsflotumbm\",\"paowkgvnlfueyxfz\":\"dataoikuqirhsk\",\"ce\":\"databrlrjugcfebpi\"}}") + "{\"type\":\"dfclmowurofofkbc\",\"typeProperties\":{\"tableName\":\"datatytvnpbgces\"},\"description\":\"zwweobl\",\"structure\":\"datadq\",\"schema\":\"dataixccnkfsog\",\"linkedServiceName\":{\"referenceName\":\"yoxmyqzyqepg\",\"parameters\":{\"dpwmgwxwukfjvqg\":\"datadsluokcevoxd\"}},\"parameters\":{\"kpalecztl\":{\"type\":\"Float\",\"defaultValue\":\"dataisvvvgyphheove\"},\"fssrgrq\":{\"type\":\"Array\",\"defaultValue\":\"datatzpu\"},\"louurmuzembqq\":{\"type\":\"Int\",\"defaultValue\":\"dataquo\"},\"gslqcxuthv\":{\"type\":\"Bool\",\"defaultValue\":\"datadhjofywwnaxoxlor\"}},\"annotations\":[\"datalyyhrgmabspmlu\",\"datayju\",\"datakedputocrb\",\"datagqicmdrgcuzjmvkr\"],\"folder\":{\"name\":\"cqhgcmljzksqimy\"},\"\":{\"qpvhszopeukufds\":\"datavfiomhc\",\"fsjbpwjwz\":\"databsskgqjemosq\"}}") .toObject(AmazonMwsObjectDataset.class); - Assertions.assertEquals("ssijuaxxfd", model.description()); - Assertions.assertEquals("tl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("mkdhwqcqweba").type()); - Assertions.assertEquals("qhboojuxilozbl", model.folder().name()); + Assertions.assertEquals("zwweobl", model.description()); + Assertions.assertEquals("yoxmyqzyqepg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kpalecztl").type()); + Assertions.assertEquals("cqhgcmljzksqimy", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonMwsObjectDataset model = new AmazonMwsObjectDataset().withDescription("ssijuaxxfd") - .withStructure("datapkcpws").withSchema("datannmjun") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tl") - .withParameters(mapOf("kcsihxvta", "datatjhbcycgq", "zqqgug", "datawwfopxpryxnsbubw"))) - .withParameters(mapOf("mkdhwqcqweba", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahtq"), - "rgvypa", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datapkephujeucosvkke"), - "ueez", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datapyillg"), - "zyojfch", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafbuqxknvmcgmb"))) - .withAnnotations(Arrays.asList("dataarex", "datao")) - .withFolder(new DatasetFolder().withName("qhboojuxilozbl")).withTableName("datagirrpwnqtvuxeuj"); + AmazonMwsObjectDataset model = new AmazonMwsObjectDataset().withDescription("zwweobl") + .withStructure("datadq") + .withSchema("dataixccnkfsog") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yoxmyqzyqepg") + 
.withParameters(mapOf("dpwmgwxwukfjvqg", "datadsluokcevoxd"))) + .withParameters(mapOf("kpalecztl", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataisvvvgyphheove"), + "fssrgrq", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datatzpu"), + "louurmuzembqq", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataquo"), + "gslqcxuthv", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datadhjofywwnaxoxlor"))) + .withAnnotations(Arrays.asList("datalyyhrgmabspmlu", "datayju", "datakedputocrb", "datagqicmdrgcuzjmvkr")) + .withFolder(new DatasetFolder().withName("cqhgcmljzksqimy")) + .withTableName("datatytvnpbgces"); model = BinaryData.fromObject(model).toObject(AmazonMwsObjectDataset.class); - Assertions.assertEquals("ssijuaxxfd", model.description()); - Assertions.assertEquals("tl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("mkdhwqcqweba").type()); - Assertions.assertEquals("qhboojuxilozbl", model.folder().name()); + Assertions.assertEquals("zwweobl", model.description()); + Assertions.assertEquals("yoxmyqzyqepg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kpalecztl").type()); + Assertions.assertEquals("cqhgcmljzksqimy", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java index a222680008a76..ffca45338b06d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java @@ -11,16 +11,19 @@ public final class AmazonMwsSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonMwsSource model = BinaryData.fromString( - "{\"type\":\"AmazonMWSSource\",\"query\":\"datapsoeocvywtyehln\",\"queryTimeout\":\"dataqeply\",\"additionalColumns\":\"dataad\",\"sourceRetryCount\":\"datagwdxoxjlvvvz\",\"sourceRetryWait\":\"datajvyintgkve\",\"maxConcurrentConnections\":\"dataeldnmb\",\"disableMetricsCollection\":\"databii\",\"\":{\"jaagfeiw\":\"datakxi\",\"zmsivqegmpfzbrh\":\"datauxqw\",\"tkrsjspziiev\":\"dataqj\",\"auyxyoyjas\":\"datattsz\"}}") + "{\"type\":\"imvziyicxnxci\",\"query\":\"datahdlwlehhqxy\",\"queryTimeout\":\"dataakzgryfxwwqbey\",\"additionalColumns\":\"datadnjmjies\",\"sourceRetryCount\":\"datalvzxucnpa\",\"sourceRetryWait\":\"datarairefifbisljhg\",\"maxConcurrentConnections\":\"datajm\",\"disableMetricsCollection\":\"datam\",\"\":{\"hjhpxjlgiurmli\":\"datagcbrmmweeuy\"}}") .toObject(AmazonMwsSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonMwsSource model - = new AmazonMwsSource().withSourceRetryCount("datagwdxoxjlvvvz").withSourceRetryWait("datajvyintgkve") - .withMaxConcurrentConnections("dataeldnmb").withDisableMetricsCollection("databii") - .withQueryTimeout("dataqeply").withAdditionalColumns("dataad").withQuery("datapsoeocvywtyehln"); + AmazonMwsSource model = new AmazonMwsSource().withSourceRetryCount("datalvzxucnpa") + 
.withSourceRetryWait("datarairefifbisljhg") + .withMaxConcurrentConnections("datajm") + .withDisableMetricsCollection("datam") + .withQueryTimeout("dataakzgryfxwwqbey") + .withAdditionalColumns("datadnjmjies") + .withQuery("datahdlwlehhqxy"); model = BinaryData.fromObject(model).toObject(AmazonMwsSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java index 2acdfbf123225..76df20489e852 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java @@ -11,15 +11,17 @@ public final class AmazonRdsForOraclePartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForOraclePartitionSettings model = BinaryData.fromString( - "{\"partitionNames\":\"datafdctgsdxjxkd\",\"partitionColumnName\":\"datao\",\"partitionUpperBound\":\"datalprsrkennnyyvv\",\"partitionLowerBound\":\"datasad\"}") + "{\"partitionNames\":\"datagows\",\"partitionColumnName\":\"dataguap\",\"partitionUpperBound\":\"datalhhmby\",\"partitionLowerBound\":\"datacexpopqy\"}") .toObject(AmazonRdsForOraclePartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRdsForOraclePartitionSettings model = new AmazonRdsForOraclePartitionSettings() - .withPartitionNames("datafdctgsdxjxkd").withPartitionColumnName("datao") - .withPartitionUpperBound("datalprsrkennnyyvv").withPartitionLowerBound("datasad"); + AmazonRdsForOraclePartitionSettings model + = new AmazonRdsForOraclePartitionSettings().withPartitionNames("datagows") + .withPartitionColumnName("dataguap") + .withPartitionUpperBound("datalhhmby") + .withPartitionLowerBound("datacexpopqy"); model = BinaryData.fromObject(model).toObject(AmazonRdsForOraclePartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java index 99679fbc0ffec..42492181de5c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java @@ -12,20 +12,24 @@ public final class AmazonRdsForOracleSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForOracleSource model = BinaryData.fromString( - 
"{\"type\":\"AmazonRdsForOracleSource\",\"oracleReaderQuery\":\"datawgikpdpudqiwhvx\",\"queryTimeout\":\"datavpoeuufw\",\"partitionOption\":\"datadeffrbxzjedy\",\"partitionSettings\":{\"partitionNames\":\"datasxspnmfydphl\",\"partitionColumnName\":\"datano\",\"partitionUpperBound\":\"databdvjlqfzlbpeh\",\"partitionLowerBound\":\"datapgllrh\"},\"additionalColumns\":\"dataxstpg\",\"sourceRetryCount\":\"databezmyjqpdchds\",\"sourceRetryWait\":\"datakmgppxzgjysmtskt\",\"maxConcurrentConnections\":\"datasybwd\",\"disableMetricsCollection\":\"datajsokosugrfizfwd\",\"\":{\"nie\":\"datanwhqafzgzmonj\",\"fmognnw\":\"datactwbim\",\"yrfpnbyxy\":\"datardllrqamfj\",\"xhvzgl\":\"dataubvidpskdzs\"}}") + "{\"type\":\"ftqkg\",\"oracleReaderQuery\":\"dataa\",\"queryTimeout\":\"databrwvio\",\"partitionOption\":\"datayhsorcavkfhyoig\",\"partitionSettings\":{\"partitionNames\":\"datadfteratvpkgaw\",\"partitionColumnName\":\"dataujizdmhepfj\",\"partitionUpperBound\":\"datawzgwmumuchvqwhs\",\"partitionLowerBound\":\"dataaqd\"},\"additionalColumns\":\"dataeln\",\"sourceRetryCount\":\"datagoullxpaylk\",\"sourceRetryWait\":\"datastbkskkziebmwyo\",\"maxConcurrentConnections\":\"datamplgdxdtxbrdbw\",\"disableMetricsCollection\":\"datatxpfofr\",\"\":{\"ukqscmd\":\"databbnoevkkrlkd\"}}") .toObject(AmazonRdsForOracleSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRdsForOracleSource model = new AmazonRdsForOracleSource().withSourceRetryCount("databezmyjqpdchds") - .withSourceRetryWait("datakmgppxzgjysmtskt").withMaxConcurrentConnections("datasybwd") - .withDisableMetricsCollection("datajsokosugrfizfwd").withOracleReaderQuery("datawgikpdpudqiwhvx") - .withQueryTimeout("datavpoeuufw").withPartitionOption("datadeffrbxzjedy") - .withPartitionSettings(new AmazonRdsForOraclePartitionSettings().withPartitionNames("datasxspnmfydphl") - .withPartitionColumnName("datano").withPartitionUpperBound("databdvjlqfzlbpeh") - .withPartitionLowerBound("datapgllrh")) - .withAdditionalColumns("dataxstpg"); + AmazonRdsForOracleSource model = new AmazonRdsForOracleSource().withSourceRetryCount("datagoullxpaylk") + .withSourceRetryWait("datastbkskkziebmwyo") + .withMaxConcurrentConnections("datamplgdxdtxbrdbw") + .withDisableMetricsCollection("datatxpfofr") + .withOracleReaderQuery("dataa") + .withQueryTimeout("databrwvio") + .withPartitionOption("datayhsorcavkfhyoig") + .withPartitionSettings(new AmazonRdsForOraclePartitionSettings().withPartitionNames("datadfteratvpkgaw") + .withPartitionColumnName("dataujizdmhepfj") + .withPartitionUpperBound("datawzgwmumuchvqwhs") + .withPartitionLowerBound("dataaqd")) + .withAdditionalColumns("dataeln"); model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java index dce67dab0ecca..67f4702a25ecf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java @@ -19,32 +19,33 @@ public final class AmazonRdsForOracleTableDatasetTests { 
@org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForOracleTableDataset model = BinaryData.fromString( - "{\"type\":\"AmazonRdsForOracleTable\",\"typeProperties\":{\"schema\":\"dataipvwkauj\",\"table\":\"dataw\"},\"description\":\"ox\",\"structure\":\"datawofxxdplrel\",\"schema\":\"datavga\",\"linkedServiceName\":{\"referenceName\":\"cbtuxlbpxrhrfjen\",\"parameters\":{\"jixy\":\"datawefiktlhqashtos\",\"acfvvtdpcbpzf\":\"datasecigzzdwj\",\"fiwltkfysu\":\"datamcsaugbr\"}},\"parameters\":{\"hcvasyy\":{\"type\":\"Array\",\"defaultValue\":\"dataklx\"},\"ixyxxhwrlqomaqs\":{\"type\":\"Array\",\"defaultValue\":\"dataokjbmsr\"},\"zozsxag\":{\"type\":\"Bool\",\"defaultValue\":\"datapzzbrwn\"}},\"annotations\":[\"datak\"],\"folder\":{\"name\":\"ksybvrrbnhylsb\"},\"\":{\"stizsyqag\":\"datacydyllmxv\",\"dylkyhtr\":\"datallcbrva\",\"ogykugdlavsav\":\"dataqwfyybptmjjr\"}}") + "{\"type\":\"gmkokqoikx\",\"typeProperties\":{\"schema\":\"datawxcevdspt\",\"table\":\"dataffmwt\"},\"description\":\"fwlnmlkff\",\"structure\":\"datau\",\"schema\":\"datatbsliejdnccot\",\"linkedServiceName\":{\"referenceName\":\"likjiytehhxt\",\"parameters\":{\"rctysecpekhxdb\":\"datadwbymuqlngncrd\",\"zcvimmwcko\":\"datahq\"}},\"parameters\":{\"upqtzckj\":{\"type\":\"Int\",\"defaultValue\":\"datamtrtsv\"},\"nc\":{\"type\":\"Array\",\"defaultValue\":\"datakgnrfrjschj\"}},\"annotations\":[\"datahgt\",\"databgd\"],\"folder\":{\"name\":\"morzolxosg\"},\"\":{\"gvznjq\":\"datarxueqbmxq\",\"cxlngoufpi\":\"datawshesgcsqos\",\"mfxzspf\":\"datap\",\"zipplxg\":\"datavsl\"}}") .toObject(AmazonRdsForOracleTableDataset.class); - Assertions.assertEquals("ox", model.description()); - Assertions.assertEquals("cbtuxlbpxrhrfjen", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hcvasyy").type()); - Assertions.assertEquals("ksybvrrbnhylsb", model.folder().name()); + Assertions.assertEquals("fwlnmlkff", model.description()); + Assertions.assertEquals("likjiytehhxt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("upqtzckj").type()); + Assertions.assertEquals("morzolxosg", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRdsForOracleTableDataset model = new AmazonRdsForOracleTableDataset().withDescription("ox") - .withStructure("datawofxxdplrel").withSchema("datavga") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cbtuxlbpxrhrfjen").withParameters( - mapOf("jixy", "datawefiktlhqashtos", "acfvvtdpcbpzf", "datasecigzzdwj", "fiwltkfysu", "datamcsaugbr"))) - .withParameters(mapOf("hcvasyy", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataklx"), - "ixyxxhwrlqomaqs", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataokjbmsr"), "zozsxag", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datapzzbrwn"))) - .withAnnotations(Arrays.asList("datak")).withFolder(new DatasetFolder().withName("ksybvrrbnhylsb")) - .withSchemaTypePropertiesSchema("dataipvwkauj").withTable("dataw"); + AmazonRdsForOracleTableDataset model = new AmazonRdsForOracleTableDataset().withDescription("fwlnmlkff") + .withStructure("datau") + .withSchema("datatbsliejdnccot") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("likjiytehhxt") + .withParameters(mapOf("rctysecpekhxdb", "datadwbymuqlngncrd", "zcvimmwcko", "datahq"))) + 
.withParameters(mapOf("upqtzckj", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamtrtsv"), "nc", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakgnrfrjschj"))) + .withAnnotations(Arrays.asList("datahgt", "databgd")) + .withFolder(new DatasetFolder().withName("morzolxosg")) + .withSchemaTypePropertiesSchema("datawxcevdspt") + .withTable("dataffmwt"); model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleTableDataset.class); - Assertions.assertEquals("ox", model.description()); - Assertions.assertEquals("cbtuxlbpxrhrfjen", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hcvasyy").type()); - Assertions.assertEquals("ksybvrrbnhylsb", model.folder().name()); + Assertions.assertEquals("fwlnmlkff", model.description()); + Assertions.assertEquals("likjiytehhxt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("upqtzckj").type()); + Assertions.assertEquals("morzolxosg", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java index 61e7bf19fd416..d020881dbe702 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class AmazonRdsForOracleTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForOracleTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datahk\",\"table\":\"datageuufkb\"}") + = BinaryData.fromString("{\"schema\":\"dataum\",\"table\":\"dataycildrzn\"}") .toObject(AmazonRdsForOracleTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AmazonRdsForOracleTableDatasetTypeProperties model - = new AmazonRdsForOracleTableDatasetTypeProperties().withSchema("datahk").withTable("datageuufkb"); + = new AmazonRdsForOracleTableDatasetTypeProperties().withSchema("dataum").withTable("dataycildrzn"); model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java index 3873447c7b879..83548bb8e4910 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java @@ -12,20 +12,27 @@ public final class AmazonRdsForSqlServerSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() 
throws Exception { AmazonRdsForSqlServerSource model = BinaryData.fromString( - "{\"type\":\"AmazonRdsForSqlServerSource\",\"sqlReaderQuery\":\"datajeypdk\",\"sqlReaderStoredProcedureName\":\"datacxzsynbdrqi\",\"storedProcedureParameters\":\"dataihg\",\"isolationLevel\":\"datahyebwg\",\"produceAdditionalTypes\":\"dataovsvjxnsor\",\"partitionOption\":\"datal\",\"partitionSettings\":{\"partitionColumnName\":\"datalyhgiisn\",\"partitionUpperBound\":\"datax\",\"partitionLowerBound\":\"databo\"},\"queryTimeout\":\"datapyilojwcza\",\"additionalColumns\":\"datawtausk\",\"sourceRetryCount\":\"datahhmtypgrkdmezaun\",\"sourceRetryWait\":\"datacqtigav\",\"maxConcurrentConnections\":\"datasnrjhjlploaeppl\",\"disableMetricsCollection\":\"datakcazuj\",\"\":{\"elaaexcnxrtlnzd\":\"datauzbsxhivnc\",\"zzpmm\":\"databqfea\",\"jxphwynimcp\":\"datais\",\"aceoaboozxkdz\":\"datarakucgjre\"}}") + "{\"type\":\"i\",\"sqlReaderQuery\":\"datamthdqvcifwknlyt\",\"sqlReaderStoredProcedureName\":\"datartocadtnmqrpj\",\"storedProcedureParameters\":\"datajixcya\",\"isolationLevel\":\"dataii\",\"produceAdditionalTypes\":\"datadbtrkv\",\"partitionOption\":\"datauessuuzfrw\",\"partitionSettings\":{\"partitionColumnName\":\"datarngjqc\",\"partitionUpperBound\":\"datafbkrtpulp\",\"partitionLowerBound\":\"datayqsiniej\"},\"queryTimeout\":\"dataukbdtmr\",\"additionalColumns\":\"datatuzfhvb\",\"sourceRetryCount\":\"datauluilgmovadn\",\"sourceRetryWait\":\"datasmjxgqsbjc\",\"maxConcurrentConnections\":\"dataaruvbzcqgtzxtlr\",\"disableMetricsCollection\":\"datatdznvjgovyqppp\",\"\":{\"kntfwxkeu\":\"dataepttab\",\"noveabw\":\"dataxgpcrvvmrdlckp\",\"swhqmrpdxn\":\"dataaiqikzysdha\"}}") .toObject(AmazonRdsForSqlServerSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRdsForSqlServerSource model = new AmazonRdsForSqlServerSource() - .withSourceRetryCount("datahhmtypgrkdmezaun").withSourceRetryWait("datacqtigav") - .withMaxConcurrentConnections("datasnrjhjlploaeppl").withDisableMetricsCollection("datakcazuj") - .withQueryTimeout("datapyilojwcza").withAdditionalColumns("datawtausk").withSqlReaderQuery("datajeypdk") - .withSqlReaderStoredProcedureName("datacxzsynbdrqi").withStoredProcedureParameters("dataihg") - .withIsolationLevel("datahyebwg").withProduceAdditionalTypes("dataovsvjxnsor").withPartitionOption("datal") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datalyhgiisn") - .withPartitionUpperBound("datax").withPartitionLowerBound("databo")); + AmazonRdsForSqlServerSource model = new AmazonRdsForSqlServerSource().withSourceRetryCount("datauluilgmovadn") + .withSourceRetryWait("datasmjxgqsbjc") + .withMaxConcurrentConnections("dataaruvbzcqgtzxtlr") + .withDisableMetricsCollection("datatdznvjgovyqppp") + .withQueryTimeout("dataukbdtmr") + .withAdditionalColumns("datatuzfhvb") + .withSqlReaderQuery("datamthdqvcifwknlyt") + .withSqlReaderStoredProcedureName("datartocadtnmqrpj") + .withStoredProcedureParameters("datajixcya") + .withIsolationLevel("dataii") + .withProduceAdditionalTypes("datadbtrkv") + .withPartitionOption("datauessuuzfrw") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datarngjqc") + .withPartitionUpperBound("datafbkrtpulp") + .withPartitionLowerBound("datayqsiniej")); model = BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerSource.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java index c0b5f3fa3e6e4..7769daf572c6b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java @@ -19,34 +19,33 @@ public final class AmazonRdsForSqlServerTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForSqlServerTableDataset model = BinaryData.fromString( - "{\"type\":\"AmazonRdsForSqlServerTable\",\"typeProperties\":{\"schema\":\"dataumtcqxmyvkxixypa\",\"table\":\"datafjczgohvpsuwi\"},\"description\":\"m\",\"structure\":\"datazbyfkoc\",\"schema\":\"datazdct\",\"linkedServiceName\":{\"referenceName\":\"nlwsc\",\"parameters\":{\"lks\":\"datatwgxrolwv\"}},\"parameters\":{\"hbvjhxvpmq\":{\"type\":\"Object\",\"defaultValue\":\"dataacuctihavi\"},\"phngr\":{\"type\":\"SecureString\",\"defaultValue\":\"dataux\"},\"icgym\":{\"type\":\"Float\",\"defaultValue\":\"dataxoweorocr\"},\"vhtvijvwmrg\":{\"type\":\"String\",\"defaultValue\":\"dataqpfy\"}},\"annotations\":[\"datahrplcxfmbzquuutq\",\"datahbtqyzy\",\"datag\",\"datambky\"],\"folder\":{\"name\":\"jdqosxzmdzlybqfu\"},\"\":{\"fkicxhsevmnkggh\":\"datak\",\"vbjsarxsvmfp\":\"datasryjokvl\"}}") + "{\"type\":\"vlqcwyzhndqk\",\"typeProperties\":{\"schema\":\"dataauimnntf\",\"table\":\"datapwqcnbn\"},\"description\":\"tzpze\",\"structure\":\"datalce\",\"schema\":\"datat\",\"linkedServiceName\":{\"referenceName\":\"htuwc\",\"parameters\":{\"lujm\":\"datacaumviudzpsjqr\",\"mpjbh\":\"dataunlofwuzebfq\",\"wkhdlckdoxocj\":\"datayenfspetxeu\"}},\"parameters\":{\"eortwwyjmgvr\":{\"type\":\"Object\",\"defaultValue\":\"datapf\"}},\"annotations\":[\"datax\"],\"folder\":{\"name\":\"jnnhbcjywkdywks\"},\"\":{\"plkossjbzvxp\":\"datauafanefictp\",\"aczhfjdccjn\":\"datawdqzuhfgt\",\"g\":\"datauvbtcuhj\",\"bszsbzrrxey\":\"datatjkntomnlzthc\"}}") .toObject(AmazonRdsForSqlServerTableDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("nlwsc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hbvjhxvpmq").type()); - Assertions.assertEquals("jdqosxzmdzlybqfu", model.folder().name()); + Assertions.assertEquals("tzpze", model.description()); + Assertions.assertEquals("htuwc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("eortwwyjmgvr").type()); + Assertions.assertEquals("jnnhbcjywkdywks", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRdsForSqlServerTableDataset model = new AmazonRdsForSqlServerTableDataset().withDescription("m") - .withStructure("datazbyfkoc").withSchema("datazdct") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("nlwsc").withParameters(mapOf("lks", "datatwgxrolwv"))) - .withParameters(mapOf("hbvjhxvpmq", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataacuctihavi"), "phngr", - new 
ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataux"), "icgym", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataxoweorocr"), - "vhtvijvwmrg", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqpfy"))) - .withAnnotations(Arrays.asList("datahrplcxfmbzquuutq", "datahbtqyzy", "datag", "datambky")) - .withFolder(new DatasetFolder().withName("jdqosxzmdzlybqfu")) - .withSchemaTypePropertiesSchema("dataumtcqxmyvkxixypa").withTable("datafjczgohvpsuwi"); + AmazonRdsForSqlServerTableDataset model = new AmazonRdsForSqlServerTableDataset().withDescription("tzpze") + .withStructure("datalce") + .withSchema("datat") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("htuwc") + .withParameters(mapOf("lujm", "datacaumviudzpsjqr", "mpjbh", "dataunlofwuzebfq", "wkhdlckdoxocj", + "datayenfspetxeu"))) + .withParameters(mapOf("eortwwyjmgvr", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datapf"))) + .withAnnotations(Arrays.asList("datax")) + .withFolder(new DatasetFolder().withName("jnnhbcjywkdywks")) + .withSchemaTypePropertiesSchema("dataauimnntf") + .withTable("datapwqcnbn"); model = BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerTableDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("nlwsc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hbvjhxvpmq").type()); - Assertions.assertEquals("jdqosxzmdzlybqfu", model.folder().name()); + Assertions.assertEquals("tzpze", model.description()); + Assertions.assertEquals("htuwc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("eortwwyjmgvr").type()); + Assertions.assertEquals("jnnhbcjywkdywks", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java index 66a8fe86190da..3ed2b2e67a349 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class AmazonRdsForSqlServerTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRdsForSqlServerTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datawbpzgfgqp\",\"table\":\"datahgxg\"}") + = BinaryData.fromString("{\"schema\":\"datad\",\"table\":\"datawlrmbdctqx\"}") .toObject(AmazonRdsForSqlServerTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AmazonRdsForSqlServerTableDatasetTypeProperties model - = new AmazonRdsForSqlServerTableDatasetTypeProperties().withSchema("datawbpzgfgqp").withTable("datahgxg"); + = new AmazonRdsForSqlServerTableDatasetTypeProperties().withSchema("datad").withTable("datawlrmbdctqx"); model = 
BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java index b5d9d0384828a..f88057d40747a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java @@ -16,23 +16,29 @@ public final class AmazonRedshiftSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRedshiftSource model = BinaryData.fromString( - "{\"type\":\"AmazonRedshiftSource\",\"query\":\"datayipjzgmxqaupy\",\"redshiftUnloadSettings\":{\"s3LinkedServiceName\":{\"referenceName\":\"goyp\",\"parameters\":{\"zjnlfucmhono\":\"dataoyyfysn\",\"elmmx\":\"dataeseuq\"}},\"bucketName\":\"datam\"},\"queryTimeout\":\"dataiyc\",\"additionalColumns\":\"datajswedkfofyfwpu\",\"sourceRetryCount\":\"datapcutzlvxkolvedz\",\"sourceRetryWait\":\"datakrpormvddwgozrd\",\"maxConcurrentConnections\":\"datakmgcxmkrldfoidyh\",\"disableMetricsCollection\":\"datacgv\",\"\":{\"qgysxpaapmt\":\"datasexenywwkdx\",\"ocadtnmqrpjecj\":\"datadqvcifwknlytvxr\"}}") + "{\"type\":\"joxwugdzwoczfizf\",\"query\":\"datajc\",\"redshiftUnloadSettings\":{\"s3LinkedServiceName\":{\"referenceName\":\"aaabtxrhemnkyk\",\"parameters\":{\"jpzeeqvku\":\"dataomwyoktzffpcdq\",\"coikstap\":\"dataykiyrfoko\",\"rmugkugwtgfktw\":\"databdh\",\"isatb\":\"datayhauhqvxeyl\"}},\"bucketName\":\"dataghtkdcuf\"},\"queryTimeout\":\"dataernq\",\"additionalColumns\":\"dataiyyys\",\"sourceRetryCount\":\"datapddzzdw\",\"sourceRetryWait\":\"datavmocnfzmuyykxlfl\",\"maxConcurrentConnections\":\"datauwunsnyqpmnyvnbm\",\"disableMetricsCollection\":\"dataomcrev\",\"\":{\"i\":\"dataclilzv\",\"nraeodixoflxv\":\"datadnobxcdx\",\"frbjrbqc\":\"datauhxrctcozfj\",\"icmhhv\":\"datarrwbcycwasmrfbw\"}}") .toObject(AmazonRedshiftSource.class); - Assertions.assertEquals("goyp", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); + Assertions.assertEquals("aaabtxrhemnkyk", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRedshiftSource model = new AmazonRedshiftSource().withSourceRetryCount("datapcutzlvxkolvedz") - .withSourceRetryWait("datakrpormvddwgozrd").withMaxConcurrentConnections("datakmgcxmkrldfoidyh") - .withDisableMetricsCollection("datacgv").withQueryTimeout("dataiyc") - .withAdditionalColumns("datajswedkfofyfwpu").withQuery("datayipjzgmxqaupy") - .withRedshiftUnloadSettings(new RedshiftUnloadSettings() - .withS3LinkedServiceName(new LinkedServiceReference().withReferenceName("goyp") - .withParameters(mapOf("zjnlfucmhono", "dataoyyfysn", "elmmx", "dataeseuq"))) - .withBucketName("datam")); + AmazonRedshiftSource model + = new AmazonRedshiftSource().withSourceRetryCount("datapddzzdw") + .withSourceRetryWait("datavmocnfzmuyykxlfl") + .withMaxConcurrentConnections("datauwunsnyqpmnyvnbm") + .withDisableMetricsCollection("dataomcrev") + .withQueryTimeout("dataernq") + .withAdditionalColumns("dataiyyys") + .withQuery("datajc") + .withRedshiftUnloadSettings( + new 
RedshiftUnloadSettings() + .withS3LinkedServiceName(new LinkedServiceReference().withReferenceName("aaabtxrhemnkyk") + .withParameters(mapOf("jpzeeqvku", "dataomwyoktzffpcdq", "coikstap", "dataykiyrfoko", + "rmugkugwtgfktw", "databdh", "isatb", "datayhauhqvxeyl"))) + .withBucketName("dataghtkdcuf")); model = BinaryData.fromObject(model).toObject(AmazonRedshiftSource.class); - Assertions.assertEquals("goyp", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); + Assertions.assertEquals("aaabtxrhemnkyk", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java index 8cdc032e2557c..286f6e64a512f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java @@ -19,32 +19,34 @@ public final class AmazonRedshiftTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonRedshiftTableDataset model = BinaryData.fromString( - "{\"type\":\"AmazonRedshiftTable\",\"typeProperties\":{\"tableName\":\"datamhidyliuajkln\",\"table\":\"datagdnxqeon\",\"schema\":\"datarjjaojpz\"},\"description\":\"d\",\"structure\":\"dataigecwsadsqyuddkh\",\"schema\":\"datadmohheuyu\",\"linkedServiceName\":{\"referenceName\":\"nxmyevyigde\",\"parameters\":{\"ejwli\":\"datafi\"}},\"parameters\":{\"pqokhdyncra\":{\"type\":\"Bool\",\"defaultValue\":\"datajzwhajod\"},\"m\":{\"type\":\"Int\",\"defaultValue\":\"dataewb\"},\"clmslnunkqvz\":{\"type\":\"Int\",\"defaultValue\":\"datapmqnmelyksygih\"}},\"annotations\":[\"databajdexquawexi\",\"databfzetjizwh\",\"datanbmajvvyxtvvx\",\"dataakzixb\"],\"folder\":{\"name\":\"bfmlngfwhrmvlakn\"},\"\":{\"zblxna\":\"datawxn\",\"kovohwvpr\":\"datahsmfndcbsyhludzj\",\"cntjna\":\"datafdvtdurmdt\"}}") + "{\"type\":\"hjbozvoo\",\"typeProperties\":{\"tableName\":\"datajejqa\",\"table\":\"datausqpfzxkczbd\",\"schema\":\"databb\"},\"description\":\"clnh\",\"structure\":\"datacyuzlybqscibv\",\"schema\":\"datakvck\",\"linkedServiceName\":{\"referenceName\":\"afnwqh\",\"parameters\":{\"mgjz\":\"datacnviulby\",\"wpbgumwhmxp\":\"datay\",\"trvlv\":\"datacknsastlpsmgomic\",\"xok\":\"dataj\"}},\"parameters\":{\"emklphx\":{\"type\":\"Float\",\"defaultValue\":\"dataecjrzvlcivqx\"}},\"annotations\":[\"datalojkbgnfbrzj\",\"datafsunh\",\"dataevlahxczywyw\",\"dataahwcorewcnn\"],\"folder\":{\"name\":\"qjfdajr\"},\"\":{\"bxexf\":\"datamomggewd\",\"vussuqks\":\"dataz\",\"qbyeywpmoh\":\"datawsfxay\",\"dzt\":\"datartlikff\"}}") .toObject(AmazonRedshiftTableDataset.class); - Assertions.assertEquals("d", model.description()); - Assertions.assertEquals("nxmyevyigde", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pqokhdyncra").type()); - Assertions.assertEquals("bfmlngfwhrmvlakn", model.folder().name()); + Assertions.assertEquals("clnh", model.description()); + Assertions.assertEquals("afnwqh", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, 
model.parameters().get("emklphx").type()); + Assertions.assertEquals("qjfdajr", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRedshiftTableDataset model = new AmazonRedshiftTableDataset().withDescription("d") - .withStructure("dataigecwsadsqyuddkh").withSchema("datadmohheuyu") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("nxmyevyigde").withParameters(mapOf("ejwli", "datafi"))) - .withParameters(mapOf("pqokhdyncra", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datajzwhajod"), "m", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataewb"), "clmslnunkqvz", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datapmqnmelyksygih"))) - .withAnnotations(Arrays.asList("databajdexquawexi", "databfzetjizwh", "datanbmajvvyxtvvx", "dataakzixb")) - .withFolder(new DatasetFolder().withName("bfmlngfwhrmvlakn")).withTableName("datamhidyliuajkln") - .withTable("datagdnxqeon").withSchemaTypePropertiesSchema("datarjjaojpz"); + AmazonRedshiftTableDataset model = new AmazonRedshiftTableDataset().withDescription("clnh") + .withStructure("datacyuzlybqscibv") + .withSchema("datakvck") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("afnwqh") + .withParameters(mapOf("mgjz", "datacnviulby", "wpbgumwhmxp", "datay", "trvlv", "datacknsastlpsmgomic", + "xok", "dataj"))) + .withParameters(mapOf("emklphx", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataecjrzvlcivqx"))) + .withAnnotations(Arrays.asList("datalojkbgnfbrzj", "datafsunh", "dataevlahxczywyw", "dataahwcorewcnn")) + .withFolder(new DatasetFolder().withName("qjfdajr")) + .withTableName("datajejqa") + .withTable("datausqpfzxkczbd") + .withSchemaTypePropertiesSchema("databb"); model = BinaryData.fromObject(model).toObject(AmazonRedshiftTableDataset.class); - Assertions.assertEquals("d", model.description()); - Assertions.assertEquals("nxmyevyigde", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pqokhdyncra").type()); - Assertions.assertEquals("bfmlngfwhrmvlakn", model.folder().name()); + Assertions.assertEquals("clnh", model.description()); + Assertions.assertEquals("afnwqh", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("emklphx").type()); + Assertions.assertEquals("qjfdajr", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java index a0a084085dc4b..944f989d7b93a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java @@ -10,16 +10,17 @@ public final class AmazonRedshiftTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AmazonRedshiftTableDatasetTypeProperties model = BinaryData - .fromString( - 
"{\"tableName\":\"datahvqiiasbtwskkf\",\"table\":\"datayikmxhhqsxjbjk\",\"schema\":\"datariglb\"}") - .toObject(AmazonRedshiftTableDatasetTypeProperties.class); + AmazonRedshiftTableDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"datarfbg\",\"table\":\"datazfwvzdteqjmy\",\"schema\":\"datauh\"}") + .toObject(AmazonRedshiftTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonRedshiftTableDatasetTypeProperties model = new AmazonRedshiftTableDatasetTypeProperties() - .withTableName("datahvqiiasbtwskkf").withTable("datayikmxhhqsxjbjk").withSchema("datariglb"); + AmazonRedshiftTableDatasetTypeProperties model + = new AmazonRedshiftTableDatasetTypeProperties().withTableName("datarfbg") + .withTable("datazfwvzdteqjmy") + .withSchema("datauh"); model = BinaryData.fromObject(model).toObject(AmazonRedshiftTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java index 61da9495ea321..4eb0555b97f5b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java @@ -11,14 +11,16 @@ public final class AmazonS3CompatibleLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonS3CompatibleLocation model = BinaryData.fromString( - "{\"type\":\"AmazonS3CompatibleLocation\",\"bucketName\":\"dataxtpzdlyse\",\"version\":\"datatoakatprytgrhz\",\"folderPath\":\"datafdpfawrptvcsht\",\"fileName\":\"datatzc\",\"\":{\"m\":\"dataqgdirda\",\"bwjjirmuydgf\":\"datazjgcfjfx\"}}") + "{\"type\":\"fnqh\",\"bucketName\":\"datakkt\",\"version\":\"datazejogmkorvvmvm\",\"folderPath\":\"datasnosnqliwkmzojf\",\"fileName\":\"datayalhtgm\",\"\":{\"sttcucrcmm\":\"datazgbjbhrpgi\"}}") .toObject(AmazonS3CompatibleLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonS3CompatibleLocation model = new AmazonS3CompatibleLocation().withFolderPath("datafdpfawrptvcsht") - .withFileName("datatzc").withBucketName("dataxtpzdlyse").withVersion("datatoakatprytgrhz"); + AmazonS3CompatibleLocation model = new AmazonS3CompatibleLocation().withFolderPath("datasnosnqliwkmzojf") + .withFileName("datayalhtgm") + .withBucketName("datakkt") + .withVersion("datazejogmkorvvmvm"); model = BinaryData.fromObject(model).toObject(AmazonS3CompatibleLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java index 732aa2c05131f..d14571fef9a97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java @@ -11,18 
+11,25 @@ public final class AmazonS3CompatibleReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonS3CompatibleReadSettings model = BinaryData.fromString( - "{\"type\":\"AmazonS3CompatibleReadSettings\",\"recursive\":\"datakf\",\"wildcardFolderPath\":\"dataauihnb\",\"wildcardFileName\":\"datahkdwyehqnxuffgj\",\"prefix\":\"dataminhvdkqigppdqsq\",\"fileListPath\":\"datap\",\"enablePartitionDiscovery\":\"dataaxthuhuruo\",\"partitionRootPath\":\"datayotapstkdbn\",\"deleteFilesAfterCompletion\":\"datapcuzexoymfku\",\"modifiedDatetimeStart\":\"dataysgsqzpgrvf\",\"modifiedDatetimeEnd\":\"datayph\",\"maxConcurrentConnections\":\"datarxrpahp\",\"disableMetricsCollection\":\"dataikfenmiflkyf\",\"\":{\"tablilyoomgse\":\"dataolnxhsupilh\",\"kluqf\":\"dataisydhardx\",\"ierxu\":\"datafglftlqa\"}}") + "{\"type\":\"ojgmobkaligo\",\"recursive\":\"databpiie\",\"wildcardFolderPath\":\"datagchsgotgwerbpob\",\"wildcardFileName\":\"datah\",\"prefix\":\"dataicgrxceivvm\",\"fileListPath\":\"datakllqh\",\"enablePartitionDiscovery\":\"datautr\",\"partitionRootPath\":\"datamztrnniarje\",\"deleteFilesAfterCompletion\":\"datahyxiqfoq\",\"modifiedDatetimeStart\":\"datasqykqfserlsai\",\"modifiedDatetimeEnd\":\"datahhetagwmzg\",\"maxConcurrentConnections\":\"datakehpdssvlubdp\",\"disableMetricsCollection\":\"datawxsxbxdkmixurce\",\"\":{\"kcnkghkr\":\"datamjqqauftghmtbu\",\"eayodrvwnqb\":\"datairshl\"}}") .toObject(AmazonS3CompatibleReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonS3CompatibleReadSettings model = new AmazonS3CompatibleReadSettings() - .withMaxConcurrentConnections("datarxrpahp").withDisableMetricsCollection("dataikfenmiflkyf") - .withRecursive("datakf").withWildcardFolderPath("dataauihnb").withWildcardFileName("datahkdwyehqnxuffgj") - .withPrefix("dataminhvdkqigppdqsq").withFileListPath("datap").withEnablePartitionDiscovery("dataaxthuhuruo") - .withPartitionRootPath("datayotapstkdbn").withDeleteFilesAfterCompletion("datapcuzexoymfku") - .withModifiedDatetimeStart("dataysgsqzpgrvf").withModifiedDatetimeEnd("datayph"); + AmazonS3CompatibleReadSettings model + = new AmazonS3CompatibleReadSettings().withMaxConcurrentConnections("datakehpdssvlubdp") + .withDisableMetricsCollection("datawxsxbxdkmixurce") + .withRecursive("databpiie") + .withWildcardFolderPath("datagchsgotgwerbpob") + .withWildcardFileName("datah") + .withPrefix("dataicgrxceivvm") + .withFileListPath("datakllqh") + .withEnablePartitionDiscovery("datautr") + .withPartitionRootPath("datamztrnniarje") + .withDeleteFilesAfterCompletion("datahyxiqfoq") + .withModifiedDatetimeStart("datasqykqfserlsai") + .withModifiedDatetimeEnd("datahhetagwmzg"); model = BinaryData.fromObject(model).toObject(AmazonS3CompatibleReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java index f1bdb4d82b05a..d1d35d0142797 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java @@ -11,14 +11,16 @@ public final class AmazonS3LocationTests { @org.junit.jupiter.api.Test public 
void testDeserialize() throws Exception { AmazonS3Location model = BinaryData.fromString( - "{\"type\":\"AmazonS3Location\",\"bucketName\":\"dataae\",\"version\":\"datacflwtjdtlr\",\"folderPath\":\"datafooy\",\"fileName\":\"datauxdtzcq\",\"\":{\"lantolamlb\":\"datadudgcozzomeh\",\"z\":\"datajuxkqll\"}}") + "{\"type\":\"a\",\"bucketName\":\"datapaslavxjfiuofpi\",\"version\":\"datadzlvssqywj\",\"folderPath\":\"datayhydvikmfn\",\"fileName\":\"datamillxgjs\",\"\":{\"uynhbokayrgwybr\":\"datawgsoriobijeiydy\",\"or\":\"dataopdweoft\",\"gsioc\":\"dataa\"}}") .toObject(AmazonS3Location.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonS3Location model = new AmazonS3Location().withFolderPath("datafooy").withFileName("datauxdtzcq") - .withBucketName("dataae").withVersion("datacflwtjdtlr"); + AmazonS3Location model = new AmazonS3Location().withFolderPath("datayhydvikmfn") + .withFileName("datamillxgjs") + .withBucketName("datapaslavxjfiuofpi") + .withVersion("datadzlvssqywj"); model = BinaryData.fromObject(model).toObject(AmazonS3Location.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java index 95233f9165568..1155a7927f242 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java @@ -11,18 +11,24 @@ public final class AmazonS3ReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AmazonS3ReadSettings model = BinaryData.fromString( - "{\"type\":\"AmazonS3ReadSettings\",\"recursive\":\"dataubryhvbvjyf\",\"wildcardFolderPath\":\"dataaupjozgryoc\",\"wildcardFileName\":\"datak\",\"prefix\":\"datailyznbb\",\"fileListPath\":\"datamxznfoa\",\"enablePartitionDiscovery\":\"datajwiswzn\",\"partitionRootPath\":\"databhtleiwfiz\",\"deleteFilesAfterCompletion\":\"datahvq\",\"modifiedDatetimeStart\":\"datahltnds\",\"modifiedDatetimeEnd\":\"datagnyfhqylia\",\"maxConcurrentConnections\":\"databhzteninafyaga\",\"disableMetricsCollection\":\"datavetzkcolbclspq\",\"\":{\"p\":\"dataicurufngpbvdl\",\"zixranbtqejfqgh\":\"datadkiywwenvxu\",\"y\":\"dataadrvxb\"}}") + "{\"type\":\"vvkuzygzrzubdt\",\"recursive\":\"datavb\",\"wildcardFolderPath\":\"dataheyxewcskt\",\"wildcardFileName\":\"datawbprkf\",\"prefix\":\"dataxiikrjamzt\",\"fileListPath\":\"datamrg\",\"enablePartitionDiscovery\":\"dataypuotmkbofu\",\"partitionRootPath\":\"datahk\",\"deleteFilesAfterCompletion\":\"dataouzvegtnphnotr\",\"modifiedDatetimeStart\":\"datayje\",\"modifiedDatetimeEnd\":\"dataovjdunbaetsdufe\",\"maxConcurrentConnections\":\"dataachmhzpurnpkkbx\",\"disableMetricsCollection\":\"datacfiosra\",\"\":{\"cwuafbh\":\"datahdxnlu\",\"l\":\"dataoaqfpqfpkr\",\"oqdnxumefv\":\"datakvqogzaw\"}}") .toObject(AmazonS3ReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AmazonS3ReadSettings model = new AmazonS3ReadSettings().withMaxConcurrentConnections("databhzteninafyaga") - .withDisableMetricsCollection("datavetzkcolbclspq").withRecursive("dataubryhvbvjyf") - 
.withWildcardFolderPath("dataaupjozgryoc").withWildcardFileName("datak").withPrefix("datailyznbb") - .withFileListPath("datamxznfoa").withEnablePartitionDiscovery("datajwiswzn") - .withPartitionRootPath("databhtleiwfiz").withDeleteFilesAfterCompletion("datahvq") - .withModifiedDatetimeStart("datahltnds").withModifiedDatetimeEnd("datagnyfhqylia"); + AmazonS3ReadSettings model = new AmazonS3ReadSettings().withMaxConcurrentConnections("dataachmhzpurnpkkbx") + .withDisableMetricsCollection("datacfiosra") + .withRecursive("datavb") + .withWildcardFolderPath("dataheyxewcskt") + .withWildcardFileName("datawbprkf") + .withPrefix("dataxiikrjamzt") + .withFileListPath("datamrg") + .withEnablePartitionDiscovery("dataypuotmkbofu") + .withPartitionRootPath("datahk") + .withDeleteFilesAfterCompletion("dataouzvegtnphnotr") + .withModifiedDatetimeStart("datayje") + .withModifiedDatetimeEnd("dataovjdunbaetsdufe"); model = BinaryData.fromObject(model).toObject(AmazonS3ReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java index 5aea5f50d7e23..42d2902f598a3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java @@ -20,43 +20,42 @@ public final class AppendVariableActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AppendVariableActivity model = BinaryData.fromString( - "{\"type\":\"AppendVariable\",\"typeProperties\":{\"variableName\":\"iymvgnqqfnvdor\",\"value\":\"datacvgknbmpcnezdpl\"},\"name\":\"bqhbbzfcjmhpobu\",\"description\":\"dyztqpszbtbxymby\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ehxotizv\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\"],\"\":{\"jiyl\":\"datatfo\",\"ovbzsmgeyok\":\"dataveb\"}},{\"activity\":\"a\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"xymcmogfbweuaz\":\"datawebgaaxffttfql\",\"wnicdgim\":\"datatsgsqoadsbace\"}}],\"userProperties\":[{\"name\":\"mpplbcarc\",\"value\":\"datarftcjxzmxwwmhdl\"},{\"name\":\"fyonnbxvxrcm\",\"value\":\"datadmyjc\"}],\"\":{\"zirkyxhqwoxm\":\"datazodolehchimzrc\"}}") + "{\"type\":\"ndirdlehjz\",\"typeProperties\":{\"variableName\":\"lrx\",\"value\":\"dataslccuyscjefa\"},\"name\":\"pdwyhggvhcoaoeti\",\"description\":\"tkeiram\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"glubqtfc\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\"],\"\":{\"j\":\"datamir\"}}],\"userProperties\":[{\"name\":\"ksafjht\",\"value\":\"databrkghtsfp\"},{\"name\":\"junkhxthkqny\",\"value\":\"dataufvzrqaphep\"},{\"name\":\"eheeqq\",\"value\":\"datatasijiaqqwom\"}],\"\":{\"mvlfmv\":\"datamwqfdhg\",\"bafscdpbzpo\":\"dataumjmpsxz\",\"qtcnyhsdgmoxnelh\":\"datajhubzkzjazfwywv\",\"kofcsvipwa\":\"datahzfyzb\"}}") .toObject(AppendVariableActivity.class); - Assertions.assertEquals("bqhbbzfcjmhpobu", model.name()); - Assertions.assertEquals("dyztqpszbtbxymby", model.description()); + Assertions.assertEquals("pdwyhggvhcoaoeti", model.name()); + Assertions.assertEquals("tkeiram", model.description()); 
Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ehxotizv", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mpplbcarc", model.userProperties().get(0).name()); - Assertions.assertEquals("iymvgnqqfnvdor", model.variableName()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("glubqtfc", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("ksafjht", model.userProperties().get(0).name()); + Assertions.assertEquals("lrx", model.variableName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AppendVariableActivity model = new AppendVariableActivity().withName("bqhbbzfcjmhpobu") - .withDescription("dyztqpszbtbxymby").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ehxotizv") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("a") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("mpplbcarc").withValue("datarftcjxzmxwwmhdl"), - new UserProperty().withName("fyonnbxvxrcm").withValue("datadmyjc"))) - .withVariableName("iymvgnqqfnvdor").withValue("datacvgknbmpcnezdpl"); + AppendVariableActivity model = new AppendVariableActivity().withName("pdwyhggvhcoaoeti") + .withDescription("tkeiram") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("glubqtfc") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, + DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("ksafjht").withValue("databrkghtsfp"), + new UserProperty().withName("junkhxthkqny").withValue("dataufvzrqaphep"), + new UserProperty().withName("eheeqq").withValue("datatasijiaqqwom"))) + .withVariableName("lrx") + .withValue("dataslccuyscjefa"); model = BinaryData.fromObject(model).toObject(AppendVariableActivity.class); - Assertions.assertEquals("bqhbbzfcjmhpobu", model.name()); - Assertions.assertEquals("dyztqpszbtbxymby", model.description()); + Assertions.assertEquals("pdwyhggvhcoaoeti", model.name()); + Assertions.assertEquals("tkeiram", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ehxotizv", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mpplbcarc", model.userProperties().get(0).name()); - Assertions.assertEquals("iymvgnqqfnvdor", model.variableName()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + 
Assertions.assertEquals("glubqtfc", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("ksafjht", model.userProperties().get(0).name()); + Assertions.assertEquals("lrx", model.variableName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java index 5ce6cd3622236..8714041a477f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java @@ -12,16 +12,17 @@ public final class AppendVariableActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AppendVariableActivityTypeProperties model - = BinaryData.fromString("{\"variableName\":\"buanybfmblwhpqn\",\"value\":\"datafpsppkqufdmgm\"}") + = BinaryData.fromString("{\"variableName\":\"huctprgpmgtjvu\",\"value\":\"datawcfzcsklvtcea\"}") .toObject(AppendVariableActivityTypeProperties.class); - Assertions.assertEquals("buanybfmblwhpqn", model.variableName()); + Assertions.assertEquals("huctprgpmgtjvu", model.variableName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AppendVariableActivityTypeProperties model = new AppendVariableActivityTypeProperties() - .withVariableName("buanybfmblwhpqn").withValue("datafpsppkqufdmgm"); + AppendVariableActivityTypeProperties model + = new AppendVariableActivityTypeProperties().withVariableName("huctprgpmgtjvu") + .withValue("datawcfzcsklvtcea"); model = BinaryData.fromObject(model).toObject(AppendVariableActivityTypeProperties.class); - Assertions.assertEquals("buanybfmblwhpqn", model.variableName()); + Assertions.assertEquals("huctprgpmgtjvu", model.variableName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java index 2ad9684df2509..5e971abcd200c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java @@ -10,7 +10,7 @@ public final class ArmIdWrapperTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ArmIdWrapper model = BinaryData.fromString("{\"id\":\"z\"}").toObject(ArmIdWrapper.class); + ArmIdWrapper model = BinaryData.fromString("{\"id\":\"kxcpt\"}").toObject(ArmIdWrapper.class); } @org.junit.jupiter.api.Test diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java index 
5a9cc32f3e190..f6cc657b6ceb2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java @@ -11,13 +11,13 @@ public final class AvroFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AvroFormat model = BinaryData.fromString( - "{\"type\":\"AvroFormat\",\"serializer\":\"datasnosnqliwkmzojf\",\"deserializer\":\"datayalhtgm\",\"\":{\"sttcucrcmm\":\"datazgbjbhrpgi\"}}") + "{\"type\":\"kh\",\"serializer\":\"datagapvdgtfpeerscdx\",\"deserializer\":\"datarnjr\",\"\":{\"acnrgfdtncmspsan\":\"datamb\",\"b\":\"dataame\"}}") .toObject(AvroFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AvroFormat model = new AvroFormat().withSerializer("datasnosnqliwkmzojf").withDeserializer("datayalhtgm"); + AvroFormat model = new AvroFormat().withSerializer("datagapvdgtfpeerscdx").withDeserializer("datarnjr"); model = BinaryData.fromObject(model).toObject(AvroFormat.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java index 125d6f93c6023..3b6e90047bedd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java @@ -18,29 +18,34 @@ public final class AvroSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AvroSink model = BinaryData.fromString( - "{\"type\":\"AvroSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datavzqhv\",\"disableMetricsCollection\":\"datadsnvtdbeanigozjr\",\"copyBehavior\":\"dataaq\",\"metadata\":[{\"name\":\"datalmzpfylqevwwvzn\",\"value\":\"dataxc\"},{\"name\":\"dataropzgjleec\",\"value\":\"databhqkvbins\"},{\"name\":\"dataw\",\"value\":\"dataegoupdqeflvd\"},{\"name\":\"dataqcqlexobeekzy\",\"value\":\"datapatwbbf\"}],\"\":{\"cahhfuydg\":\"datalhnwoh\",\"lcvibp\":\"datahitavgayusp\",\"efepwkhruzzwgbb\":\"datauzeeb\"}},\"formatSettings\":{\"type\":\"AvroWriteSettings\",\"recordName\":\"vfoyqvnlhsxeas\",\"recordNamespace\":\"qquvvsc\",\"maxRowsPerFile\":\"datakm\",\"fileNamePrefix\":\"datadukp\",\"\":{\"fkxazuboi\":\"dataibwuzvmorsyi\",\"kznnzpvjwegovic\":\"dataorwpbbjzdvaqoil\",\"atyypercazcchv\":\"dataqyrajdvvs\"}},\"writeBatchSize\":\"datacha\",\"writeBatchTimeout\":\"datatvotfhhayfxkf\",\"sinkRetryCount\":\"dataxefzliguw\",\"sinkRetryWait\":\"dataszcmfmynljigjc\",\"maxConcurrentConnections\":\"dataa\",\"disableMetricsCollection\":\"datawtivsk\",\"\":{\"hbxvvu\":\"datapnpunrvjb\",\"kj\":\"dataq\"}}") + 
"{\"type\":\"p\",\"storeSettings\":{\"type\":\"nqcgbijyp\",\"maxConcurrentConnections\":\"databyrkxzebvxuzch\",\"disableMetricsCollection\":\"dataeogdkcrc\",\"copyBehavior\":\"datazzeu\",\"metadata\":[{\"name\":\"datapxir\",\"value\":\"datahuqkicimyykmke\"},{\"name\":\"dataqmnr\",\"value\":\"dataefxkattpkkwldvk\"},{\"name\":\"datagxakgoptbjooqo\",\"value\":\"datankvnuwjrxbbxkh\"}],\"\":{\"tglo\":\"databx\",\"jhyiey\":\"datafmlbhlimgzimtzz\",\"xalvdhmumsmnub\":\"datarwfu\",\"xidqnvhrbfepf\":\"datanxrpsty\"}},\"formatSettings\":{\"type\":\"kwoommqvzz\",\"recordName\":\"pxthsfuxhlgoexu\",\"recordNamespace\":\"bfoo\",\"maxRowsPerFile\":\"datatxdlewh\",\"fileNamePrefix\":\"datavrip\",\"\":{\"fh\":\"datawfo\"}},\"writeBatchSize\":\"dataljajz\",\"writeBatchTimeout\":\"datagwarbvblat\",\"sinkRetryCount\":\"datajk\",\"sinkRetryWait\":\"datatodgisfej\",\"maxConcurrentConnections\":\"datap\",\"disableMetricsCollection\":\"dataifujynfjwktiyhiy\",\"\":{\"wo\":\"datavaodifupdafuf\",\"rqhgnrxxhzwtrx\":\"datas\"}}") .toObject(AvroSink.class); - Assertions.assertEquals("vfoyqvnlhsxeas", model.formatSettings().recordName()); - Assertions.assertEquals("qquvvsc", model.formatSettings().recordNamespace()); + Assertions.assertEquals("pxthsfuxhlgoexu", model.formatSettings().recordName()); + Assertions.assertEquals("bfoo", model.formatSettings().recordNamespace()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AvroSink model = new AvroSink().withWriteBatchSize("datacha").withWriteBatchTimeout("datatvotfhhayfxkf") - .withSinkRetryCount("dataxefzliguw").withSinkRetryWait("dataszcmfmynljigjc") - .withMaxConcurrentConnections("dataa").withDisableMetricsCollection("datawtivsk") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datavzqhv") - .withDisableMetricsCollection("datadsnvtdbeanigozjr").withCopyBehavior("dataaq") - .withMetadata(Arrays.asList(new MetadataItem().withName("datalmzpfylqevwwvzn").withValue("dataxc"), - new MetadataItem().withName("dataropzgjleec").withValue("databhqkvbins"), - new MetadataItem().withName("dataw").withValue("dataegoupdqeflvd"), - new MetadataItem().withName("dataqcqlexobeekzy").withValue("datapatwbbf"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new AvroWriteSettings().withRecordName("vfoyqvnlhsxeas").withRecordNamespace("qquvvsc") - .withMaxRowsPerFile("datakm").withFileNamePrefix("datadukp")); + AvroSink model = new AvroSink().withWriteBatchSize("dataljajz") + .withWriteBatchTimeout("datagwarbvblat") + .withSinkRetryCount("datajk") + .withSinkRetryWait("datatodgisfej") + .withMaxConcurrentConnections("datap") + .withDisableMetricsCollection("dataifujynfjwktiyhiy") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("databyrkxzebvxuzch") + .withDisableMetricsCollection("dataeogdkcrc") + .withCopyBehavior("datazzeu") + .withMetadata(Arrays.asList(new MetadataItem().withName("datapxir").withValue("datahuqkicimyykmke"), + new MetadataItem().withName("dataqmnr").withValue("dataefxkattpkkwldvk"), + new MetadataItem().withName("datagxakgoptbjooqo").withValue("datankvnuwjrxbbxkh"))) + .withAdditionalProperties(mapOf("type", "nqcgbijyp"))) + .withFormatSettings(new AvroWriteSettings().withRecordName("pxthsfuxhlgoexu") + .withRecordNamespace("bfoo") + .withMaxRowsPerFile("datatxdlewh") + .withFileNamePrefix("datavrip")); model = BinaryData.fromObject(model).toObject(AvroSink.class); - Assertions.assertEquals("vfoyqvnlhsxeas", model.formatSettings().recordName()); 
- Assertions.assertEquals("qquvvsc", model.formatSettings().recordNamespace()); + Assertions.assertEquals("pxthsfuxhlgoexu", model.formatSettings().recordName()); + Assertions.assertEquals("bfoo", model.formatSettings().recordNamespace()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java index d11bec34d6d83..081bbf2287683 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java @@ -14,17 +14,20 @@ public final class AvroSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AvroSource model = BinaryData.fromString( - "{\"type\":\"AvroSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataggoppmxcm\",\"disableMetricsCollection\":\"datagdd\",\"\":{\"unamzqvawk\":\"datayf\"}},\"additionalColumns\":\"dataxvzcxxfwp\",\"sourceRetryCount\":\"datagqwbrzkmgyl\",\"sourceRetryWait\":\"datacxmh\",\"maxConcurrentConnections\":\"datarecoiqwnqlizlcyh\",\"disableMetricsCollection\":\"datarkf\",\"\":{\"fmbvmajcmpohjdvf\":\"dataiommis\",\"diuyqdjk\":\"datavbwawymahboi\",\"bryhvshkvup\":\"datagcjssqpk\"}}") + "{\"type\":\"ltozxdzoldwvfn\",\"storeSettings\":{\"type\":\"hgnf\",\"maxConcurrentConnections\":\"datanhwpnloifxzdo\",\"disableMetricsCollection\":\"datavxavhfhlxqww\",\"\":{\"b\":\"dataamcfrfaytcygoo\"}},\"additionalColumns\":\"datambcklfpemgfvvnk\",\"sourceRetryCount\":\"datayaterjjuz\",\"sourceRetryWait\":\"dataegeso\",\"maxConcurrentConnections\":\"datapudaluggdh\",\"disableMetricsCollection\":\"datatgekecu\",\"\":{\"sidityvvgxejhq\":\"datakuxvccpdact\"}}") .toObject(AvroSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AvroSource model = new AvroSource().withSourceRetryCount("datagqwbrzkmgyl").withSourceRetryWait("datacxmh") - .withMaxConcurrentConnections("datarecoiqwnqlizlcyh").withDisableMetricsCollection("datarkf") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataggoppmxcm") - .withDisableMetricsCollection("datagdd").withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("dataxvzcxxfwp"); + AvroSource model = new AvroSource().withSourceRetryCount("datayaterjjuz") + .withSourceRetryWait("dataegeso") + .withMaxConcurrentConnections("datapudaluggdh") + .withDisableMetricsCollection("datatgekecu") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datanhwpnloifxzdo") + .withDisableMetricsCollection("datavxavhfhlxqww") + .withAdditionalProperties(mapOf("type", "hgnf"))) + .withAdditionalColumns("datambcklfpemgfvvnk"); model = BinaryData.fromObject(model).toObject(AvroSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java index 1701d7d603fa2..a8e3ea75597b4 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java @@ -12,18 +12,20 @@ public final class AvroWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AvroWriteSettings model = BinaryData.fromString( - "{\"type\":\"AvroWriteSettings\",\"recordName\":\"gdjvyclasdprknup\",\"recordNamespace\":\"eklgsnpvqczpko\",\"maxRowsPerFile\":\"dataolthowcsueb\",\"fileNamePrefix\":\"datag\",\"\":{\"exar\":\"dataimtxmd\",\"ivftl\":\"dataukoir\"}}") + "{\"type\":\"jva\",\"recordName\":\"mktpykoicpk\",\"recordNamespace\":\"mqfdtbaobjaof\",\"maxRowsPerFile\":\"datavhhrgvkuuikr\",\"fileNamePrefix\":\"dataelrwsjvdxe\",\"\":{\"fm\":\"datagfvygrfyyknxua\",\"kt\":\"dataynlcimjmurocryfu\"}}") .toObject(AvroWriteSettings.class); - Assertions.assertEquals("gdjvyclasdprknup", model.recordName()); - Assertions.assertEquals("eklgsnpvqczpko", model.recordNamespace()); + Assertions.assertEquals("mktpykoicpk", model.recordName()); + Assertions.assertEquals("mqfdtbaobjaof", model.recordNamespace()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AvroWriteSettings model = new AvroWriteSettings().withRecordName("gdjvyclasdprknup") - .withRecordNamespace("eklgsnpvqczpko").withMaxRowsPerFile("dataolthowcsueb").withFileNamePrefix("datag"); + AvroWriteSettings model = new AvroWriteSettings().withRecordName("mktpykoicpk") + .withRecordNamespace("mqfdtbaobjaof") + .withMaxRowsPerFile("datavhhrgvkuuikr") + .withFileNamePrefix("dataelrwsjvdxe"); model = BinaryData.fromObject(model).toObject(AvroWriteSettings.class); - Assertions.assertEquals("gdjvyclasdprknup", model.recordName()); - Assertions.assertEquals("eklgsnpvqczpko", model.recordNamespace()); + Assertions.assertEquals("mktpykoicpk", model.recordName()); + Assertions.assertEquals("mqfdtbaobjaof", model.recordNamespace()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java index 5b27d7018a45d..cc22a54cb9045 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java @@ -12,15 +12,15 @@ public final class AzPowerShellSetupTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzPowerShellSetup model - = BinaryData.fromString("{\"type\":\"AzPowerShellSetup\",\"typeProperties\":{\"version\":\"pjfkr\"}}") + = BinaryData.fromString("{\"type\":\"ecjgpjnydkkgbv\",\"typeProperties\":{\"version\":\"kqaqgzcojgdqjh\"}}") .toObject(AzPowerShellSetup.class); - Assertions.assertEquals("pjfkr", model.version()); + Assertions.assertEquals("kqaqgzcojgdqjh", model.version()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzPowerShellSetup model = new AzPowerShellSetup().withVersion("pjfkr"); + AzPowerShellSetup model = new AzPowerShellSetup().withVersion("kqaqgzcojgdqjh"); model = BinaryData.fromObject(model).toObject(AzPowerShellSetup.class); - 
Assertions.assertEquals("pjfkr", model.version()); + Assertions.assertEquals("kqaqgzcojgdqjh", model.version()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java index f60b0b43ffc89..e01eca68157ff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java @@ -12,14 +12,14 @@ public final class AzPowerShellSetupTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzPowerShellSetupTypeProperties model - = BinaryData.fromString("{\"version\":\"rerdlgbvtpxowg\"}").toObject(AzPowerShellSetupTypeProperties.class); - Assertions.assertEquals("rerdlgbvtpxowg", model.version()); + = BinaryData.fromString("{\"version\":\"qksrgkvg\"}").toObject(AzPowerShellSetupTypeProperties.class); + Assertions.assertEquals("qksrgkvg", model.version()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzPowerShellSetupTypeProperties model = new AzPowerShellSetupTypeProperties().withVersion("rerdlgbvtpxowg"); + AzPowerShellSetupTypeProperties model = new AzPowerShellSetupTypeProperties().withVersion("qksrgkvg"); model = BinaryData.fromObject(model).toObject(AzPowerShellSetupTypeProperties.class); - Assertions.assertEquals("rerdlgbvtpxowg", model.version()); + Assertions.assertEquals("qksrgkvg", model.version()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java index 8649ec6887402..b923e1d386c1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java @@ -21,38 +21,45 @@ public final class AzureBlobDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureBlob\",\"typeProperties\":{\"folderPath\":\"datal\",\"tableRootLocation\":\"datavdovbrb\",\"fileName\":\"datarlprda\",\"modifiedDatetimeStart\":\"datacd\",\"modifiedDatetimeEnd\":\"databnygdjc\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"databpwyykd\",\"deserializer\":\"dataqzlrzndasdn\",\"\":{\"arhqt\":\"datajqmvytgkiq\",\"azekdzdzffzjwzts\":\"datav\"}},\"compression\":{\"type\":\"datachggryelg\",\"level\":\"dataatig\",\"\":{\"wnvojtvmdev\":\"datarrkdknczgor\",\"rbpyhssrl\":\"datalhqvbk\",\"bxxo\":\"datakpkpkocmacc\",\"kgv\":\"datayicyvspeslhwy\"}}},\"description\":\"cpumdd\",\"structure\":\"dataajk\",\"schema\":\"datayddt\",\"linkedServiceName\":{\"referenceName\":\"fcudvafnbfbqv\",\"parameters\":{\"rdw\":\"datanxhgk\",\"zvcmbpwd\":\"dataejpec\"}},\"parameters\":{\"ffbvtzldzchub\":{\"type\":\"Object\",\"defaultValue\":\"dataprldidwm\"},\"bzakp\":{\"type\":\"Array\",\"defaultValue\":\"datanlzuvigvlghf\"},\"aqpojpsucmximc\":{\"type\":\"Float\",\"defaultValue\":\"dataq\"},\"yv\":{\"type\":\"String\",\"defaultValue\":\"dataynqjgsat\"}},\"annotations\":[\"datagngc\",\"datausxhircpg\",\"datavsvkkjbjolpyo\",\"datalkvuznadvh\"],\"folder\":{\"name\":\"eoigowxxbhtpsyio\"},\"\":{\"wanvmwdvgjqcrbko\":\"dataqwtqszzgyksik\",\"gyweo\":\"datapnbn\",\"w\":\"databepgcmahiwfry\"}}") + "{\"type\":\"pwnikxkcajgrbrc\",\"typeProperties\":{\"folderPath\":\"datauasnjeglhtrxb\",\"tableRootLocation\":\"datapcjcnwjzbqb\",\"fileName\":\"datarnwv\",\"modifiedDatetimeStart\":\"datavoqizawws\",\"modifiedDatetimeEnd\":\"dataorgfhjxsawooauff\",\"format\":{\"type\":\"qkmwzrdqyoybmrno\",\"serializer\":\"datacjnzdaiovrbh\",\"deserializer\":\"dataqphkv\",\"\":{\"yp\":\"dataadcrxyl\",\"iwhjdfrwpsshrmn\":\"datatyzvelffohu\",\"ogkscxj\":\"datacclpct\"}},\"compression\":{\"type\":\"datagmos\",\"level\":\"dataebnxsqcows\",\"\":{\"k\":\"datafqlamfbqhsu\",\"pn\":\"datafu\"}}},\"description\":\"vgoo\",\"structure\":\"datasazmzl\",\"schema\":\"dataxitmhxxrqic\",\"linkedServiceName\":{\"referenceName\":\"zfvwjdthkvpyeyoa\",\"parameters\":{\"kjr\":\"datampnqup\",\"qdcclcv\":\"datafwkyluobdxw\"}},\"parameters\":{\"cvwbz\":{\"type\":\"Int\",\"defaultValue\":\"dataay\"},\"qnpfydrfb\":{\"type\":\"Array\",\"defaultValue\":\"datalrymfjxlpiy\"},\"byxmkhmqyncgaull\":{\"type\":\"Array\",\"defaultValue\":\"datay\"},\"gm\":{\"type\":\"Array\",\"defaultValue\":\"datayygjqpulmw\"}},\"annotations\":[\"datamiwxzfvv\",\"dataucqfgufjnbxwbm\",\"datadukinhlxh\",\"datadekekzouyveww\"],\"folder\":{\"name\":\"dwcgldo\"},\"\":{\"dxfhhht\":\"dataa\",\"qtdn\":\"datast\",\"dshvvf\":\"datackkpl\"}}") .toObject(AzureBlobDataset.class); - Assertions.assertEquals("cpumdd", model.description()); - Assertions.assertEquals("fcudvafnbfbqv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ffbvtzldzchub").type()); - Assertions.assertEquals("eoigowxxbhtpsyio", model.folder().name()); + Assertions.assertEquals("vgoo", model.description()); + Assertions.assertEquals("zfvwjdthkvpyeyoa", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("cvwbz").type()); + Assertions.assertEquals("dwcgldo", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobDataset model = new AzureBlobDataset().withDescription("cpumdd").withStructure("dataajk") - .withSchema("datayddt") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fcudvafnbfbqv") - 
.withParameters(mapOf("rdw", "datanxhgk", "zvcmbpwd", "dataejpec"))) - .withParameters(mapOf("ffbvtzldzchub", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataprldidwm"), "bzakp", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datanlzuvigvlghf"), - "aqpojpsucmximc", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataq"), - "yv", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataynqjgsat"))) - .withAnnotations(Arrays.asList("datagngc", "datausxhircpg", "datavsvkkjbjolpyo", "datalkvuznadvh")) - .withFolder(new DatasetFolder().withName("eoigowxxbhtpsyio")).withFolderPath("datal") - .withTableRootLocation("datavdovbrb").withFileName("datarlprda").withModifiedDatetimeStart("datacd") - .withModifiedDatetimeEnd("databnygdjc") - .withFormat(new DatasetStorageFormat().withSerializer("databpwyykd").withDeserializer("dataqzlrzndasdn") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datachggryelg").withLevel("dataatig") + AzureBlobDataset model = new AzureBlobDataset().withDescription("vgoo") + .withStructure("datasazmzl") + .withSchema("dataxitmhxxrqic") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zfvwjdthkvpyeyoa") + .withParameters(mapOf("kjr", "datampnqup", "qdcclcv", "datafwkyluobdxw"))) + .withParameters(mapOf("cvwbz", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataay"), "qnpfydrfb", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datalrymfjxlpiy"), + "byxmkhmqyncgaull", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datay"), "gm", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datayygjqpulmw"))) + .withAnnotations(Arrays.asList("datamiwxzfvv", "dataucqfgufjnbxwbm", "datadukinhlxh", "datadekekzouyveww")) + .withFolder(new DatasetFolder().withName("dwcgldo")) + .withFolderPath("datauasnjeglhtrxb") + .withTableRootLocation("datapcjcnwjzbqb") + .withFileName("datarnwv") + .withModifiedDatetimeStart("datavoqizawws") + .withModifiedDatetimeEnd("dataorgfhjxsawooauff") + .withFormat(new DatasetStorageFormat().withSerializer("datacjnzdaiovrbh") + .withDeserializer("dataqphkv") + .withAdditionalProperties(mapOf("type", "qkmwzrdqyoybmrno"))) + .withCompression(new DatasetCompression().withType("datagmos") + .withLevel("dataebnxsqcows") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureBlobDataset.class); - Assertions.assertEquals("cpumdd", model.description()); - Assertions.assertEquals("fcudvafnbfbqv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ffbvtzldzchub").type()); - Assertions.assertEquals("eoigowxxbhtpsyio", model.folder().name()); + Assertions.assertEquals("vgoo", model.description()); + Assertions.assertEquals("zfvwjdthkvpyeyoa", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("cvwbz").type()); + Assertions.assertEquals("dwcgldo", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java index bf460ce73d884..bc4a78e07fda0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java @@ -15,18 +15,22 @@ public final class AzureBlobDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"datahkapitsks\",\"tableRootLocation\":\"datayfttibjepzwhju\",\"fileName\":\"datadgbggcjxzh\",\"modifiedDatetimeStart\":\"dataivwehsudym\",\"modifiedDatetimeEnd\":\"databhdosmbngkql\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataduvxd\",\"deserializer\":\"dataexatmdmnrsen\",\"\":{\"xznntwgkvyohp\":\"datarxyddmiploisjkzs\",\"wytb\":\"dataapzupz\",\"mxpqkjnpyriwn\":\"datajzghximkg\",\"xmmqmt\":\"dataot\"}},\"compression\":{\"type\":\"dataky\",\"level\":\"dataexwdonbexf\",\"\":{\"fozuumrtgjq\":\"dataaubheeggzgrnqt\",\"nsyxzxjmk\":\"dataacan\",\"lazof\":\"datanb\"}}}") + "{\"folderPath\":\"dataxccyijjimhijzr\",\"tableRootLocation\":\"datajxmvvsduydwn\",\"fileName\":\"dataruhhqldrdymnswx\",\"modifiedDatetimeStart\":\"dataxqwqnghxnim\",\"modifiedDatetimeEnd\":\"dataujrxgunnqgyp\",\"format\":{\"type\":\"ny\",\"serializer\":\"datauevqmvyumgmm\",\"deserializer\":\"databsnznwgsqufm\",\"\":{\"gcyjpgawepk\":\"datayoseqcazisvbr\",\"zpghlrdtbgblxbu\":\"dataon\",\"mfjihv\":\"databrvjztaflv\",\"tt\":\"datajcq\"}},\"compression\":{\"type\":\"datauqmmfjewfeqbavdo\",\"level\":\"datawy\",\"\":{\"lvxgwzz\":\"datamw\"}}}") .toObject(AzureBlobDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobDatasetTypeProperties model = new AzureBlobDatasetTypeProperties().withFolderPath("datahkapitsks") - .withTableRootLocation("datayfttibjepzwhju").withFileName("datadgbggcjxzh") - .withModifiedDatetimeStart("dataivwehsudym").withModifiedDatetimeEnd("databhdosmbngkql") - .withFormat(new DatasetStorageFormat().withSerializer("dataduvxd").withDeserializer("dataexatmdmnrsen") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("dataky").withLevel("dataexwdonbexf") + AzureBlobDatasetTypeProperties model = new AzureBlobDatasetTypeProperties().withFolderPath("dataxccyijjimhijzr") + .withTableRootLocation("datajxmvvsduydwn") + .withFileName("dataruhhqldrdymnswx") + .withModifiedDatetimeStart("dataxqwqnghxnim") + .withModifiedDatetimeEnd("dataujrxgunnqgyp") + .withFormat(new DatasetStorageFormat().withSerializer("datauevqmvyumgmm") + .withDeserializer("databsnznwgsqufm") + .withAdditionalProperties(mapOf("type", "ny"))) + .withCompression(new DatasetCompression().withType("datauqmmfjewfeqbavdo") + .withLevel("datawy") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureBlobDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java index 
534fff582e272..1f96b5e3adc89 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java @@ -21,33 +21,38 @@ public final class AzureBlobFSDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSDataset model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSFile\",\"typeProperties\":{\"folderPath\":\"datagc\",\"fileName\":\"datayrhkvxzzmiem\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datatgp\",\"deserializer\":\"datasw\",\"\":{\"ornfbm\":\"datavjmghpakbqyhl\"}},\"compression\":{\"type\":\"dataagkncjmybnyevz\",\"level\":\"datajawrhulemm\",\"\":{\"kitzm\":\"datawppxirx\",\"cltjl\":\"datahitaxj\"}}},\"description\":\"gcem\",\"structure\":\"datadzdvyljubv\",\"schema\":\"datayzufldifnivlutgg\",\"linkedServiceName\":{\"referenceName\":\"aacxauhvc\",\"parameters\":{\"oiyygkts\":\"datahklsqx\",\"xxoxwfzbkv\":\"dataj\"}},\"parameters\":{\"snbwutlvuwm\":{\"type\":\"Bool\",\"defaultValue\":\"dataxphsowbe\"}},\"annotations\":[\"dataustihtgrafjajvky\",\"datammjczvog\"],\"folder\":{\"name\":\"rjenn\"},\"\":{\"xnrp\":\"dataaeuwqdwxhhlbmyph\",\"ywbihqbtodjfyx\":\"datahewokyqsfkxf\",\"rugyozzzawnjdv\":\"datavkvwzdmvddqw\"}}") + "{\"type\":\"bjsdj\",\"typeProperties\":{\"folderPath\":\"datakfnyskwwunqhc\",\"fileName\":\"datavoj\",\"format\":{\"type\":\"hipcukvbljpx\",\"serializer\":\"datarvchyluqalp\",\"deserializer\":\"datafjjfxtizt\",\"\":{\"xxsri\":\"datatahhhsa\",\"yyefmxwoqotiiqb\":\"datarcwbaae\",\"sty\":\"datapasrvrmt\",\"k\":\"datakjhorlxkpy\"}},\"compression\":{\"type\":\"datanycntr\",\"level\":\"dataxwtdmbqjtsuhqh\",\"\":{\"x\":\"datatdyqav\",\"npaami\":\"dataqmzxsyaks\",\"hvwt\":\"datawb\",\"kiy\":\"datapbgchcgsfzhb\"}}},\"description\":\"xeysgwkcffe\",\"structure\":\"datanzcbivove\",\"schema\":\"datakhfeqcooplfpo\",\"linkedServiceName\":{\"referenceName\":\"imgckycjpeebznbz\",\"parameters\":{\"tn\":\"dataulo\",\"hgpncdmwkhupfhs\":\"databmxooq\"}},\"parameters\":{\"iunyevdyzdsytc\":{\"type\":\"Float\",\"defaultValue\":\"dataa\"}},\"annotations\":[\"datawhcamwuynfxkc\",\"datasfcmvhadrpb\",\"datatvyrkljqkq\"],\"folder\":{\"name\":\"jtvjkowggxawwd\"},\"\":{\"lprgeganihkjcner\":\"datapnkteiidlbovwb\"}}") .toObject(AzureBlobFSDataset.class); - Assertions.assertEquals("gcem", model.description()); - Assertions.assertEquals("aacxauhvc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("snbwutlvuwm").type()); - Assertions.assertEquals("rjenn", model.folder().name()); + Assertions.assertEquals("xeysgwkcffe", model.description()); + Assertions.assertEquals("imgckycjpeebznbz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("iunyevdyzdsytc").type()); + Assertions.assertEquals("jtvjkowggxawwd", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSDataset model = new AzureBlobFSDataset().withDescription("gcem").withStructure("datadzdvyljubv") - .withSchema("datayzufldifnivlutgg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aacxauhvc") - .withParameters(mapOf("oiyygkts", "datahklsqx", "xxoxwfzbkv", "dataj"))) - .withParameters(mapOf("snbwutlvuwm", - new 
ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataxphsowbe"))) - .withAnnotations(Arrays.asList("dataustihtgrafjajvky", "datammjczvog")) - .withFolder(new DatasetFolder().withName("rjenn")).withFolderPath("datagc").withFileName("datayrhkvxzzmiem") - .withFormat(new DatasetStorageFormat().withSerializer("datatgp").withDeserializer("datasw") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("dataagkncjmybnyevz").withLevel("datajawrhulemm") + AzureBlobFSDataset model = new AzureBlobFSDataset().withDescription("xeysgwkcffe") + .withStructure("datanzcbivove") + .withSchema("datakhfeqcooplfpo") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("imgckycjpeebznbz") + .withParameters(mapOf("tn", "dataulo", "hgpncdmwkhupfhs", "databmxooq"))) + .withParameters(mapOf("iunyevdyzdsytc", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataa"))) + .withAnnotations(Arrays.asList("datawhcamwuynfxkc", "datasfcmvhadrpb", "datatvyrkljqkq")) + .withFolder(new DatasetFolder().withName("jtvjkowggxawwd")) + .withFolderPath("datakfnyskwwunqhc") + .withFileName("datavoj") + .withFormat(new DatasetStorageFormat().withSerializer("datarvchyluqalp") + .withDeserializer("datafjjfxtizt") + .withAdditionalProperties(mapOf("type", "hipcukvbljpx"))) + .withCompression(new DatasetCompression().withType("datanycntr") + .withLevel("dataxwtdmbqjtsuhqh") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureBlobFSDataset.class); - Assertions.assertEquals("gcem", model.description()); - Assertions.assertEquals("aacxauhvc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("snbwutlvuwm").type()); - Assertions.assertEquals("rjenn", model.folder().name()); + Assertions.assertEquals("xeysgwkcffe", model.description()); + Assertions.assertEquals("imgckycjpeebznbz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("iunyevdyzdsytc").type()); + Assertions.assertEquals("jtvjkowggxawwd", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java index 0f29e842f39a6..73de1b4ff439c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java @@ -15,18 +15,20 @@ public final class AzureBlobFSDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSDatasetTypeProperties model = BinaryData.fromString( - 
"{\"folderPath\":\"datarho\",\"fileName\":\"datakkvxu\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datazbvb\",\"deserializer\":\"datauvqhxtozfgdkw\",\"\":{\"utui\":\"datarklpiigfuzk\",\"xyll\":\"datajclzjwaqdzqydewu\"}},\"compression\":{\"type\":\"datazevtzqwczoc\",\"level\":\"databek\",\"\":{\"horkslhraqk\":\"datanfpkyvnhiys\",\"flteatnegef\":\"datawlwkfflaqwmwqog\",\"ibt\":\"datajxnjtqbgy\",\"kxunsaujqgbb\":\"datanvxwtdqtcbjdbtqy\"}}}") + "{\"folderPath\":\"datayjulskwwnqhq\",\"fileName\":\"datagpwxtvce\",\"format\":{\"type\":\"vbvx\",\"serializer\":\"dataegenlrj\",\"deserializer\":\"datamwevguyflnxel\",\"\":{\"owwe\":\"datafzcde\",\"sfqbirtybcelfjn\":\"datahyfkdilbwqlqa\",\"el\":\"dataodnjyhzfaxskdv\",\"qxsorchaz\":\"datamodpe\"}},\"compression\":{\"type\":\"datao\",\"level\":\"datayhl\",\"\":{\"b\":\"datavhs\",\"gf\":\"datapwxslaj\",\"awkmibu\":\"datagaef\"}}}") .toObject(AzureBlobFSDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureBlobFSDatasetTypeProperties model - = new AzureBlobFSDatasetTypeProperties().withFolderPath("datarho").withFileName("datakkvxu") - .withFormat(new DatasetStorageFormat().withSerializer("datazbvb").withDeserializer("datauvqhxtozfgdkw") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datazevtzqwczoc").withLevel("databek") - .withAdditionalProperties(mapOf())); + = new AzureBlobFSDatasetTypeProperties().withFolderPath("datayjulskwwnqhq") + .withFileName("datagpwxtvce") + .withFormat(new DatasetStorageFormat().withSerializer("dataegenlrj") + .withDeserializer("datamwevguyflnxel") + .withAdditionalProperties(mapOf("type", "vbvx"))) + .withCompression( + new DatasetCompression().withType("datao").withLevel("datayhl").withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureBlobFSDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java index 8b22c37fffdcf..2288823b24b8c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java @@ -11,14 +11,15 @@ public final class AzureBlobFSLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSLocation model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSLocation\",\"fileSystem\":\"datadsiuorin\",\"folderPath\":\"datacedpksriwmmtmqrx\",\"fileName\":\"dataqvvyczyay\",\"\":{\"bxiqahragpxmibpl\":\"datag\"}}") + "{\"type\":\"al\",\"fileSystem\":\"datasxrznmg\",\"folderPath\":\"datackhefrb\",\"fileName\":\"dataeu\",\"\":{\"jraxdtpryjmg\":\"datagpxebjludcayt\",\"msvvylj\":\"datansewouxl\",\"hqqylmpc\":\"datarkepos\"}}") .toObject(AzureBlobFSLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSLocation model = new AzureBlobFSLocation().withFolderPath("datacedpksriwmmtmqrx") - .withFileName("dataqvvyczyay").withFileSystem("datadsiuorin"); + AzureBlobFSLocation model = new AzureBlobFSLocation().withFolderPath("datackhefrb") + 
.withFileName("dataeu") + .withFileSystem("datasxrznmg"); model = BinaryData.fromObject(model).toObject(AzureBlobFSLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java index 53953e7c7a8c6..3768b6d805f35 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java @@ -11,18 +11,24 @@ public final class AzureBlobFSReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSReadSettings\",\"recursive\":\"datasbk\",\"wildcardFolderPath\":\"dataouytsajjgvu\",\"wildcardFileName\":\"datawichjk\",\"fileListPath\":\"datatlsohrtgpvvcbt\",\"enablePartitionDiscovery\":\"dataiwc\",\"partitionRootPath\":\"datayaoyzjfgv\",\"deleteFilesAfterCompletion\":\"datauimnabgrs\",\"modifiedDatetimeStart\":\"datazmthiecuflazfot\",\"modifiedDatetimeEnd\":\"datakumam\",\"maxConcurrentConnections\":\"datargljekh\",\"disableMetricsCollection\":\"datafgjbeybdukbglniw\",\"\":{\"oexkon\":\"dataysceji\",\"edhaftniffajniwb\":\"dataiacdloehsmvvxkc\"}}") + "{\"type\":\"qvfxvvog\",\"recursive\":\"datajoklb\",\"wildcardFolderPath\":\"datat\",\"wildcardFileName\":\"datatwxfjlpk\",\"fileListPath\":\"datarexf\",\"enablePartitionDiscovery\":\"datafuflupuvwlfzj\",\"partitionRootPath\":\"dataglac\",\"deleteFilesAfterCompletion\":\"datagzscgslwujk\",\"modifiedDatetimeStart\":\"datatpmlrjnnbmodsy\",\"modifiedDatetimeEnd\":\"datatvatuj\",\"maxConcurrentConnections\":\"datahxoxwpiqkkmpfnwd\",\"disableMetricsCollection\":\"datazwmtsm\",\"\":{\"vidkzbdbvl\":\"dataiypkhnrgmgnvcu\"}}") .toObject(AzureBlobFSReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSReadSettings model = new AzureBlobFSReadSettings().withMaxConcurrentConnections("datargljekh") - .withDisableMetricsCollection("datafgjbeybdukbglniw").withRecursive("datasbk") - .withWildcardFolderPath("dataouytsajjgvu").withWildcardFileName("datawichjk") - .withFileListPath("datatlsohrtgpvvcbt").withEnablePartitionDiscovery("dataiwc") - .withPartitionRootPath("datayaoyzjfgv").withDeleteFilesAfterCompletion("datauimnabgrs") - .withModifiedDatetimeStart("datazmthiecuflazfot").withModifiedDatetimeEnd("datakumam"); + AzureBlobFSReadSettings model + = new AzureBlobFSReadSettings().withMaxConcurrentConnections("datahxoxwpiqkkmpfnwd") + .withDisableMetricsCollection("datazwmtsm") + .withRecursive("datajoklb") + .withWildcardFolderPath("datat") + .withWildcardFileName("datatwxfjlpk") + .withFileListPath("datarexf") + .withEnablePartitionDiscovery("datafuflupuvwlfzj") + .withPartitionRootPath("dataglac") + .withDeleteFilesAfterCompletion("datagzscgslwujk") + .withModifiedDatetimeStart("datatpmlrjnnbmodsy") + .withModifiedDatetimeEnd("datatvatuj"); model = BinaryData.fromObject(model).toObject(AzureBlobFSReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java index 65cd48947fb78..be26d43ab8e44 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java @@ -13,19 +13,24 @@ public final class AzureBlobFSSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSSink model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSSink\",\"copyBehavior\":\"datamiw\",\"metadata\":[{\"name\":\"datakxrqzgshqx\",\"value\":\"dataunuvfslawimhoaqj\"},{\"name\":\"datahlpznmdai\",\"value\":\"datazqz\"},{\"name\":\"datadipnhbsvrlr\",\"value\":\"dataxmnoa\"}],\"writeBatchSize\":\"datayadyfnxtlln\",\"writeBatchTimeout\":\"datamdgsvaekuovwi\",\"sinkRetryCount\":\"dataykprrddbenf\",\"sinkRetryWait\":\"datafszmxpos\",\"maxConcurrentConnections\":\"datasc\",\"disableMetricsCollection\":\"datauldkpdle\",\"\":{\"bxggknm\":\"datajujps\",\"z\":\"datakniqoqyrcpsjea\",\"jwsddyq\":\"datajcsbkmaluchbfrt\",\"txsyufex\":\"dataxpnzpuknfpgg\"}}") + "{\"type\":\"iehwmaxl\",\"copyBehavior\":\"dataeskmvrcyyzaal\",\"metadata\":[{\"name\":\"datawiegsswijqsndq\",\"value\":\"datadtczxwqmundle\"},{\"name\":\"datalcuedrmqkwk\",\"value\":\"databtrhrygdpjufmvoz\"},{\"name\":\"datatcfppjegctsatnry\",\"value\":\"datauewrwcqrvtwv\"},{\"name\":\"dataprrxhxqpmzznmns\",\"value\":\"datawadvbwewwd\"}],\"writeBatchSize\":\"dataag\",\"writeBatchTimeout\":\"datah\",\"sinkRetryCount\":\"dataneqapllodowsjcvp\",\"sinkRetryWait\":\"datayullivcym\",\"maxConcurrentConnections\":\"datablh\",\"disableMetricsCollection\":\"datamhc\",\"\":{\"o\":\"dataw\",\"glpglhlwulugrum\":\"datanajmwpeaoegg\",\"qrpbwykeeo\":\"dataprnbozvixamhi\"}}") .toObject(AzureBlobFSSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSSink model = new AzureBlobFSSink().withWriteBatchSize("datayadyfnxtlln") - .withWriteBatchTimeout("datamdgsvaekuovwi").withSinkRetryCount("dataykprrddbenf") - .withSinkRetryWait("datafszmxpos").withMaxConcurrentConnections("datasc") - .withDisableMetricsCollection("datauldkpdle").withCopyBehavior("datamiw") - .withMetadata(Arrays.asList(new MetadataItem().withName("datakxrqzgshqx").withValue("dataunuvfslawimhoaqj"), - new MetadataItem().withName("datahlpznmdai").withValue("datazqz"), - new MetadataItem().withName("datadipnhbsvrlr").withValue("dataxmnoa"))); + AzureBlobFSSink model = new AzureBlobFSSink().withWriteBatchSize("dataag") + .withWriteBatchTimeout("datah") + .withSinkRetryCount("dataneqapllodowsjcvp") + .withSinkRetryWait("datayullivcym") + .withMaxConcurrentConnections("datablh") + .withDisableMetricsCollection("datamhc") + .withCopyBehavior("dataeskmvrcyyzaal") + .withMetadata( + Arrays.asList(new MetadataItem().withName("datawiegsswijqsndq").withValue("datadtczxwqmundle"), + new MetadataItem().withName("datalcuedrmqkwk").withValue("databtrhrygdpjufmvoz"), + new MetadataItem().withName("datatcfppjegctsatnry").withValue("datauewrwcqrvtwv"), + new MetadataItem().withName("dataprrxhxqpmzznmns").withValue("datawadvbwewwd"))); model = BinaryData.fromObject(model).toObject(AzureBlobFSSink.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java index 7f98d87a0a7f7..c1321a4ae2cc9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java @@ -11,16 +11,19 @@ public final class AzureBlobFSSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSSource model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSSource\",\"treatEmptyAsNull\":\"datautawgyl\",\"skipHeaderLineCount\":\"datadrfclehlop\",\"recursive\":\"datavpeaeyjl\",\"sourceRetryCount\":\"dataduxhopyavcb\",\"sourceRetryWait\":\"dataembvfa\",\"maxConcurrentConnections\":\"dataxjoa\",\"disableMetricsCollection\":\"dataxmumfbkaxzrycvac\",\"\":{\"aq\":\"datajysyphxeoq\",\"sbbawrb\":\"dataikceiyuv\",\"rtnk\":\"dataooxvprqlxqhqgip\"}}") + "{\"type\":\"tnhi\",\"treatEmptyAsNull\":\"dataoeegal\",\"skipHeaderLineCount\":\"dataqy\",\"recursive\":\"datayzilocn\",\"sourceRetryCount\":\"dataegjvhyvnqbhcl\",\"sourceRetryWait\":\"dataksoqzzyrovvnac\",\"maxConcurrentConnections\":\"datajmhclhcqcjngwmb\",\"disableMetricsCollection\":\"datagsidasovlrj\",\"\":{\"pxxbkxwhsfhe\":\"dataydtjebbacscirzts\"}}") .toObject(AzureBlobFSSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSSource model = new AzureBlobFSSource().withSourceRetryCount("dataduxhopyavcb") - .withSourceRetryWait("dataembvfa").withMaxConcurrentConnections("dataxjoa") - .withDisableMetricsCollection("dataxmumfbkaxzrycvac").withTreatEmptyAsNull("datautawgyl") - .withSkipHeaderLineCount("datadrfclehlop").withRecursive("datavpeaeyjl"); + AzureBlobFSSource model = new AzureBlobFSSource().withSourceRetryCount("dataegjvhyvnqbhcl") + .withSourceRetryWait("dataksoqzzyrovvnac") + .withMaxConcurrentConnections("datajmhclhcqcjngwmb") + .withDisableMetricsCollection("datagsidasovlrj") + .withTreatEmptyAsNull("dataoeegal") + .withSkipHeaderLineCount("dataqy") + .withRecursive("datayzilocn"); model = BinaryData.fromObject(model).toObject(AzureBlobFSSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java index 87ff876fcdc1c..3de9245a45055 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java @@ -13,19 +13,18 @@ public final class AzureBlobFSWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobFSWriteSettings model = BinaryData.fromString( - 
"{\"type\":\"AzureBlobFSWriteSettings\",\"blockSizeInMB\":\"datatfozysfjdcokbp\",\"maxConcurrentConnections\":\"dataqelms\",\"disableMetricsCollection\":\"databtneltnbyvbg\",\"copyBehavior\":\"datarumududwecdsybi\",\"metadata\":[{\"name\":\"dataxdkwvceqlyxbyqqo\",\"value\":\"datarekiojusmdodku\"},{\"name\":\"datacn\",\"value\":\"dataovlyykxyfpkdsld\"},{\"name\":\"datarwvswlhjlbk\",\"value\":\"datags\"}],\"\":{\"jekqz\":\"datanatl\",\"zfayyuq\":\"datawlkyrnmgsbu\",\"derzsnfgmohhcgh\":\"datacwrtr\",\"rulboawzplwghfgq\":\"datasmvvfpkymqnvvwfa\"}}") + "{\"type\":\"lvxwlqlugnbudjy\",\"blockSizeInMB\":\"datakarggvyuewgy\",\"maxConcurrentConnections\":\"dataiotgtlans\",\"disableMetricsCollection\":\"datavlxsycqqdoxooxua\",\"copyBehavior\":\"dataqoobwxctkveqvp\",\"metadata\":[{\"name\":\"datahqcjrerypjbyqxey\",\"value\":\"datanupsipclxvaovss\"}],\"\":{\"iyo\":\"datavqqvicqdbmzwlej\"}}") .toObject(AzureBlobFSWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobFSWriteSettings model = new AzureBlobFSWriteSettings().withMaxConcurrentConnections("dataqelms") - .withDisableMetricsCollection("databtneltnbyvbg").withCopyBehavior("datarumududwecdsybi") + AzureBlobFSWriteSettings model = new AzureBlobFSWriteSettings().withMaxConcurrentConnections("dataiotgtlans") + .withDisableMetricsCollection("datavlxsycqqdoxooxua") + .withCopyBehavior("dataqoobwxctkveqvp") .withMetadata( - Arrays.asList(new MetadataItem().withName("dataxdkwvceqlyxbyqqo").withValue("datarekiojusmdodku"), - new MetadataItem().withName("datacn").withValue("dataovlyykxyfpkdsld"), - new MetadataItem().withName("datarwvswlhjlbk").withValue("datags"))) - .withBlockSizeInMB("datatfozysfjdcokbp"); + Arrays.asList(new MetadataItem().withName("datahqcjrerypjbyqxey").withValue("datanupsipclxvaovss"))) + .withBlockSizeInMB("datakarggvyuewgy"); model = BinaryData.fromObject(model).toObject(AzureBlobFSWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java index e46731d0cd69e..6458b68a6dbe1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java @@ -11,14 +11,15 @@ public final class AzureBlobStorageLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobStorageLocation model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageLocation\",\"container\":\"dataovsfb\",\"folderPath\":\"datavzopaxmfmvsm\",\"fileName\":\"dataoxfaxdtn\",\"\":{\"oiauesugmocpcj\":\"databsat\",\"rgttw\":\"datacboe\"}}") + "{\"type\":\"a\",\"container\":\"dataqr\",\"folderPath\":\"datavznllaslkskhjqj\",\"fileName\":\"databaihxjtgzg\",\"\":{\"enwl\":\"dataywbqgroigbsfsgs\",\"qlxspmrj\":\"datafmhl\"}}") .toObject(AzureBlobStorageLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobStorageLocation model = new AzureBlobStorageLocation().withFolderPath("datavzopaxmfmvsm") - .withFileName("dataoxfaxdtn").withContainer("dataovsfb"); + AzureBlobStorageLocation model = new 
AzureBlobStorageLocation().withFolderPath("datavznllaslkskhjqj") + .withFileName("databaihxjtgzg") + .withContainer("dataqr"); model = BinaryData.fromObject(model).toObject(AzureBlobStorageLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java index e1f9df609b708..ed5c28edec1fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java @@ -11,18 +11,25 @@ public final class AzureBlobStorageReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageReadSettings\",\"recursive\":\"dataouxpdnlbp\",\"wildcardFolderPath\":\"datackohn\",\"wildcardFileName\":\"dataaqzekggurwxf\",\"prefix\":\"dataxg\",\"fileListPath\":\"datafiyuofegrzf\",\"enablePartitionDiscovery\":\"dataulookwnzotjbvh\",\"partitionRootPath\":\"datadlo\",\"deleteFilesAfterCompletion\":\"dataqsylkkqvmmm\",\"modifiedDatetimeStart\":\"dataljcauegymc\",\"modifiedDatetimeEnd\":\"datasmnjitxu\",\"maxConcurrentConnections\":\"datalbibwodayipgsh\",\"disableMetricsCollection\":\"dataoecmbyo\",\"\":{\"qpddebokzdshhhd\":\"databvfchfux\",\"zzkzvfywspaja\":\"dataxnzapzibmst\",\"zzpauts\":\"datajhvpktbnmhxt\"}}") + "{\"type\":\"ecxvkqjpovjvvx\",\"recursive\":\"datapbfsxps\",\"wildcardFolderPath\":\"dataevz\",\"wildcardFileName\":\"dataydnokkkgiec\",\"prefix\":\"dataftsneftzetjclaq\",\"fileListPath\":\"datab\",\"enablePartitionDiscovery\":\"datamagpdsuyywnaqgoo\",\"partitionRootPath\":\"datahhvgddfzcnylz\",\"deleteFilesAfterCompletion\":\"datahufsgc\",\"modifiedDatetimeStart\":\"datartgtdegfhofoptb\",\"modifiedDatetimeEnd\":\"dataikpotj\",\"maxConcurrentConnections\":\"datanrewrbmhjm\",\"disableMetricsCollection\":\"datalqwqcxyi\",\"\":{\"eijzgnlaaxtte\":\"dataacjiqrllacylbtk\",\"rwbobawlntenh\":\"dataaugojvgje\",\"hehotqorrvwlceoj\":\"dataqtvxghbe\",\"fdsgrtkevim\":\"datayugzlvgjirjkkrs\"}}") .toObject(AzureBlobStorageReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureBlobStorageReadSettings model = new AzureBlobStorageReadSettings() - .withMaxConcurrentConnections("datalbibwodayipgsh").withDisableMetricsCollection("dataoecmbyo") - .withRecursive("dataouxpdnlbp").withWildcardFolderPath("datackohn").withWildcardFileName("dataaqzekggurwxf") - .withPrefix("dataxg").withFileListPath("datafiyuofegrzf").withEnablePartitionDiscovery("dataulookwnzotjbvh") - .withPartitionRootPath("datadlo").withDeleteFilesAfterCompletion("dataqsylkkqvmmm") - .withModifiedDatetimeStart("dataljcauegymc").withModifiedDatetimeEnd("datasmnjitxu"); + AzureBlobStorageReadSettings model + = new AzureBlobStorageReadSettings().withMaxConcurrentConnections("datanrewrbmhjm") + .withDisableMetricsCollection("datalqwqcxyi") + .withRecursive("datapbfsxps") + .withWildcardFolderPath("dataevz") + .withWildcardFileName("dataydnokkkgiec") + .withPrefix("dataftsneftzetjclaq") + .withFileListPath("datab") + .withEnablePartitionDiscovery("datamagpdsuyywnaqgoo") + 
.withPartitionRootPath("datahhvgddfzcnylz") + .withDeleteFilesAfterCompletion("datahufsgc") + .withModifiedDatetimeStart("datartgtdegfhofoptb") + .withModifiedDatetimeEnd("dataikpotj"); model = BinaryData.fromObject(model).toObject(AzureBlobStorageReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java index a629c06d75d67..fca1c51ed7090 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java @@ -13,18 +13,19 @@ public final class AzureBlobStorageWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureBlobStorageWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageWriteSettings\",\"blockSizeInMB\":\"datafn\",\"maxConcurrentConnections\":\"datagzmyxsoxqarjt\",\"disableMetricsCollection\":\"datalllmtiyguuhylzbd\",\"copyBehavior\":\"datatdohjxfqyyu\",\"metadata\":[{\"name\":\"dataxznxh\",\"value\":\"datatkkicxj\"},{\"name\":\"dataeilixzjvkq\",\"value\":\"databblh\"}],\"\":{\"xcljnkms\":\"dataxxpn\",\"rivagcsmrtepsyb\":\"datasquxxqcimnchvkj\"}}") + "{\"type\":\"enrcqickhvps\",\"blockSizeInMB\":\"dataoisbmv\",\"maxConcurrentConnections\":\"datauiuvingmonq\",\"disableMetricsCollection\":\"datatyuqdz\",\"copyBehavior\":\"dataojz\",\"metadata\":[{\"name\":\"datafjgakaysc\",\"value\":\"datadqyjzwmf\"},{\"name\":\"dataqziz\",\"value\":\"datavmuot\"}],\"\":{\"khprlt\":\"dataabfyjampvwx\",\"yw\":\"dataipmnqrbyq\"}}") .toObject(AzureBlobStorageWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureBlobStorageWriteSettings model - = new AzureBlobStorageWriteSettings().withMaxConcurrentConnections("datagzmyxsoxqarjt") - .withDisableMetricsCollection("datalllmtiyguuhylzbd").withCopyBehavior("datatdohjxfqyyu") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataxznxh").withValue("datatkkicxj"), - new MetadataItem().withName("dataeilixzjvkq").withValue("databblh"))) - .withBlockSizeInMB("datafn"); + = new AzureBlobStorageWriteSettings().withMaxConcurrentConnections("datauiuvingmonq") + .withDisableMetricsCollection("datatyuqdz") + .withCopyBehavior("dataojz") + .withMetadata(Arrays.asList(new MetadataItem().withName("datafjgakaysc").withValue("datadqyjzwmf"), + new MetadataItem().withName("dataqziz").withValue("datavmuot"))) + .withBlockSizeInMB("dataoisbmv"); model = BinaryData.fromObject(model).toObject(AzureBlobStorageWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java index 52c62d1dc0aac..84f8fb75d5346 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java @@ -22,59 +22,67 @@ public final class AzureDataExplorerCommandActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataExplorerCommandActivity model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerCommand\",\"typeProperties\":{\"command\":\"datadsdgvhecqkb\",\"commandTimeout\":\"dataemamyshnksupc\"},\"linkedServiceName\":{\"referenceName\":\"spgbyvumxyqhctrr\",\"parameters\":{\"zuk\":\"datagchtaeacbqkx\",\"nlqfqawyns\":\"datajki\",\"lovavi\":\"datacf\"}},\"policy\":{\"timeout\":\"dataedfyhtfugpp\",\"retry\":\"datadhylx\",\"retryIntervalInSeconds\":1788389204,\"secureInput\":false,\"secureOutput\":false,\"\":{\"bchnh\":\"dataauxzcrfp\",\"ufojuex\":\"dataxmgcq\",\"snaklobc\":\"datakqhgfwyzv\",\"mltdgxiqrgr\":\"datay\"}},\"name\":\"xjfxu\",\"description\":\"yje\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"cnklqipnzgnyb\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"hzqnhxksc\":\"datanc\"}},{\"activity\":\"ykrzrjjernjkgalo\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"hctxarl\":\"datatygyia\",\"gp\":\"datasijcmn\"}},{\"activity\":\"gxjmwzkafuvbg\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"xvsmrxypbiwn\":\"datajl\",\"b\":\"dataznaixjsfasxfamn\"}},{\"activity\":\"xbglqybfnxej\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"cnuozjg\":\"datajmpsxot\"}}],\"userProperties\":[{\"name\":\"benwiignrrqv\",\"value\":\"dataxouoqtestr\"}],\"\":{\"yyzaalpwwcwie\":\"dataskmvr\",\"wqmundle\":\"datasswijqsndqjbdtcz\",\"hrygdp\":\"datadlcuedrmqkwkutbt\",\"tcfppjegctsatnry\":\"dataufmvozq\"}}") + "{\"type\":\"dc\",\"typeProperties\":{\"command\":\"datalahdpliciv\",\"commandTimeout\":\"dataudaubmjheyntsdwx\"},\"linkedServiceName\":{\"referenceName\":\"locrkfygjywpkbvv\",\"parameters\":{\"vhhouh\":\"datanwv\",\"lkfl\":\"datatih\",\"yvxb\":\"dataooiiviwlfmjoy\",\"rwwk\":\"datathwbi\"}},\"policy\":{\"timeout\":\"dataqrvnhcuoghvkzm\",\"retry\":\"datatemp\",\"retryIntervalInSeconds\":1946428911,\"secureInput\":false,\"secureOutput\":false,\"\":{\"ato\":\"datahrqjjl\",\"lyrtkvftlbt\":\"datasgarxtgexmxgqgqu\",\"asrwo\":\"datatub\",\"kwmvcxyuem\":\"dataumnucqewxcwr\"}},\"name\":\"tnzye\",\"description\":\"dwgqisrlhxfmvngd\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"hnh\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"qczoqp\":\"datarjyprcojwiigtd\",\"ntihlnay\":\"datapibeiftmozof\"}},{\"activity\":\"sgbyhouqugyc\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"objixoqqjbsagw\":\"dataven\",\"r\":\"datapuqf\",\"hwxu\":\"datamwlpa\",\"s\":\"dataaktnmwlklqhw\"}},{\"activity\":\"eoefwnjsorhpga\",\"dependencyConditions\":[\"Failed\"],\"\":{\"bildwqlxn\":\"dataoezcabfylsuiy\",\"wdvdtzdrv\":\"datajqeigpylpm\"}},{\"activity\":\"xswiindyurwz\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Skipped\",\"Succeeded\"],\"\":{\"slpbvponxhs\":\"datajambtvc\"}}],\"userProperties\":[{\"name\":\"tunnkbekzk\",\"value\":\"dataoonbzi\"},{\"name\":\"lqyzrtawjkjzvvk\",\"value\":\"datahasxjmfh\"}],\"\":{\"qfvczuuanrjcrpx\":\"dataqxwcimamtqfrdfo\",\"kmlfcgk\":\"datafyytjmlroxvsclmt\",\"bestyy\":\"dataeitphzuaznsbvu\"}}") .toObject(AzureDataExplorerCommandActivity.class); - 
Assertions.assertEquals("xjfxu", model.name()); - Assertions.assertEquals("yje", model.description()); + Assertions.assertEquals("tnzye", model.name()); + Assertions.assertEquals("dwgqisrlhxfmvngd", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("cnklqipnzgnyb", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("benwiignrrqv", model.userProperties().get(0).name()); - Assertions.assertEquals("spgbyvumxyqhctrr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1788389204, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("hnh", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("tunnkbekzk", model.userProperties().get(0).name()); + Assertions.assertEquals("locrkfygjywpkbvv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1946428911, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataExplorerCommandActivity model = new AzureDataExplorerCommandActivity().withName("xjfxu") - .withDescription("yje").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("cnklqipnzgnyb") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ykrzrjjernjkgalo") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gxjmwzkafuvbg") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xbglqybfnxej") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("benwiignrrqv").withValue("dataxouoqtestr"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("spgbyvumxyqhctrr") - .withParameters(mapOf("zuk", "datagchtaeacbqkx", "nlqfqawyns", "datajki", "lovavi", "datacf"))) - .withPolicy(new ActivityPolicy().withTimeout("dataedfyhtfugpp").withRetry("datadhylx") - .withRetryIntervalInSeconds(1788389204).withSecureInput(false).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withCommand("datadsdgvhecqkb").withCommandTimeout("dataemamyshnksupc"); + AzureDataExplorerCommandActivity model + = new AzureDataExplorerCommandActivity().withName("tnzye") + .withDescription("dwgqisrlhxfmvngd") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + 
.withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("hnh") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("sgbyhouqugyc") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("eoefwnjsorhpga") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xswiindyurwz") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("tunnkbekzk").withValue("dataoonbzi"), + new UserProperty().withName("lqyzrtawjkjzvvk").withValue("datahasxjmfh"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("locrkfygjywpkbvv") + .withParameters(mapOf("vhhouh", "datanwv", "lkfl", "datatih", "yvxb", "dataooiiviwlfmjoy", "rwwk", + "datathwbi"))) + .withPolicy(new ActivityPolicy().withTimeout("dataqrvnhcuoghvkzm") + .withRetry("datatemp") + .withRetryIntervalInSeconds(1946428911) + .withSecureInput(false) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withCommand("datalahdpliciv") + .withCommandTimeout("dataudaubmjheyntsdwx"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerCommandActivity.class); - Assertions.assertEquals("xjfxu", model.name()); - Assertions.assertEquals("yje", model.description()); + Assertions.assertEquals("tnzye", model.name()); + Assertions.assertEquals("dwgqisrlhxfmvngd", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("cnklqipnzgnyb", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("benwiignrrqv", model.userProperties().get(0).name()); - Assertions.assertEquals("spgbyvumxyqhctrr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1788389204, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("hnh", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("tunnkbekzk", model.userProperties().get(0).name()); + Assertions.assertEquals("locrkfygjywpkbvv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1946428911, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java index 6c4b42c804d05..fdbe3dd1dafab 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class AzureDataExplorerCommandActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataExplorerCommandActivityTypeProperties model - = BinaryData.fromString("{\"command\":\"dataouewrwc\",\"commandTimeout\":\"datavtwvyjpr\"}") + = BinaryData.fromString("{\"command\":\"dataljo\",\"commandTimeout\":\"datampuw\"}") .toObject(AzureDataExplorerCommandActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataExplorerCommandActivityTypeProperties model = new AzureDataExplorerCommandActivityTypeProperties() - .withCommand("dataouewrwc").withCommandTimeout("datavtwvyjpr"); + AzureDataExplorerCommandActivityTypeProperties model + = new AzureDataExplorerCommandActivityTypeProperties().withCommand("dataljo") + .withCommandTimeout("datampuw"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerCommandActivityTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java index a66ecd68f4d77..c11cd03f3f3ad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class AzureDataExplorerDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzureDataExplorerDatasetTypeProperties model = BinaryData.fromString("{\"table\":\"dataysyajmm\"}") + AzureDataExplorerDatasetTypeProperties model = BinaryData.fromString("{\"table\":\"dataumzenk\"}") .toObject(AzureDataExplorerDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureDataExplorerDatasetTypeProperties model - = new AzureDataExplorerDatasetTypeProperties().withTable("dataysyajmm"); + = new AzureDataExplorerDatasetTypeProperties().withTable("dataumzenk"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java index 62468deaf1ff2..9f0f2f4c7e546 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java @@ -11,17 +11,21 @@ public final class 
AzureDataExplorerSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataExplorerSink model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerSink\",\"ingestionMappingName\":\"datalyjdbsx\",\"ingestionMappingAsJson\":\"dataleqsk\",\"flushImmediately\":\"dataol\",\"writeBatchSize\":\"databellpk\",\"writeBatchTimeout\":\"databrsmy\",\"sinkRetryCount\":\"datandfrfhgowhnvc\",\"sinkRetryWait\":\"datamuvgysto\",\"maxConcurrentConnections\":\"datarktodeertyijlvcm\",\"disableMetricsCollection\":\"datanxxw\",\"\":{\"bkhvlsahjdeb\":\"datadbdkw\",\"cpxlkih\":\"datayqnluszilk\",\"cmcehtrhwr\":\"dataxtbvhms\",\"chyyfgkrp\":\"dataihw\"}}") + "{\"type\":\"tmyfajygn\",\"ingestionMappingName\":\"datajocsq\",\"ingestionMappingAsJson\":\"databuflppnevujkzb\",\"flushImmediately\":\"datagvwkdg\",\"writeBatchSize\":\"dataoeoxsobljzodcx\",\"writeBatchTimeout\":\"datapmjfwm\",\"sinkRetryCount\":\"datafaucihqsog\",\"sinkRetryWait\":\"dataao\",\"maxConcurrentConnections\":\"datanlyzi\",\"disableMetricsCollection\":\"datatqvgpidrtb\",\"\":{\"e\":\"datawfkdblvbw\",\"nekwe\":\"datatxlujvmnooaga\"}}") .toObject(AzureDataExplorerSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataExplorerSink model = new AzureDataExplorerSink().withWriteBatchSize("databellpk") - .withWriteBatchTimeout("databrsmy").withSinkRetryCount("datandfrfhgowhnvc") - .withSinkRetryWait("datamuvgysto").withMaxConcurrentConnections("datarktodeertyijlvcm") - .withDisableMetricsCollection("datanxxw").withIngestionMappingName("datalyjdbsx") - .withIngestionMappingAsJson("dataleqsk").withFlushImmediately("dataol"); + AzureDataExplorerSink model = new AzureDataExplorerSink().withWriteBatchSize("dataoeoxsobljzodcx") + .withWriteBatchTimeout("datapmjfwm") + .withSinkRetryCount("datafaucihqsog") + .withSinkRetryWait("dataao") + .withMaxConcurrentConnections("datanlyzi") + .withDisableMetricsCollection("datatqvgpidrtb") + .withIngestionMappingName("datajocsq") + .withIngestionMappingAsJson("databuflppnevujkzb") + .withFlushImmediately("datagvwkdg"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java index 25d7fd1c21175..166a22fb84494 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java @@ -11,17 +11,20 @@ public final class AzureDataExplorerSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataExplorerSource model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerSource\",\"query\":\"dataqebtrps\",\"noTruncation\":\"datawtlocseybvtgcozn\",\"queryTimeout\":\"dataqxckrnkuuotlymy\",\"additionalColumns\":\"datagmrkxkmtuynugptf\",\"sourceRetryCount\":\"dataisvfh\",\"sourceRetryWait\":\"dataqqshbipzhuhu\",\"maxConcurrentConnections\":\"datazjsezgphip\",\"disableMetricsCollection\":\"dataoqldnhwdfxgec\",\"\":{\"bhsnimompxd\":\"datakdb\",\"fbdmoawhbdxxnm\":\"datamp\",\"iocqoydqyzhfny\":\"dataxz\",\"skt\":\"datagbwdsaqwywayjin\"}}") + 
"{\"type\":\"mbnfvygtt\",\"query\":\"dataf\",\"noTruncation\":\"datambxf\",\"queryTimeout\":\"dataweiqvhfyvkxgo\",\"additionalColumns\":\"dataveiucuxwnojvcr\",\"sourceRetryCount\":\"datafjalpsycvcksz\",\"sourceRetryWait\":\"datagguucpytsxnuj\",\"maxConcurrentConnections\":\"datafth\",\"disableMetricsCollection\":\"dataiieoxlbcccc\",\"\":{\"eksbksvvyvoi\":\"dataabdevjrbgcdxqgst\",\"wljmz\":\"datavvu\"}}") .toObject(AzureDataExplorerSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataExplorerSource model - = new AzureDataExplorerSource().withSourceRetryCount("dataisvfh").withSourceRetryWait("dataqqshbipzhuhu") - .withMaxConcurrentConnections("datazjsezgphip").withDisableMetricsCollection("dataoqldnhwdfxgec") - .withQuery("dataqebtrps").withNoTruncation("datawtlocseybvtgcozn") - .withQueryTimeout("dataqxckrnkuuotlymy").withAdditionalColumns("datagmrkxkmtuynugptf"); + AzureDataExplorerSource model = new AzureDataExplorerSource().withSourceRetryCount("datafjalpsycvcksz") + .withSourceRetryWait("datagguucpytsxnuj") + .withMaxConcurrentConnections("datafth") + .withDisableMetricsCollection("dataiieoxlbcccc") + .withQuery("dataf") + .withNoTruncation("datambxf") + .withQueryTimeout("dataweiqvhfyvkxgo") + .withAdditionalColumns("dataveiucuxwnojvcr"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java index a79a3b68f0b73..3036990d4ed7c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java @@ -19,32 +19,34 @@ public final class AzureDataExplorerTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataExplorerTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerTable\",\"typeProperties\":{\"table\":\"datarwxxqkwargcbgdg\"},\"description\":\"ijiqe\",\"structure\":\"datawqykmvugflh\",\"schema\":\"dataoxu\",\"linkedServiceName\":{\"referenceName\":\"hcnnkvthwtam\",\"parameters\":{\"cocdxvbeqzjd\":\"datagyvxhfmuhkezuucq\"}},\"parameters\":{\"my\":{\"type\":\"Array\",\"defaultValue\":\"datapdwnee\"},\"jrwvnffaofkvfru\":{\"type\":\"Bool\",\"defaultValue\":\"datau\"},\"tvymdqaymqmyrn\":{\"type\":\"Int\",\"defaultValue\":\"datafbvhgykzov\"}},\"annotations\":[\"databqkfnoxhvo\",\"datajdgfkr\"],\"folder\":{\"name\":\"rvpa\"},\"\":{\"ej\":\"datadeex\",\"nxbohpzurn\":\"datagu\",\"oijoxcbpkiwse\":\"dataoytkbeadyfenro\",\"ztdacrqcwkk\":\"datacbtaxdrpanhsxwhx\"}}") + 
"{\"type\":\"faky\",\"typeProperties\":{\"table\":\"datatbgvlpgfyg\"},\"description\":\"psihzqieoympp\",\"structure\":\"dataxjsfgbyy\",\"schema\":\"dataazlycx\",\"linkedServiceName\":{\"referenceName\":\"ubru\",\"parameters\":{\"dkg\":\"databuoyr\"}},\"parameters\":{\"xuvsveamseauuuvh\":{\"type\":\"Array\",\"defaultValue\":\"datavjmfjjf\"},\"lmufzuuysz\":{\"type\":\"String\",\"defaultValue\":\"datah\"},\"fvwgwphwxiav\":{\"type\":\"String\",\"defaultValue\":\"dataemtyosdpxtsdywfg\"}},\"annotations\":[\"dataxaqgfpuhh\",\"datawrsjumlkjsvk\",\"datatmlixalphkg\",\"datainhecj\"],\"folder\":{\"name\":\"djmsngm\"},\"\":{\"pjthizsabcylzzi\":\"datarlkpismmrmr\"}}") .toObject(AzureDataExplorerTableDataset.class); - Assertions.assertEquals("ijiqe", model.description()); - Assertions.assertEquals("hcnnkvthwtam", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("my").type()); - Assertions.assertEquals("rvpa", model.folder().name()); + Assertions.assertEquals("psihzqieoympp", model.description()); + Assertions.assertEquals("ubru", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("xuvsveamseauuuvh").type()); + Assertions.assertEquals("djmsngm", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataExplorerTableDataset model = new AzureDataExplorerTableDataset().withDescription("ijiqe") - .withStructure("datawqykmvugflh").withSchema("dataoxu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hcnnkvthwtam") - .withParameters(mapOf("cocdxvbeqzjd", "datagyvxhfmuhkezuucq"))) - .withParameters(mapOf("my", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapdwnee"), - "jrwvnffaofkvfru", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datau"), - "tvymdqaymqmyrn", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datafbvhgykzov"))) - .withAnnotations(Arrays.asList("databqkfnoxhvo", "datajdgfkr")) - .withFolder(new DatasetFolder().withName("rvpa")).withTable("datarwxxqkwargcbgdg"); + AzureDataExplorerTableDataset model = new AzureDataExplorerTableDataset().withDescription("psihzqieoympp") + .withStructure("dataxjsfgbyy") + .withSchema("dataazlycx") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("ubru").withParameters(mapOf("dkg", "databuoyr"))) + .withParameters(mapOf("xuvsveamseauuuvh", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavjmfjjf"), + "lmufzuuysz", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datah"), + "fvwgwphwxiav", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataemtyosdpxtsdywfg"))) + .withAnnotations(Arrays.asList("dataxaqgfpuhh", "datawrsjumlkjsvk", "datatmlixalphkg", "datainhecj")) + .withFolder(new DatasetFolder().withName("djmsngm")) + .withTable("datatbgvlpgfyg"); model = BinaryData.fromObject(model).toObject(AzureDataExplorerTableDataset.class); - Assertions.assertEquals("ijiqe", model.description()); - Assertions.assertEquals("hcnnkvthwtam", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("my").type()); - Assertions.assertEquals("rvpa", model.folder().name()); + Assertions.assertEquals("psihzqieoympp", model.description()); + Assertions.assertEquals("ubru", model.linkedServiceName().referenceName()); + 
Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("xuvsveamseauuuvh").type()); + Assertions.assertEquals("djmsngm", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java index beefec3d0575a..4fc6b95c4d63e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java @@ -21,36 +21,40 @@ public final class AzureDataLakeStoreDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreDataset model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreFile\",\"typeProperties\":{\"folderPath\":\"datablwal\",\"fileName\":\"datassnqe\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datatbptgcsma\",\"deserializer\":\"dataxrwqfmd\",\"\":{\"psibxovuqo\":\"datavtamqwzmnobfew\",\"qnzjcyqqz\":\"datajrkblndyclwgycv\",\"dpisjdl\":\"dataembtbwnalb\",\"eopsk\":\"dataajvmvvlooubsfxip\"}},\"compression\":{\"type\":\"datacjomlupf\",\"level\":\"datausjcd\",\"\":{\"j\":\"datalgdwzrgdqyx\",\"cwwsj\":\"datalgrcavqcwyzoqzkm\"}}},\"description\":\"iixepbntqqwwgfgs\",\"structure\":\"datailefej\",\"schema\":\"datawrznequ\",\"linkedServiceName\":{\"referenceName\":\"ynttwknhajk\",\"parameters\":{\"cydi\":\"datayogjmqjh\",\"vjbssfcriqxz\":\"datanm\",\"py\":\"dataxtdlxwmvcdkucp\",\"pnr\":\"datafrwrgorogeuvmkr\"}},\"parameters\":{\"snqpljpete\":{\"type\":\"Object\",\"defaultValue\":\"datalzof\"},\"ub\":{\"type\":\"Array\",\"defaultValue\":\"dataikelpmwgr\"},\"gjzscueza\":{\"type\":\"String\",\"defaultValue\":\"datahvo\"}},\"annotations\":[\"datadfwgqjhewcffrx\",\"datagezkhzpriqisse\",\"dataerrusyzaiv\",\"datapsjnpck\"],\"folder\":{\"name\":\"jy\"},\"\":{\"jarsbbdddwok\":\"datanbdawsaoplvvmnbk\"}}") + "{\"type\":\"tvnpccxzivxpmh\",\"typeProperties\":{\"folderPath\":\"datavykbyjecrqkwakk\",\"fileName\":\"datasfoulborcxu\",\"format\":{\"type\":\"dqbdyblp\",\"serializer\":\"datat\",\"deserializer\":\"datawpgweoqhbj\",\"\":{\"c\":\"datafblerufol\",\"tpnerxvjea\":\"datahjucihbymjj\"}},\"compression\":{\"type\":\"dataahelh\",\"level\":\"datamyi\",\"\":{\"cthtpq\":\"dataa\",\"zkdiuvflgzhcw\":\"datafzdos\",\"g\":\"datagwahcrxo\"}}},\"description\":\"hh\",\"structure\":\"datavnnjd\",\"schema\":\"datajq\",\"linkedServiceName\":{\"referenceName\":\"vhnjvpmxnhtmz\",\"parameters\":{\"vtlgx\":\"datalfxolr\",\"twwgwqubot\":\"datafjswequflzyyopoa\",\"jiemimdtnpo\":\"datavufrk\"}},\"parameters\":{\"ft\":{\"type\":\"String\",\"defaultValue\":\"datakreeeddd\"},\"uwbrnqyxfedqne\":{\"type\":\"String\",\"defaultValue\":\"dataulpclhsiige\"}},\"annotations\":[\"datawynxoqgvbz\"],\"folder\":{\"name\":\"pw\"},\"\":{\"a\":\"dataobqajejir\"}}") .toObject(AzureDataLakeStoreDataset.class); - Assertions.assertEquals("iixepbntqqwwgfgs", model.description()); - Assertions.assertEquals("ynttwknhajk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("snqpljpete").type()); - Assertions.assertEquals("jy", model.folder().name()); + 
Assertions.assertEquals("hh", model.description()); + Assertions.assertEquals("vhnjvpmxnhtmz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("ft").type()); + Assertions.assertEquals("pw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreDataset model = new AzureDataLakeStoreDataset().withDescription("iixepbntqqwwgfgs") - .withStructure("datailefej").withSchema("datawrznequ") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ynttwknhajk") - .withParameters(mapOf("cydi", "datayogjmqjh", "vjbssfcriqxz", "datanm", "py", "dataxtdlxwmvcdkucp", - "pnr", "datafrwrgorogeuvmkr"))) - .withParameters(mapOf("snqpljpete", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datalzof"), "ub", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataikelpmwgr"), - "gjzscueza", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datahvo"))) - .withAnnotations(Arrays.asList("datadfwgqjhewcffrx", "datagezkhzpriqisse", "dataerrusyzaiv", "datapsjnpck")) - .withFolder(new DatasetFolder().withName("jy")).withFolderPath("datablwal").withFileName("datassnqe") - .withFormat(new DatasetStorageFormat().withSerializer("datatbptgcsma").withDeserializer("dataxrwqfmd") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datacjomlupf").withLevel("datausjcd") - .withAdditionalProperties(mapOf())); + AzureDataLakeStoreDataset model = new AzureDataLakeStoreDataset().withDescription("hh") + .withStructure("datavnnjd") + .withSchema("datajq") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vhnjvpmxnhtmz") + .withParameters( + mapOf("vtlgx", "datalfxolr", "twwgwqubot", "datafjswequflzyyopoa", "jiemimdtnpo", "datavufrk"))) + .withParameters(mapOf("ft", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datakreeeddd"), + "uwbrnqyxfedqne", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataulpclhsiige"))) + .withAnnotations(Arrays.asList("datawynxoqgvbz")) + .withFolder(new DatasetFolder().withName("pw")) + .withFolderPath("datavykbyjecrqkwakk") + .withFileName("datasfoulborcxu") + .withFormat(new DatasetStorageFormat().withSerializer("datat") + .withDeserializer("datawpgweoqhbj") + .withAdditionalProperties(mapOf("type", "dqbdyblp"))) + .withCompression( + new DatasetCompression().withType("dataahelh").withLevel("datamyi").withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreDataset.class); - Assertions.assertEquals("iixepbntqqwwgfgs", model.description()); - Assertions.assertEquals("ynttwknhajk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("snqpljpete").type()); - Assertions.assertEquals("jy", model.folder().name()); + Assertions.assertEquals("hh", model.description()); + Assertions.assertEquals("vhnjvpmxnhtmz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("ft").type()); + Assertions.assertEquals("pw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java index fae20eeeb5557..92f92d9b0c257 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java @@ -15,18 +15,20 @@ public final class AzureDataLakeStoreDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"dataailxqkdyqjvzvcg\",\"fileName\":\"dataspzesfkqqxuhvz\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datarouszxacdwukokgo\",\"deserializer\":\"dataj\",\"\":{\"h\":\"datatubcmu\",\"bcuufkrfn\":\"databtzvxxv\",\"wwp\":\"datacnihkswxmfurqmw\"}},\"compression\":{\"type\":\"dataum\",\"level\":\"dataahbqsvnkxm\",\"\":{\"edr\":\"datau\"}}}") + "{\"folderPath\":\"datavkgpogplbjuvlnhx\",\"fileName\":\"datanjhina\",\"format\":{\"type\":\"sbxnep\",\"serializer\":\"datajmoplukfy\",\"deserializer\":\"datafhsovadkrmjxmwq\",\"\":{\"wggijtspzjnrrhik\":\"dataesjqbzkqmxvporiw\"}},\"compression\":{\"type\":\"databz\",\"level\":\"datad\",\"\":{\"zfrunjfhrjhiycba\":\"datauhtr\",\"hvtuwyjsqwzsz\":\"dataseqnczkv\"}}}") .toObject(AzureDataLakeStoreDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreDatasetTypeProperties model = new AzureDataLakeStoreDatasetTypeProperties() - .withFolderPath("dataailxqkdyqjvzvcg").withFileName("dataspzesfkqqxuhvz") - .withFormat(new DatasetStorageFormat().withSerializer("datarouszxacdwukokgo").withDeserializer("dataj") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("dataum").withLevel("dataahbqsvnkxm") - .withAdditionalProperties(mapOf())); + AzureDataLakeStoreDatasetTypeProperties model + = new AzureDataLakeStoreDatasetTypeProperties().withFolderPath("datavkgpogplbjuvlnhx") + .withFileName("datanjhina") + .withFormat(new DatasetStorageFormat().withSerializer("datajmoplukfy") + .withDeserializer("datafhsovadkrmjxmwq") + .withAdditionalProperties(mapOf("type", "sbxnep"))) + .withCompression( + new DatasetCompression().withType("databz").withLevel("datad").withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java index 0c750b86c399d..758168a92bd61 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java @@ -11,14 +11,14 @@ public final class AzureDataLakeStoreLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreLocation model = 
BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreLocation\",\"folderPath\":\"datapoyryefqmwovyzt\",\"fileName\":\"datanomfpbjceegvyiez\",\"\":{\"ehyh\":\"datatnjillukk\",\"fvulxfaryr\":\"datamjodu\",\"jqwahoyi\":\"datajlgdez\",\"ovbooqbmdqrxy\":\"dataaxqvjweiwtczkddn\"}}") + "{\"type\":\"lzmkxerxxxotee\",\"folderPath\":\"datahowgomvvhxo\",\"fileName\":\"datac\",\"\":{\"ylqlocvvujexaygl\":\"datanpxraqawbmpspf\"}}") .toObject(AzureDataLakeStoreLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreLocation model = new AzureDataLakeStoreLocation().withFolderPath("datapoyryefqmwovyzt") - .withFileName("datanomfpbjceegvyiez"); + AzureDataLakeStoreLocation model + = new AzureDataLakeStoreLocation().withFolderPath("datahowgomvvhxo").withFileName("datac"); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java index 7c53a49580aad..f8bef1ed610f0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java @@ -11,19 +11,26 @@ public final class AzureDataLakeStoreReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreReadSettings\",\"recursive\":\"datayjuy\",\"wildcardFolderPath\":\"datalbbugojd\",\"wildcardFileName\":\"dataluyosigkinykjx\",\"fileListPath\":\"dataspeqgedpi\",\"listAfter\":\"dataqpjzt\",\"listBefore\":\"datadiverkwmafyxo\",\"enablePartitionDiscovery\":\"datakvutedetx\",\"partitionRootPath\":\"dataqudjdwcw\",\"deleteFilesAfterCompletion\":\"datacdbkceh\",\"modifiedDatetimeStart\":\"datahnqjbavdblf\",\"modifiedDatetimeEnd\":\"databbvitlnnpafu\",\"maxConcurrentConnections\":\"datar\",\"disableMetricsCollection\":\"databndr\",\"\":{\"vmdccte\":\"datanstl\",\"urz\":\"datavaajyitpyzgwihks\"}}") + "{\"type\":\"qggzahng\",\"recursive\":\"datatecsmocqwey\",\"wildcardFolderPath\":\"dataakettmfcxviwf\",\"wildcardFileName\":\"datajxxbsafqiwldu\",\"fileListPath\":\"datasyjzdasgkfz\",\"listAfter\":\"datahqomuzohnpkofklb\",\"listBefore\":\"dataln\",\"enablePartitionDiscovery\":\"dataffyvowlammvazv\",\"partitionRootPath\":\"dataie\",\"deleteFilesAfterCompletion\":\"datajunmgd\",\"modifiedDatetimeStart\":\"dataeivrhjxdnkgztfgc\",\"modifiedDatetimeEnd\":\"datavbreh\",\"maxConcurrentConnections\":\"dataseiidfpwbybmxf\",\"disableMetricsCollection\":\"datazgolfensibqi\",\"\":{\"ml\":\"datayjzvy\",\"vwr\":\"datasavzcz\"}}") .toObject(AzureDataLakeStoreReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreReadSettings model = new AzureDataLakeStoreReadSettings() - .withMaxConcurrentConnections("datar").withDisableMetricsCollection("databndr").withRecursive("datayjuy") - .withWildcardFolderPath("datalbbugojd").withWildcardFileName("dataluyosigkinykjx") - 
.withFileListPath("dataspeqgedpi").withListAfter("dataqpjzt").withListBefore("datadiverkwmafyxo") - .withEnablePartitionDiscovery("datakvutedetx").withPartitionRootPath("dataqudjdwcw") - .withDeleteFilesAfterCompletion("datacdbkceh").withModifiedDatetimeStart("datahnqjbavdblf") - .withModifiedDatetimeEnd("databbvitlnnpafu"); + AzureDataLakeStoreReadSettings model + = new AzureDataLakeStoreReadSettings().withMaxConcurrentConnections("dataseiidfpwbybmxf") + .withDisableMetricsCollection("datazgolfensibqi") + .withRecursive("datatecsmocqwey") + .withWildcardFolderPath("dataakettmfcxviwf") + .withWildcardFileName("datajxxbsafqiwldu") + .withFileListPath("datasyjzdasgkfz") + .withListAfter("datahqomuzohnpkofklb") + .withListBefore("dataln") + .withEnablePartitionDiscovery("dataffyvowlammvazv") + .withPartitionRootPath("dataie") + .withDeleteFilesAfterCompletion("datajunmgd") + .withModifiedDatetimeStart("dataeivrhjxdnkgztfgc") + .withModifiedDatetimeEnd("datavbreh"); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java index 57c009410f4c6..a725d06a8ea38 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java @@ -11,17 +11,20 @@ public final class AzureDataLakeStoreSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreSink model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreSink\",\"copyBehavior\":\"datar\",\"enableAdlsSingleFileParallel\":\"datavos\",\"writeBatchSize\":\"dataujhskxxekzys\",\"writeBatchTimeout\":\"datatfimcaxgtwpzqti\",\"sinkRetryCount\":\"dataicsfaqy\",\"sinkRetryWait\":\"datacpdtktfpjkxk\",\"maxConcurrentConnections\":\"datawntnfoqwufor\",\"disableMetricsCollection\":\"databe\",\"\":{\"pslcfwgrzzqf\":\"datapnsyedpyrp\",\"tjzdpllgllvkorg\":\"dataodifghdgsyhncxoq\",\"jaogmttxq\":\"datasoxxoqyik\",\"fnsdccmdplhzjiq\":\"datavmybqjlgr\"}}") + "{\"type\":\"fuvbgcyarsbhj\",\"copyBehavior\":\"datarldsijcmn\",\"enableAdlsSingleFileParallel\":\"datapxgxjmwz\",\"writeBatchSize\":\"dataxvsmrxypbiwn\",\"writeBatchTimeout\":\"datanaixjsfasxfamn\",\"sinkRetryCount\":\"datayx\",\"sinkRetryWait\":\"datalqybfn\",\"maxConcurrentConnections\":\"datajxwcojjmps\",\"disableMetricsCollection\":\"datatqc\",\"\":{\"iignrr\":\"datazjgkcxben\"}}") .toObject(AzureDataLakeStoreSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreSink model = new AzureDataLakeStoreSink().withWriteBatchSize("dataujhskxxekzys") - .withWriteBatchTimeout("datatfimcaxgtwpzqti").withSinkRetryCount("dataicsfaqy") - .withSinkRetryWait("datacpdtktfpjkxk").withMaxConcurrentConnections("datawntnfoqwufor") - .withDisableMetricsCollection("databe").withCopyBehavior("datar") - .withEnableAdlsSingleFileParallel("datavos"); + AzureDataLakeStoreSink model = new AzureDataLakeStoreSink().withWriteBatchSize("dataxvsmrxypbiwn") + .withWriteBatchTimeout("datanaixjsfasxfamn") + .withSinkRetryCount("datayx") + .withSinkRetryWait("datalqybfn") + 
.withMaxConcurrentConnections("datajxwcojjmps") + .withDisableMetricsCollection("datatqc") + .withCopyBehavior("datarldsijcmn") + .withEnableAdlsSingleFileParallel("datapxgxjmwz"); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java index 9e0ee8e8f2686..3210dc89246d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java @@ -11,15 +11,17 @@ public final class AzureDataLakeStoreSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreSource model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreSource\",\"recursive\":\"dataf\",\"sourceRetryCount\":\"datal\",\"sourceRetryWait\":\"dataxoudjcttavbc\",\"maxConcurrentConnections\":\"datazvqzmlferjw\",\"disableMetricsCollection\":\"datannqudexnicqu\",\"\":{\"hhsvsnedh\":\"datattfqgdoowgqooi\",\"flrrtju\":\"datajiwfvetwfreq\",\"wqal\":\"dataikqzd\",\"cisolkwipvls\":\"datapmiytpji\"}}") + "{\"type\":\"khufktqgtj\",\"recursive\":\"datamhioar\",\"sourceRetryCount\":\"datactreotzgkokfztrv\",\"sourceRetryWait\":\"databt\",\"maxConcurrentConnections\":\"dataw\",\"disableMetricsCollection\":\"datakegyskmh\",\"\":{\"lpsswoslqmftk\":\"databznjngerwo\"}}") .toObject(AzureDataLakeStoreSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDataLakeStoreSource model = new AzureDataLakeStoreSource().withSourceRetryCount("datal") - .withSourceRetryWait("dataxoudjcttavbc").withMaxConcurrentConnections("datazvqzmlferjw") - .withDisableMetricsCollection("datannqudexnicqu").withRecursive("dataf"); + AzureDataLakeStoreSource model = new AzureDataLakeStoreSource().withSourceRetryCount("datactreotzgkokfztrv") + .withSourceRetryWait("databt") + .withMaxConcurrentConnections("dataw") + .withDisableMetricsCollection("datakegyskmh") + .withRecursive("datamhioar"); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java index 42ccebaa5a8ef..2c0b9fcd2d2fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java @@ -13,18 +13,19 @@ public final class AzureDataLakeStoreWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDataLakeStoreWriteSettings model = BinaryData.fromString( - 
"{\"type\":\"AzureDataLakeStoreWriteSettings\",\"expiryDateTime\":\"datarmtuprqt\",\"maxConcurrentConnections\":\"dataqkohupyajkde\",\"disableMetricsCollection\":\"dataarjv\",\"copyBehavior\":\"dataozfjbdyyxhjfzjb\",\"metadata\":[{\"name\":\"datalnbklhwri\",\"value\":\"datauljbhgzffe\"},{\"name\":\"datayoiaobbzcdlcc\",\"value\":\"datamvbhb\"}],\"\":{\"x\":\"databxolzin\",\"gzgsgzlbunmjha\":\"datafixrukwxcaagzlqo\",\"ysrreebj\":\"datafiyrywf\",\"dtykhsafrfv\":\"dataslbxfkiiarlldygf\"}}") + "{\"type\":\"gsxkyboysquygokh\",\"expiryDateTime\":\"dataopbabndwcfmzmqmg\",\"maxConcurrentConnections\":\"datatwcyigrhfevxypqu\",\"disableMetricsCollection\":\"dataojyxhhvoowrtcsuc\",\"copyBehavior\":\"dataawyiqzj\",\"metadata\":[{\"name\":\"datajnmpvsbludfbhzu\",\"value\":\"datapfbhihddiiuex\"}],\"\":{\"udmlqpward\":\"dataf\"}}") .toObject(AzureDataLakeStoreWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureDataLakeStoreWriteSettings model - = new AzureDataLakeStoreWriteSettings().withMaxConcurrentConnections("dataqkohupyajkde") - .withDisableMetricsCollection("dataarjv").withCopyBehavior("dataozfjbdyyxhjfzjb") - .withMetadata(Arrays.asList(new MetadataItem().withName("datalnbklhwri").withValue("datauljbhgzffe"), - new MetadataItem().withName("datayoiaobbzcdlcc").withValue("datamvbhb"))) - .withExpiryDateTime("datarmtuprqt"); + = new AzureDataLakeStoreWriteSettings().withMaxConcurrentConnections("datatwcyigrhfevxypqu") + .withDisableMetricsCollection("dataojyxhhvoowrtcsuc") + .withCopyBehavior("dataawyiqzj") + .withMetadata( + Arrays.asList(new MetadataItem().withName("datajnmpvsbludfbhzu").withValue("datapfbhihddiiuex"))) + .withExpiryDateTime("dataopbabndwcfmzmqmg"); model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java index 212b600fd6558..3ef0e311bfbc8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java @@ -19,32 +19,38 @@ public final class AzureDatabricksDeltaLakeDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureDatabricksDeltaLakeDataset\",\"typeProperties\":{\"table\":\"dataram\",\"database\":\"dataugqcglmadfztof\"},\"description\":\"qlauuagwayf\",\"structure\":\"datae\",\"schema\":\"dataxfei\",\"linkedServiceName\":{\"referenceName\":\"basthz\",\"parameters\":{\"qcj\":\"datapssvnonij\",\"kvocu\":\"datazzjkugpdqqbt\"}},\"parameters\":{\"tt\":{\"type\":\"Bool\",\"defaultValue\":\"datapwarhw\"},\"tkzbhizxp\":{\"type\":\"Float\",\"defaultValue\":\"datapzwxy\"},\"ushvlxudhe\":{\"type\":\"Int\",\"defaultValue\":\"datadmwnfhmjusuqn\"},\"sz\":{\"type\":\"Array\",\"defaultValue\":\"datanirmidtvhjc\"}},\"annotations\":[\"dataygkxrlfojlclp\",\"datamveybodhrv\"],\"folder\":{\"name\":\"u\"},\"\":{\"gpdxtsaujtco\":\"databcumjv\",\"l\":\"datajybolqoxupt\",\"vamtyk\":\"dataivmlkwkzli\",\"fxcsqmzdozktkdpc\":\"dataszde\"}}") + "{\"type\":\"vtoiqofzttqg\",\"typeProperties\":{\"table\":\"dataapaseqcppypfre\",\"database\":\"datavzhn\"},\"description\":\"lwyoxzuhellitpqv\",\"structure\":\"datavrsgqbm\",\"schema\":\"dataxeo\",\"linkedServiceName\":{\"referenceName\":\"ebzvtvxxfsfoy\",\"parameters\":{\"dybkb\":\"dataihnalpcu\",\"ywedbpirbzpauzbb\":\"datadwbmivzika\"}},\"parameters\":{\"xgmigsoebd\":{\"type\":\"SecureString\",\"defaultValue\":\"datazdwpave\"},\"vcymddoei\":{\"type\":\"Array\",\"defaultValue\":\"dataiuiimerffh\"},\"chh\":{\"type\":\"SecureString\",\"defaultValue\":\"datagajfeudbobmolji\"},\"cc\":{\"type\":\"String\",\"defaultValue\":\"dataih\"}},\"annotations\":[\"datajpoipdjxyotgvra\",\"datahntoiwfszkrl\",\"dataosjwrretsluqf\"],\"folder\":{\"name\":\"d\"},\"\":{\"uwhdqngqam\":\"dataioixviobuwbng\",\"exqzaffzqodoks\":\"databmggnqx\"}}") .toObject(AzureDatabricksDeltaLakeDataset.class); - Assertions.assertEquals("qlauuagwayf", model.description()); - Assertions.assertEquals("basthz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("tt").type()); - Assertions.assertEquals("u", model.folder().name()); + Assertions.assertEquals("lwyoxzuhellitpqv", model.description()); + Assertions.assertEquals("ebzvtvxxfsfoy", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xgmigsoebd").type()); + Assertions.assertEquals("d", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeDataset model = new AzureDatabricksDeltaLakeDataset().withDescription("qlauuagwayf") - .withStructure("datae").withSchema("dataxfei") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("basthz") - .withParameters(mapOf("qcj", "datapssvnonij", "kvocu", "datazzjkugpdqqbt"))) - .withParameters(mapOf("tt", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datapwarhw"), "tkzbhizxp", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapzwxy"), "ushvlxudhe", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datadmwnfhmjusuqn"), "sz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datanirmidtvhjc"))) - .withAnnotations(Arrays.asList("dataygkxrlfojlclp", "datamveybodhrv")) - .withFolder(new DatasetFolder().withName("u")).withTable("dataram").withDatabase("dataugqcglmadfztof"); + AzureDatabricksDeltaLakeDataset model = new AzureDatabricksDeltaLakeDataset() + .withDescription("lwyoxzuhellitpqv") + .withStructure("datavrsgqbm") + .withSchema("dataxeo") + .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("ebzvtvxxfsfoy") + .withParameters(mapOf("dybkb", "dataihnalpcu", "ywedbpirbzpauzbb", "datadwbmivzika"))) + .withParameters(mapOf("xgmigsoebd", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datazdwpave"), + "vcymddoei", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataiuiimerffh"), "chh", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datagajfeudbobmolji"), + "cc", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataih"))) + .withAnnotations(Arrays.asList("datajpoipdjxyotgvra", "datahntoiwfszkrl", "dataosjwrretsluqf")) + .withFolder(new DatasetFolder().withName("d")) + .withTable("dataapaseqcppypfre") + .withDatabase("datavzhn"); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeDataset.class); - Assertions.assertEquals("qlauuagwayf", model.description()); - Assertions.assertEquals("basthz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("tt").type()); - Assertions.assertEquals("u", model.folder().name()); + Assertions.assertEquals("lwyoxzuhellitpqv", model.description()); + Assertions.assertEquals("ebzvtvxxfsfoy", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xgmigsoebd").type()); + Assertions.assertEquals("d", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java index a357545ffd968..aa59620adf14c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class AzureDatabricksDeltaLakeDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeDatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"dataohplrgcnbvmhvq\",\"database\":\"dataedaxkuyorfjidqo\"}") + = BinaryData.fromString("{\"table\":\"datavomdqxnoyzqipapi\",\"database\":\"datacydbjghunq\"}") .toObject(AzureDatabricksDeltaLakeDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeDatasetTypeProperties model = new AzureDatabricksDeltaLakeDatasetTypeProperties() - .withTable("dataohplrgcnbvmhvq").withDatabase("dataedaxkuyorfjidqo"); + AzureDatabricksDeltaLakeDatasetTypeProperties model + = new AzureDatabricksDeltaLakeDatasetTypeProperties().withTable("datavomdqxnoyzqipapi") + .withDatabase("datacydbjghunq"); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java index a62c7df53ad6d..cceb49045d63c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java @@ -11,14 +11,15 @@ public final class AzureDatabricksDeltaLakeExportCommandTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeExportCommand model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeExportCommand\",\"dateFormat\":\"dataphnag\",\"timestampFormat\":\"datalaxjmnbmfmloq\",\"\":{\"m\":\"datawvtddpicwnbtvlrs\",\"dknxerkaiikbpf\":\"datarm\"}}") + "{\"type\":\"aenarfy\",\"dateFormat\":\"dataf\",\"timestampFormat\":\"datadqoepwyyeupkp\",\"\":{\"khoygfgchlc\":\"datai\",\"yklyhmymkcc\":\"datatxcsskgu\"}}") .toObject(AzureDatabricksDeltaLakeExportCommand.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeExportCommand model = new AzureDatabricksDeltaLakeExportCommand() - .withDateFormat("dataphnag").withTimestampFormat("datalaxjmnbmfmloq"); + AzureDatabricksDeltaLakeExportCommand model + = new AzureDatabricksDeltaLakeExportCommand().withDateFormat("dataf") + .withTimestampFormat("datadqoepwyyeupkp"); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeExportCommand.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java index b33520337b6f4..ccf2321c94e1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java @@ -11,14 +11,15 @@ public final class AzureDatabricksDeltaLakeImportCommandTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeImportCommand model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeImportCommand\",\"dateFormat\":\"dataaqdoo\",\"timestampFormat\":\"datanzkmjoybyogw\",\"\":{\"hxawohsj\":\"datasnryk\",\"yzvrixcveserltlh\":\"datawxphnlw\",\"ksfxdmbxfyxweiq\":\"datajgjuopvkrms\",\"iucu\":\"datahfyvkxgoxsv\"}}") + "{\"type\":\"bticnidubo\",\"dateFormat\":\"datacgmcthjgbrxmxqsk\",\"timestampFormat\":\"datatajjfmkwq\",\"\":{\"ptiqfu\":\"dataiibtvwalhaw\",\"dmmwylrvztaelpux\":\"dataavtapcxsmap\",\"vhyqexujlleweegv\":\"datakuemcbtumtnrcv\",\"gfwxthrcmgsimgo\":\"databsythycdckcpfom\"}}") .toObject(AzureDatabricksDeltaLakeImportCommand.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeImportCommand model = new AzureDatabricksDeltaLakeImportCommand() - .withDateFormat("dataaqdoo").withTimestampFormat("datanzkmjoybyogw"); + AzureDatabricksDeltaLakeImportCommand model + = new 
AzureDatabricksDeltaLakeImportCommand().withDateFormat("datacgmcthjgbrxmxqsk") + .withTimestampFormat("datatajjfmkwq"); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeImportCommand.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java index 6b5598ef2cd76..aebe41bb710af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java @@ -12,18 +12,22 @@ public final class AzureDatabricksDeltaLakeSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeSink model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeSink\",\"preCopyScript\":\"dataxpqrkeyhj\",\"importSettings\":{\"type\":\"AzureDatabricksDeltaLakeImportCommand\",\"dateFormat\":\"datazkbrvta\",\"timestampFormat\":\"datahtqvt\",\"\":{\"qjjxhijb\":\"datadijcndwoyqvc\",\"vpd\":\"dataiyuhoxul\",\"mphyacdhjmpnv\":\"datairhg\",\"hljtkuyvytfuq\":\"datakxs\"}},\"writeBatchSize\":\"datatqbxpy\",\"writeBatchTimeout\":\"datawkjeitkfhzv\",\"sinkRetryCount\":\"datandbklscoka\",\"sinkRetryWait\":\"dataqqipvnvdzssss\",\"maxConcurrentConnections\":\"datagh\",\"disableMetricsCollection\":\"datadqkotxodbxzh\",\"\":{\"yy\":\"datawjnnoot\"}}") + "{\"type\":\"x\",\"preCopyScript\":\"dataogowfqrykikhfgw\",\"importSettings\":{\"type\":\"mkfy\",\"dateFormat\":\"dataazi\",\"timestampFormat\":\"datah\",\"\":{\"znf\":\"dataavfsehbxbqionnq\",\"pvxcqj\":\"dataiboyexjcrwwdtey\",\"mv\":\"datawtiasfbp\"}},\"writeBatchSize\":\"datarysnszsehoegvwb\",\"writeBatchTimeout\":\"datarndxbkvzwqgmfhl\",\"sinkRetryCount\":\"datayed\",\"sinkRetryWait\":\"datafncwiyfzuw\",\"maxConcurrentConnections\":\"dataaaxstn\",\"disableMetricsCollection\":\"datavv\",\"\":{\"wzbzedhcxyg\":\"datatuj\"}}") .toObject(AzureDatabricksDeltaLakeSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeSink model = new AzureDatabricksDeltaLakeSink().withWriteBatchSize("datatqbxpy") - .withWriteBatchTimeout("datawkjeitkfhzv").withSinkRetryCount("datandbklscoka") - .withSinkRetryWait("dataqqipvnvdzssss").withMaxConcurrentConnections("datagh") - .withDisableMetricsCollection("datadqkotxodbxzh").withPreCopyScript("dataxpqrkeyhj") - .withImportSettings(new AzureDatabricksDeltaLakeImportCommand().withDateFormat("datazkbrvta") - .withTimestampFormat("datahtqvt")); + AzureDatabricksDeltaLakeSink model + = new AzureDatabricksDeltaLakeSink().withWriteBatchSize("datarysnszsehoegvwb") + .withWriteBatchTimeout("datarndxbkvzwqgmfhl") + .withSinkRetryCount("datayed") + .withSinkRetryWait("datafncwiyfzuw") + .withMaxConcurrentConnections("dataaaxstn") + .withDisableMetricsCollection("datavv") + .withPreCopyScript("dataogowfqrykikhfgw") + .withImportSettings( + new AzureDatabricksDeltaLakeImportCommand().withDateFormat("dataazi").withTimestampFormat("datah")); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeSink.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java index 5d27ba2330c34..99b6b17b63a42 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java @@ -12,17 +12,19 @@ public final class AzureDatabricksDeltaLakeSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureDatabricksDeltaLakeSource model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeSource\",\"query\":\"datall\",\"exportSettings\":{\"type\":\"AzureDatabricksDeltaLakeExportCommand\",\"dateFormat\":\"datahthxcrweeqkdmpfm\",\"timestampFormat\":\"datacelsnjf\",\"\":{\"ikmgwxysu\":\"datadcjtveibnt\",\"kstrmsbmdgrzke\":\"datasofdhrif\",\"aaxz\":\"dataplorntnss\",\"pisc\":\"datadlnv\"}},\"sourceRetryCount\":\"datayhtb\",\"sourceRetryWait\":\"dataycacoelvoy\",\"maxConcurrentConnections\":\"datamxqalqqrymjwwox\",\"disableMetricsCollection\":\"dataefellhdsgo\",\"\":{\"malthcbvuvwdp\":\"datab\"}}") + "{\"type\":\"zdukamt\",\"query\":\"dataawmhbq\",\"exportSettings\":{\"type\":\"xxc\",\"dateFormat\":\"datamgvwbytz\",\"timestampFormat\":\"dataqvzwummw\",\"\":{\"dujhzaiw\":\"datag\",\"l\":\"dataqrbtrmi\"}},\"sourceRetryCount\":\"dataufvabci\",\"sourceRetryWait\":\"databyfs\",\"maxConcurrentConnections\":\"dataiwgkozlpsfraj\",\"disableMetricsCollection\":\"datau\",\"\":{\"lllsungzvytbqq\":\"datagf\",\"ugjea\":\"dataxkuyyrcqsyq\"}}") .toObject(AzureDatabricksDeltaLakeSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeSource model = new AzureDatabricksDeltaLakeSource().withSourceRetryCount("datayhtb") - .withSourceRetryWait("dataycacoelvoy").withMaxConcurrentConnections("datamxqalqqrymjwwox") - .withDisableMetricsCollection("dataefellhdsgo").withQuery("datall") - .withExportSettings(new AzureDatabricksDeltaLakeExportCommand().withDateFormat("datahthxcrweeqkdmpfm") - .withTimestampFormat("datacelsnjf")); + AzureDatabricksDeltaLakeSource model = new AzureDatabricksDeltaLakeSource().withSourceRetryCount("dataufvabci") + .withSourceRetryWait("databyfs") + .withMaxConcurrentConnections("dataiwgkozlpsfraj") + .withDisableMetricsCollection("datau") + .withQuery("dataawmhbq") + .withExportSettings(new AzureDatabricksDeltaLakeExportCommand().withDateFormat("datamgvwbytz") + .withTimestampFormat("dataqvzwummw")); model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java index 6416fe214470c..c19890ce30c79 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java @@ -11,14 +11,14 @@ public final class AzureFileStorageLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureFileStorageLocation model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageLocation\",\"folderPath\":\"datahuioaeoc\",\"fileName\":\"datajtfeyvkbdgddkr\",\"\":{\"uzy\":\"datacxbeuuqutkzwtjww\",\"deg\":\"dataijcxfno\",\"uckcatuqbhpow\":\"datadydhqkkkb\"}}") + "{\"type\":\"nensmuffi\",\"folderPath\":\"databctvbpzuj\",\"fileName\":\"datatotdxposcslh\",\"\":{\"xidhhxomil\":\"datasiecktybhj\",\"xwjwilm\":\"datadxjxdu\",\"wuj\":\"datarslaatep\",\"gtvh\":\"datazgxqgqwlxr\"}}") .toObject(AzureFileStorageLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureFileStorageLocation model - = new AzureFileStorageLocation().withFolderPath("datahuioaeoc").withFileName("datajtfeyvkbdgddkr"); + = new AzureFileStorageLocation().withFolderPath("databctvbpzuj").withFileName("datatotdxposcslh"); model = BinaryData.fromObject(model).toObject(AzureFileStorageLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java index f8cd65c858534..12ece49714ab0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java @@ -11,18 +11,25 @@ public final class AzureFileStorageReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureFileStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageReadSettings\",\"recursive\":\"datajldwxdqt\",\"wildcardFolderPath\":\"datatgn\",\"wildcardFileName\":\"databjvmdkgvu\",\"prefix\":\"datamlsuuhwuox\",\"fileListPath\":\"datai\",\"enablePartitionDiscovery\":\"datazzjo\",\"partitionRootPath\":\"dataygzjrkslqba\",\"deleteFilesAfterCompletion\":\"databjxxcruleim\",\"modifiedDatetimeStart\":\"dataxoign\",\"modifiedDatetimeEnd\":\"datamjmpgzetuvfp\",\"maxConcurrentConnections\":\"datajpmeptnqsnpa\",\"disableMetricsCollection\":\"datasprrvjwbeeolm\",\"\":{\"fk\":\"dataqol\",\"aphlwmivazfnb\":\"datadwzvhtgfdy\"}}") + "{\"type\":\"oyusrbuydeyh\",\"recursive\":\"datalkpvaagrdf\",\"wildcardFolderPath\":\"dataglqdsphvosucryh\",\"wildcardFileName\":\"datahthzfotfrfhrjkah\",\"prefix\":\"datafshgmqxwoppn\",\"fileListPath\":\"datarmzv\",\"enablePartitionDiscovery\":\"datafkznyait\",\"partitionRootPath\":\"datamobrxhwpg\",\"deleteFilesAfterCompletion\":\"datanxrjmilogcnzfg\",\"modifiedDatetimeStart\":\"databbtplrtxhzt\",\"modifiedDatetimeEnd\":\"datawyrsfj\",\"maxConcurrentConnections\":\"datattkdrblehenj\",\"disableMetricsCollection\":\"dataiwdeosbijikjf\",\"\":{\"avfjx\":\"datauwhbpojujpifxtgr\",\"lauhr\":\"dataiwx\",\"r\":\"datachphovu\"}}") .toObject(AzureFileStorageReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureFileStorageReadSettings model = new AzureFileStorageReadSettings() - 
.withMaxConcurrentConnections("datajpmeptnqsnpa").withDisableMetricsCollection("datasprrvjwbeeolm") - .withRecursive("datajldwxdqt").withWildcardFolderPath("datatgn").withWildcardFileName("databjvmdkgvu") - .withPrefix("datamlsuuhwuox").withFileListPath("datai").withEnablePartitionDiscovery("datazzjo") - .withPartitionRootPath("dataygzjrkslqba").withDeleteFilesAfterCompletion("databjxxcruleim") - .withModifiedDatetimeStart("dataxoign").withModifiedDatetimeEnd("datamjmpgzetuvfp"); + AzureFileStorageReadSettings model + = new AzureFileStorageReadSettings().withMaxConcurrentConnections("datattkdrblehenj") + .withDisableMetricsCollection("dataiwdeosbijikjf") + .withRecursive("datalkpvaagrdf") + .withWildcardFolderPath("dataglqdsphvosucryh") + .withWildcardFileName("datahthzfotfrfhrjkah") + .withPrefix("datafshgmqxwoppn") + .withFileListPath("datarmzv") + .withEnablePartitionDiscovery("datafkznyait") + .withPartitionRootPath("datamobrxhwpg") + .withDeleteFilesAfterCompletion("datanxrjmilogcnzfg") + .withModifiedDatetimeStart("databbtplrtxhzt") + .withModifiedDatetimeEnd("datawyrsfj"); model = BinaryData.fromObject(model).toObject(AzureFileStorageReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java index 81297a923a343..a47945e3d2431 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java @@ -13,18 +13,19 @@ public final class AzureFileStorageWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureFileStorageWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageWriteSettings\",\"maxConcurrentConnections\":\"datavgwvfvsqlyah\",\"disableMetricsCollection\":\"dataoqk\",\"copyBehavior\":\"datatnbuzvaxlt\",\"metadata\":[{\"name\":\"datahic\",\"value\":\"dataauvprqzpfpbxljdd\"},{\"name\":\"dataoyzs\",\"value\":\"datavkcldons\"},{\"name\":\"dataazxewnl\",\"value\":\"datahhczqm\"}],\"\":{\"rmfclkyncjyafzz\":\"datay\",\"avo\":\"databohb\",\"bverbjctszb\":\"dataerduab\"}}") + "{\"type\":\"uufvo\",\"maxConcurrentConnections\":\"datakff\",\"disableMetricsCollection\":\"datawaewpils\",\"copyBehavior\":\"datasghdovcpbwfnap\",\"metadata\":[{\"name\":\"datahsixzcdaukh\",\"value\":\"datah\"},{\"name\":\"datacbomfoojkerdu\",\"value\":\"datanbzamroadutogbkd\"},{\"name\":\"datasgval\",\"value\":\"datacnecl\"}],\"\":{\"nsl\":\"datajsqcubyj\",\"subzfuhjnmdcyrbz\":\"dataiteenaheecsft\"}}") .toObject(AzureFileStorageWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureFileStorageWriteSettings model - = new AzureFileStorageWriteSettings().withMaxConcurrentConnections("datavgwvfvsqlyah") - .withDisableMetricsCollection("dataoqk").withCopyBehavior("datatnbuzvaxlt") - .withMetadata(Arrays.asList(new MetadataItem().withName("datahic").withValue("dataauvprqzpfpbxljdd"), - new MetadataItem().withName("dataoyzs").withValue("datavkcldons"), - new MetadataItem().withName("dataazxewnl").withValue("datahhczqm"))); + = new 
AzureFileStorageWriteSettings().withMaxConcurrentConnections("datakff") + .withDisableMetricsCollection("datawaewpils") + .withCopyBehavior("datasghdovcpbwfnap") + .withMetadata(Arrays.asList(new MetadataItem().withName("datahsixzcdaukh").withValue("datah"), + new MetadataItem().withName("datacbomfoojkerdu").withValue("datanbzamroadutogbkd"), + new MetadataItem().withName("datasgval").withValue("datacnecl"))); model = BinaryData.fromObject(model).toObject(AzureFileStorageWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java index bc36b8203a0a3..c9a53a167e704 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java @@ -23,61 +23,60 @@ public final class AzureFunctionActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureFunctionActivity model = BinaryData.fromString( - "{\"type\":\"AzureFunctionActivity\",\"typeProperties\":{\"method\":\"PUT\",\"functionName\":\"dataiarfkfgrdri\",\"headers\":{\"rqpickn\":\"fx\"},\"body\":\"datazdrd\"},\"linkedServiceName\":{\"referenceName\":\"wmueavawywofgc\",\"parameters\":{\"mx\":\"datahjvvrrxclf\",\"igxwxxfkfthw\":\"datafqwyiuhhuftn\",\"lstgsmeijgjbev\":\"dataossokafy\",\"wvdklgwoyw\":\"datasrcsyjx\"}},\"policy\":{\"timeout\":\"datafmenbaj\",\"retry\":\"dataelbcsyaohizf\",\"retryIntervalInSeconds\":1921383033,\"secureInput\":false,\"secureOutput\":false,\"\":{\"y\":\"datatkddohxvcsoq\",\"danufiwtkhcmoc\":\"datacqpmywt\",\"khmbks\":\"datagtmfug\"}},\"name\":\"kkztexds\",\"description\":\"hndcrdvecc\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ahuu\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"pfisyydoy\":\"datavkolfiigoxohjy\",\"hqvmilpgxeaqwogp\":\"dataccwvcfayllx\",\"mcoruti\":\"datatsmyfgted\"}},{\"activity\":\"dkypckhqooqni\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Skipped\"],\"\":{\"ngvpsuk\":\"datatmkmgcipvr\",\"gq\":\"datakmkghugfd\"}}],\"userProperties\":[{\"name\":\"uekoxylcbpb\",\"value\":\"datajtiidozf\"},{\"name\":\"gvq\",\"value\":\"datarrknijdrsvoh\"}],\"\":{\"lzsgpoiccbzqko\":\"dataoiikr\",\"dm\":\"dataja\",\"lz\":\"datazkq\"}}") + 
"{\"type\":\"doomhrlgidqxbrdh\",\"typeProperties\":{\"method\":\"POST\",\"functionName\":\"datafhpaywwesaqsuqps\",\"headers\":{\"lra\":\"datab\"},\"body\":\"datadiathhxqs\"},\"linkedServiceName\":{\"referenceName\":\"rnyf\",\"parameters\":{\"wffos\":\"dataaomogkp\",\"zgqkxsoavbteaegy\":\"datamxmvgj\",\"bztlvujbhw\":\"datajytoepcdhqjcz\",\"cihkjjjbit\":\"dataszrhf\"}},\"policy\":{\"timeout\":\"dataii\",\"retry\":\"datargz\",\"retryIntervalInSeconds\":114099002,\"secureInput\":true,\"secureOutput\":true,\"\":{\"jn\":\"dataods\"}},\"name\":\"wcqnaspjdah\",\"description\":\"nkliyfgkzw\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ybdoyykhidiand\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"ynppqtxpj\":\"dataoxk\",\"jcimoi\":\"dataowjatyhkq\",\"gyhschamwofqntt\":\"dataqzvawfpu\",\"hguubpmvp\":\"datakjcgupxnuv\"}}],\"userProperties\":[{\"name\":\"iyo\",\"value\":\"datavafbdzokplolcal\"},{\"name\":\"vcxvcpxdeqntb\",\"value\":\"datatdqsqb\"},{\"name\":\"ubswzafqrmwdofg\",\"value\":\"dataspzwa\"}],\"\":{\"djvlwczwdkkscooq\":\"dataxwvj\",\"fckrmrbaoidt\":\"datavht\",\"cbvkoughjsxp\":\"datam\",\"tsbpvyvsc\":\"datatsvppfdnihxcij\"}}") .toObject(AzureFunctionActivity.class); - Assertions.assertEquals("kkztexds", model.name()); - Assertions.assertEquals("hndcrdvecc", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("ahuu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("uekoxylcbpb", model.userProperties().get(0).name()); - Assertions.assertEquals("wmueavawywofgc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1921383033, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(AzureFunctionActivityMethod.PUT, model.method()); - Assertions.assertEquals("fx", model.headers().get("rqpickn")); + Assertions.assertEquals("wcqnaspjdah", model.name()); + Assertions.assertEquals("nkliyfgkzw", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("ybdoyykhidiand", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("iyo", model.userProperties().get(0).name()); + Assertions.assertEquals("rnyf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(114099002, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals(AzureFunctionActivityMethod.POST, model.method()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureFunctionActivity model - = new AzureFunctionActivity().withName("kkztexds").withDescription("hndcrdvecc") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ahuu") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, 
DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dkypckhqooqni") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("uekoxylcbpb").withValue("datajtiidozf"), - new UserProperty().withName("gvq").withValue("datarrknijdrsvoh"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wmueavawywofgc") - .withParameters(mapOf("mx", "datahjvvrrxclf", "igxwxxfkfthw", "datafqwyiuhhuftn", "lstgsmeijgjbev", - "dataossokafy", "wvdklgwoyw", "datasrcsyjx"))) - .withPolicy(new ActivityPolicy().withTimeout("datafmenbaj").withRetry("dataelbcsyaohizf") - .withRetryIntervalInSeconds(1921383033).withSecureInput(false).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withMethod(AzureFunctionActivityMethod.PUT).withFunctionName("dataiarfkfgrdri") - .withHeaders(mapOf("rqpickn", "fx")).withBody("datazdrd"); + AzureFunctionActivity model = new AzureFunctionActivity().withName("wcqnaspjdah") + .withDescription("nkliyfgkzw") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ybdoyykhidiand") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("iyo").withValue("datavafbdzokplolcal"), + new UserProperty().withName("vcxvcpxdeqntb").withValue("datatdqsqb"), + new UserProperty().withName("ubswzafqrmwdofg").withValue("dataspzwa"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rnyf") + .withParameters(mapOf("wffos", "dataaomogkp", "zgqkxsoavbteaegy", "datamxmvgj", "bztlvujbhw", + "datajytoepcdhqjcz", "cihkjjjbit", "dataszrhf"))) + .withPolicy(new ActivityPolicy().withTimeout("dataii") + .withRetry("datargz") + .withRetryIntervalInSeconds(114099002) + .withSecureInput(true) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withMethod(AzureFunctionActivityMethod.POST) + .withFunctionName("datafhpaywwesaqsuqps") + .withHeaders(mapOf("lra", "datab")) + .withBody("datadiathhxqs"); model = BinaryData.fromObject(model).toObject(AzureFunctionActivity.class); - Assertions.assertEquals("kkztexds", model.name()); - Assertions.assertEquals("hndcrdvecc", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("ahuu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("uekoxylcbpb", model.userProperties().get(0).name()); - Assertions.assertEquals("wmueavawywofgc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1921383033, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(AzureFunctionActivityMethod.PUT, model.method()); - Assertions.assertEquals("fx", model.headers().get("rqpickn")); + Assertions.assertEquals("wcqnaspjdah", model.name()); + Assertions.assertEquals("nkliyfgkzw", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, 
model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("ybdoyykhidiand", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("iyo", model.userProperties().get(0).name()); + Assertions.assertEquals("rnyf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(114099002, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals(AzureFunctionActivityMethod.POST, model.method()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java index fbc2ee0172763..f17622ee5e1db 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java @@ -15,20 +15,20 @@ public final class AzureFunctionActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureFunctionActivityTypeProperties model = BinaryData.fromString( - "{\"method\":\"OPTIONS\",\"functionName\":\"dataazqsucttp\",\"headers\":{\"cqnglzfgepblh\":\"gbfiosdizp\"},\"body\":\"datagwvvenmuenoq\"}") + "{\"method\":\"POST\",\"functionName\":\"dataqncddaqq\",\"headers\":{\"rohfv\":\"datayi\",\"nkkztjmqjrh\":\"datagjnexdlsangl\"},\"body\":\"dataqajyrhrywucpdzb\"}") .toObject(AzureFunctionActivityTypeProperties.class); - Assertions.assertEquals(AzureFunctionActivityMethod.OPTIONS, model.method()); - Assertions.assertEquals("gbfiosdizp", model.headers().get("cqnglzfgepblh")); + Assertions.assertEquals(AzureFunctionActivityMethod.POST, model.method()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureFunctionActivityTypeProperties model = new AzureFunctionActivityTypeProperties() - .withMethod(AzureFunctionActivityMethod.OPTIONS).withFunctionName("dataazqsucttp") - .withHeaders(mapOf("cqnglzfgepblh", "gbfiosdizp")).withBody("datagwvvenmuenoq"); + AzureFunctionActivityTypeProperties model + = new AzureFunctionActivityTypeProperties().withMethod(AzureFunctionActivityMethod.POST) + .withFunctionName("dataqncddaqq") + .withHeaders(mapOf("rohfv", "datayi", "nkkztjmqjrh", "datagjnexdlsangl")) + .withBody("dataqajyrhrywucpdzb"); model = BinaryData.fromObject(model).toObject(AzureFunctionActivityTypeProperties.class); - Assertions.assertEquals(AzureFunctionActivityMethod.OPTIONS, model.method()); - Assertions.assertEquals("gbfiosdizp", model.headers().get("cqnglzfgepblh")); + Assertions.assertEquals(AzureFunctionActivityMethod.POST, model.method()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java index 1615a9d84306a..4218d86380a4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java @@ -23,86 +23,98 @@ public final class AzureMLBatchExecutionActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLBatchExecutionActivity model = BinaryData.fromString( - "{\"type\":\"AzureMLBatchExecution\",\"typeProperties\":{\"globalParameters\":{\"qqkh\":\"datajthluoyrq\",\"dwsentrcdzyvxwnm\":\"datapws\",\"wpqvdduvxmrbb\":\"dataumd\",\"dtywzrnxiktoki\":\"dataliwfbgkyon\"},\"webServiceOutputs\":{\"lxogimihxyxe\":{\"filePath\":\"datamdadfygj\",\"linkedServiceName\":{\"referenceName\":\"cfprioabqxwid\",\"parameters\":{\"xtsywrmmhaxmo\":\"dataonnolrs\",\"armnseigoalxwuqu\":\"datauotexlpqydgfzet\",\"ghs\":\"datazrskdovgkpqzzrx\"}}}},\"webServiceInputs\":{\"zco\":{\"filePath\":\"datawixdcytd\",\"linkedServiceName\":{\"referenceName\":\"am\",\"parameters\":{\"zlbcamdzoauvwjkg\":\"datab\"}}},\"pgkwtpzbsytwthv\":{\"filePath\":\"datawcnnzacqludq\",\"linkedServiceName\":{\"referenceName\":\"aqxztywzaq\",\"parameters\":{\"lzpowse\":\"datatstmyfebbt\"}}},\"oeky\":{\"filePath\":\"datadtsvgyzmafqsn\",\"linkedServiceName\":{\"referenceName\":\"u\",\"parameters\":{\"qyhr\":\"databyvwejyyngw\"}}},\"veyngzj\":{\"filePath\":\"datanvxco\",\"linkedServiceName\":{\"referenceName\":\"pdgnsmhrpzbyudko\",\"parameters\":{\"d\":\"datajaaocjlwcouwcrex\",\"iukvmzxr\":\"datamkzb\"}}}}},\"linkedServiceName\":{\"referenceName\":\"bk\",\"parameters\":{\"cqhlfqimjlde\":\"datahguvqghueh\"}},\"policy\":{\"timeout\":\"dataqnforujfluomaltv\",\"retry\":\"dataudhtdapkdahy\",\"retryIntervalInSeconds\":1087656358,\"secureInput\":true,\"secureOutput\":false,\"\":{\"xuibyfylh\":\"datagyqrmteicl\",\"uqylmlunquvl\":\"datawqp\"}},\"name\":\"al\",\"description\":\"uztlxfgy\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"dacskulfqcxz\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"euwfmrckatnjik\":\"datavsgrcrknnru\",\"ieoth\":\"datazhtovs\",\"smavtn\":\"datawokprvpkdkds\"}},{\"activity\":\"gfmtximnpcghcfud\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Succeeded\",\"Completed\"],\"\":{\"erukuoeyyxcdwl\":\"datargerybdiajeeah\",\"xyitezfoekax\":\"datakglahd\"}}],\"userProperties\":[{\"name\":\"tfzaqnoq\",\"value\":\"datafyofohu\"},{\"name\":\"pfxkj\",\"value\":\"datahgwgsbaewkkq\"},{\"name\":\"kuzifsguolfkupmw\",\"value\":\"datazsirhp\"},{\"name\":\"gqdz\",\"value\":\"datadrcj\"}],\"\":{\"liphcpu\":\"databs\",\"e\":\"datadbzxidqqeslnaox\",\"fzyxamyjhp\":\"datautrlzzztg\",\"ily\":\"datazuvsjblqmddtp\"}}") + 
"{\"type\":\"crcpishj\",\"typeProperties\":{\"globalParameters\":{\"qlbjxps\":\"datarqponugco\",\"zygdkovytjsrboq\":\"datavvggvnqpar\"},\"webServiceOutputs\":{\"swlmxepygkfuwgkb\":{\"filePath\":\"dataqqvukjtcdppdmmf\",\"linkedServiceName\":{\"referenceName\":\"ubcccltygxzit\",\"parameters\":{\"vydvdjc\":\"datab\",\"rfayd\":\"datadhar\"}}},\"lefpuxjhxsfb\":{\"filePath\":\"datajqtk\",\"linkedServiceName\":{\"referenceName\":\"zwmqzkjecz\",\"parameters\":{\"jacvmhpueiuhhn\":\"datafew\",\"kvfjbxvhuili\":\"dataxnxxwafialipymn\",\"ahqqumoz\":\"datadyuuotqpljw\"}}}},\"webServiceInputs\":{\"ocjkqo\":{\"filePath\":\"dataaysmmztvkgxz\",\"linkedServiceName\":{\"referenceName\":\"rhnkm\",\"parameters\":{\"iypsmpgopcl\":\"datanplnnxopixx\",\"we\":\"datatysirhnwseb\",\"ddcqteozayjim\":\"datasxrnji\",\"jtmdw\":\"datacb\"}}},\"peafzvxbvk\":{\"filePath\":\"datacfnomwaoebvjmmsg\",\"linkedServiceName\":{\"referenceName\":\"koql\",\"parameters\":{\"hztdzmeiicdybcor\":\"dataerztenzkbppg\",\"ztzhwbwrocuv\":\"datamdzafdqqjds\",\"gzrt\":\"datapqag\",\"je\":\"datausxh\"}}},\"g\":{\"filePath\":\"dataj\",\"linkedServiceName\":{\"referenceName\":\"nmoekohxkgxyd\",\"parameters\":{\"wsage\":\"datapzetradbihn\",\"hoxux\":\"dataosbzydbt\",\"zeoxz\":\"datavbsazzh\",\"rjfpqemylkyg\":\"datarhakhaec\"}}}}},\"linkedServiceName\":{\"referenceName\":\"bv\",\"parameters\":{\"ctehxwnxp\":\"datamt\",\"b\":\"dataerhdablqol\"}},\"policy\":{\"timeout\":\"datahpcwwsqavpuwt\",\"retry\":\"dataqcfzdfmqoeofy\",\"retryIntervalInSeconds\":1978374101,\"secureInput\":true,\"secureOutput\":false,\"\":{\"qatgazd\":\"dataym\",\"aebaw\":\"datahih\",\"nmzraegyvxlnp\":\"datapajdkjq\"}},\"name\":\"ov\",\"description\":\"x\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"iv\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"syd\":\"dataaxyyvxetgsdhwmbe\",\"rkqdbqhz\":\"datarkchbnatrdr\"}},{\"activity\":\"vatypjk\",\"dependencyConditions\":[\"Completed\"],\"\":{\"vrlbezhwsvoi\":\"datadlehcqbjjphuakpk\",\"fzrjcbadnwpruydc\":\"datadcxjcjiqxybbbytm\",\"pdmeeabcnh\":\"datav\"}},{\"activity\":\"amydwdb\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"dhp\":\"dataixnuzbmffzig\",\"wfxgu\":\"dataecrviobfui\"}},{\"activity\":\"az\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Skipped\",\"Failed\"],\"\":{\"bsuqarm\":\"datanma\",\"rgbugprfiympy\":\"datajuldojor\",\"pdbc\":\"databcpieiqolym\",\"syylukpjdm\":\"dataedohhbnkhgp\"}}],\"userProperties\":[{\"name\":\"jhukngdf\",\"value\":\"dataspw\"},{\"name\":\"fjxljrrgvyuqunnv\",\"value\":\"datakpdlkviaen\"},{\"name\":\"ydkgicbkijyv\",\"value\":\"dataukauhnhdhssu\"},{\"name\":\"hkgfvgxmnan\",\"value\":\"datac\"}],\"\":{\"coruyistjwlnt\":\"datartvyvxbtpqjgb\",\"tu\":\"datapi\",\"gulymouwnnhbz\":\"datapncdebpelgy\",\"fqqjydrhwnnux\":\"datascztsatfu\"}}") .toObject(AzureMLBatchExecutionActivity.class); - Assertions.assertEquals("al", model.name()); - Assertions.assertEquals("uztlxfgy", model.description()); + Assertions.assertEquals("ov", model.name()); + Assertions.assertEquals("x", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("dacskulfqcxz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("tfzaqnoq", 
model.userProperties().get(0).name()); - Assertions.assertEquals("bk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1087656358, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("iv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("jhukngdf", model.userProperties().get(0).name()); + Assertions.assertEquals("bv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1978374101, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("cfprioabqxwid", - model.webServiceOutputs().get("lxogimihxyxe").linkedServiceName().referenceName()); - Assertions.assertEquals("am", model.webServiceInputs().get("zco").linkedServiceName().referenceName()); + Assertions.assertEquals("ubcccltygxzit", + model.webServiceOutputs().get("swlmxepygkfuwgkb").linkedServiceName().referenceName()); + Assertions.assertEquals("rhnkm", model.webServiceInputs().get("ocjkqo").linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLBatchExecutionActivity model = new AzureMLBatchExecutionActivity().withName("al") - .withDescription("uztlxfgy").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + AzureMLBatchExecutionActivity model = new AzureMLBatchExecutionActivity().withName("ov") + .withDescription("x") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("dacskulfqcxz") + new ActivityDependency().withActivity("iv") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("vatypjk") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("amydwdb") .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gfmtximnpcghcfud") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("az") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("tfzaqnoq").withValue("datafyofohu"), - new UserProperty().withName("pfxkj").withValue("datahgwgsbaewkkq"), - new UserProperty().withName("kuzifsguolfkupmw").withValue("datazsirhp"), - new UserProperty().withName("gqdz").withValue("datadrcj"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bk") - .withParameters(mapOf("cqhlfqimjlde", "datahguvqghueh"))) - .withPolicy(new 
ActivityPolicy().withTimeout("dataqnforujfluomaltv").withRetry("dataudhtdapkdahy") - .withRetryIntervalInSeconds(1087656358).withSecureInput(true).withSecureOutput(false) + .withUserProperties(Arrays.asList(new UserProperty().withName("jhukngdf").withValue("dataspw"), + new UserProperty().withName("fjxljrrgvyuqunnv").withValue("datakpdlkviaen"), + new UserProperty().withName("ydkgicbkijyv").withValue("dataukauhnhdhssu"), + new UserProperty().withName("hkgfvgxmnan").withValue("datac"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bv") + .withParameters(mapOf("ctehxwnxp", "datamt", "b", "dataerhdablqol"))) + .withPolicy(new ActivityPolicy().withTimeout("datahpcwwsqavpuwt") + .withRetry("dataqcfzdfmqoeofy") + .withRetryIntervalInSeconds(1978374101) + .withSecureInput(true) + .withSecureOutput(false) .withAdditionalProperties(mapOf())) - .withGlobalParameters(mapOf("qqkh", "datajthluoyrq", "dwsentrcdzyvxwnm", "datapws", "wpqvdduvxmrbb", - "dataumd", "dtywzrnxiktoki", "dataliwfbgkyon")) - .withWebServiceOutputs(mapOf("lxogimihxyxe", - new AzureMLWebServiceFile().withFilePath("datamdadfygj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cfprioabqxwid") - .withParameters(mapOf("xtsywrmmhaxmo", "dataonnolrs", "armnseigoalxwuqu", "datauotexlpqydgfzet", - "ghs", "datazrskdovgkpqzzrx"))))) - .withWebServiceInputs( - mapOf("zco", - new AzureMLWebServiceFile().withFilePath("datawixdcytd") - .withLinkedServiceName(new LinkedServiceReference() - .withReferenceName("am").withParameters(mapOf("zlbcamdzoauvwjkg", "datab"))), - "pgkwtpzbsytwthv", - new AzureMLWebServiceFile().withFilePath("datawcnnzacqludq") - .withLinkedServiceName(new LinkedServiceReference() - .withReferenceName("aqxztywzaq").withParameters(mapOf("lzpowse", "datatstmyfebbt"))), - "oeky", - new AzureMLWebServiceFile().withFilePath("datadtsvgyzmafqsn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("u") - .withParameters(mapOf("qyhr", "databyvwejyyngw"))), - "veyngzj", - new AzureMLWebServiceFile().withFilePath("datanvxco") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pdgnsmhrpzbyudko") - .withParameters(mapOf("d", "datajaaocjlwcouwcrex", "iukvmzxr", "datamkzb"))))); + .withGlobalParameters(mapOf("qlbjxps", "datarqponugco", "zygdkovytjsrboq", "datavvggvnqpar")) + .withWebServiceOutputs(mapOf("swlmxepygkfuwgkb", + new AzureMLWebServiceFile().withFilePath("dataqqvukjtcdppdmmf") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ubcccltygxzit") + .withParameters(mapOf("vydvdjc", "datab", "rfayd", "datadhar"))), + "lefpuxjhxsfb", + new AzureMLWebServiceFile().withFilePath("datajqtk") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zwmqzkjecz") + .withParameters(mapOf("jacvmhpueiuhhn", "datafew", "kvfjbxvhuili", "dataxnxxwafialipymn", + "ahqqumoz", "datadyuuotqpljw"))))) + .withWebServiceInputs(mapOf("ocjkqo", + new AzureMLWebServiceFile().withFilePath("dataaysmmztvkgxz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rhnkm") + .withParameters(mapOf("iypsmpgopcl", "datanplnnxopixx", "we", "datatysirhnwseb", + "ddcqteozayjim", "datasxrnji", "jtmdw", "datacb"))), + "peafzvxbvk", + new AzureMLWebServiceFile().withFilePath("datacfnomwaoebvjmmsg") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("koql") + .withParameters(mapOf("hztdzmeiicdybcor", "dataerztenzkbppg", "ztzhwbwrocuv", "datamdzafdqqjds", + "gzrt", "datapqag", "je", "datausxh"))), + "g", 
+ new AzureMLWebServiceFile().withFilePath("dataj") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nmoekohxkgxyd") + .withParameters(mapOf("wsage", "datapzetradbihn", "hoxux", "dataosbzydbt", "zeoxz", + "datavbsazzh", "rjfpqemylkyg", "datarhakhaec"))))); model = BinaryData.fromObject(model).toObject(AzureMLBatchExecutionActivity.class); - Assertions.assertEquals("al", model.name()); - Assertions.assertEquals("uztlxfgy", model.description()); + Assertions.assertEquals("ov", model.name()); + Assertions.assertEquals("x", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("dacskulfqcxz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("tfzaqnoq", model.userProperties().get(0).name()); - Assertions.assertEquals("bk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1087656358, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("iv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("jhukngdf", model.userProperties().get(0).name()); + Assertions.assertEquals("bv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1978374101, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("cfprioabqxwid", - model.webServiceOutputs().get("lxogimihxyxe").linkedServiceName().referenceName()); - Assertions.assertEquals("am", model.webServiceInputs().get("zco").linkedServiceName().referenceName()); + Assertions.assertEquals("ubcccltygxzit", + model.webServiceOutputs().get("swlmxepygkfuwgkb").linkedServiceName().referenceName()); + Assertions.assertEquals("rhnkm", model.webServiceInputs().get("ocjkqo").linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java index 5880d3710fa10..41fcb449ba86b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java @@ -16,46 +16,47 @@ public final class AzureMLBatchExecutionActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLBatchExecutionActivityTypeProperties model = BinaryData.fromString( - 
"{\"globalParameters\":{\"qtscduuywgcuvcfm\":\"datab\"},\"webServiceOutputs\":{\"bz\":{\"filePath\":\"datapvoazgtlxgtusw\",\"linkedServiceName\":{\"referenceName\":\"gtskolbjylostrc\",\"parameters\":{\"bwaiqs\":\"datace\"}}}},\"webServiceInputs\":{\"crkf\":{\"filePath\":\"datalphaul\",\"linkedServiceName\":{\"referenceName\":\"alspeanheswxll\",\"parameters\":{\"iycjulunbtuf\":\"datablfprskxhghvg\",\"njpivo\":\"datacipi\",\"ahdplicivoduda\":\"datazxkhoabmahj\",\"ubgrjkgkoxuedml\":\"databmjheyntsdwxpa\"}}},\"kofoqrvnhc\":{\"filePath\":\"datagjywp\",\"linkedServiceName\":{\"referenceName\":\"bvvjyenwvgvhhouh\",\"parameters\":{\"joycyvxbr\":\"datahwlkfljooiiviwlf\",\"trw\":\"datahwb\"}}},\"mridcy\":{\"filePath\":\"dataoghvkzmgvtempy\",\"linkedServiceName\":{\"referenceName\":\"jahwypdh\",\"parameters\":{\"xtg\":\"datajlsatoxsga\",\"ftlbtotu\":\"dataxmxgqgquulyrtk\",\"cwrykwmvcxyu\":\"datazasrwoxumnucqew\"}}},\"qugycorgnxmn\":{\"filePath\":\"datanz\",\"linkedServiceName\":{\"referenceName\":\"e\",\"parameters\":{\"lhxfmvngdrn\":\"datawgqis\",\"nbwdborjyprcojwi\":\"datayvnbhn\",\"eiftm\":\"datagtdjqczoqpkpi\",\"hlnaymsgbyho\":\"datazofont\"}}}}}") + "{\"globalParameters\":{\"ovburvekbknr\":\"dataet\",\"qbyoyhfbbbhxly\":\"datakxcpnxdzpfzmdsly\"},\"webServiceOutputs\":{\"svhsieev\":{\"filePath\":\"datalqprhnchpets\",\"linkedServiceName\":{\"referenceName\":\"bfmttpz\",\"parameters\":{\"dusizsnhekpcnq\":\"datampu\"}}},\"qmaiegrxoo\":{\"filePath\":\"datamubzlmmctdkzp\",\"linkedServiceName\":{\"referenceName\":\"ud\",\"parameters\":{\"res\":\"datagxrsxvzwnuib\",\"w\":\"datajohrvkpnma\"}}}},\"webServiceInputs\":{\"oaallveezesdn\":{\"filePath\":\"datalmbuzkayfjzyco\",\"linkedServiceName\":{\"referenceName\":\"wjpyf\",\"parameters\":{\"hrl\":\"datamgw\",\"cvaibzbvkoxl\":\"dataosqlfvsvwauqxh\",\"fbiodgtziylp\":\"datatv\"}}},\"mddv\":{\"filePath\":\"datalkzzlokmrudepzl\",\"linkedServiceName\":{\"referenceName\":\"uzcwlbefjh\",\"parameters\":{\"txttahsojgf\":\"datapv\",\"fmgudkfoybih\":\"datay\",\"tkkufbi\":\"datapnkwt\"}}},\"vumvhpichsbzgwif\":{\"filePath\":\"dataub\",\"linkedServiceName\":{\"referenceName\":\"iomfflrnggwujy\",\"parameters\":{\"fmuwhgxvgkbffqsi\":\"datafsbmrgbnrnihx\",\"tyhd\":\"datalaubijvavqq\",\"atzgxtasryivlfbr\":\"datakd\"}}}}}") .toObject(AzureMLBatchExecutionActivityTypeProperties.class); - Assertions.assertEquals("gtskolbjylostrc", - model.webServiceOutputs().get("bz").linkedServiceName().referenceName()); - Assertions.assertEquals("alspeanheswxll", - model.webServiceInputs().get("crkf").linkedServiceName().referenceName()); + Assertions.assertEquals("bfmttpz", + model.webServiceOutputs().get("svhsieev").linkedServiceName().referenceName()); + Assertions.assertEquals("wjpyf", + model.webServiceInputs().get("oaallveezesdn").linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureMLBatchExecutionActivityTypeProperties model - = new AzureMLBatchExecutionActivityTypeProperties().withGlobalParameters(mapOf("qtscduuywgcuvcfm", "datab")) - .withWebServiceOutputs(mapOf("bz", - new AzureMLWebServiceFile().withFilePath("datapvoazgtlxgtusw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gtskolbjylostrc") - .withParameters(mapOf("bwaiqs", "datace"))))) - .withWebServiceInputs(mapOf("crkf", - new AzureMLWebServiceFile().withFilePath("datalphaul") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("alspeanheswxll") - .withParameters(mapOf("iycjulunbtuf", "datablfprskxhghvg", "njpivo", 
"datacipi", - "ahdplicivoduda", "datazxkhoabmahj", "ubgrjkgkoxuedml", "databmjheyntsdwxpa"))), - "kofoqrvnhc", - new AzureMLWebServiceFile().withFilePath("datagjywp") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bvvjyenwvgvhhouh") - .withParameters(mapOf("joycyvxbr", "datahwlkfljooiiviwlf", "trw", "datahwb"))), - "mridcy", - new AzureMLWebServiceFile().withFilePath("dataoghvkzmgvtempy") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jahwypdh") - .withParameters(mapOf("xtg", "datajlsatoxsga", "ftlbtotu", "dataxmxgqgquulyrtk", - "cwrykwmvcxyu", "datazasrwoxumnucqew"))), - "qugycorgnxmn", - new AzureMLWebServiceFile().withFilePath("datanz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("lhxfmvngdrn", "datawgqis", "nbwdborjyprcojwi", "datayvnbhn", "eiftm", - "datagtdjqczoqpkpi", "hlnaymsgbyho", "datazofont"))))); + = new AzureMLBatchExecutionActivityTypeProperties() + .withGlobalParameters(mapOf("ovburvekbknr", "dataet", "qbyoyhfbbbhxly", "datakxcpnxdzpfzmdsly")) + .withWebServiceOutputs(mapOf("svhsieev", + new AzureMLWebServiceFile().withFilePath("datalqprhnchpets") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bfmttpz") + .withParameters(mapOf("dusizsnhekpcnq", "datampu"))), + "qmaiegrxoo", + new AzureMLWebServiceFile().withFilePath("datamubzlmmctdkzp") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ud") + .withParameters(mapOf("res", "datagxrsxvzwnuib", "w", "datajohrvkpnma"))))) + .withWebServiceInputs(mapOf("oaallveezesdn", + new AzureMLWebServiceFile().withFilePath("datalmbuzkayfjzyco") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wjpyf") + .withParameters(mapOf("hrl", "datamgw", "cvaibzbvkoxl", "dataosqlfvsvwauqxh", + "fbiodgtziylp", "datatv"))), + "mddv", + new AzureMLWebServiceFile().withFilePath("datalkzzlokmrudepzl") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uzcwlbefjh") + .withParameters( + mapOf("txttahsojgf", "datapv", "fmgudkfoybih", "datay", "tkkufbi", "datapnkwt"))), + "vumvhpichsbzgwif", + new AzureMLWebServiceFile().withFilePath("dataub") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("iomfflrnggwujy") + .withParameters(mapOf("fmuwhgxvgkbffqsi", "datafsbmrgbnrnihx", "tyhd", "datalaubijvavqq", + "atzgxtasryivlfbr", "datakd"))))); model = BinaryData.fromObject(model).toObject(AzureMLBatchExecutionActivityTypeProperties.class); - Assertions.assertEquals("gtskolbjylostrc", - model.webServiceOutputs().get("bz").linkedServiceName().referenceName()); - Assertions.assertEquals("alspeanheswxll", - model.webServiceInputs().get("crkf").linkedServiceName().referenceName()); + Assertions.assertEquals("bfmttpz", + model.webServiceOutputs().get("svhsieev").linkedServiceName().referenceName()); + Assertions.assertEquals("wjpyf", + model.webServiceInputs().get("oaallveezesdn").linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java index a35b6b13b6efc..403e5971ce24c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java @@ -22,59 +22,69 @@ public final class AzureMLExecutePipelineActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLExecutePipelineActivity model = BinaryData.fromString( - "{\"type\":\"AzureMLExecutePipeline\",\"typeProperties\":{\"mlPipelineId\":\"dataggyhpu\",\"mlPipelineEndpointId\":\"dataclehndbutptyabd\",\"version\":\"datakb\",\"experimentName\":\"datapemorfzuhvycd\",\"mlPipelineParameters\":\"dataczmzsfvriskplndd\",\"dataPathAssignments\":\"datacqinvkmkbtp\",\"mlParentRunId\":\"datathzmqabvwbgsanvd\",\"continueOnStepFailure\":\"datambxshrae\"},\"linkedServiceName\":{\"referenceName\":\"lhzme\",\"parameters\":{\"jeqdmolmcyba\":\"datazhrzeibkuuolul\",\"qamvdnexqvt\":\"datakeuraylygclwbu\",\"lkny\":\"datafnhzgtydllauno\",\"hhcqjahhcbzoary\":\"datapglgkeaz\"}},\"policy\":{\"timeout\":\"datamftgmql\",\"retry\":\"dataoyxfrrdbdyhjf\",\"retryIntervalInSeconds\":2106899128,\"secureInput\":false,\"secureOutput\":true,\"\":{\"mjcchqig\":\"datasyydbxlturln\",\"hltgteg\":\"dataamoz\"}},\"name\":\"nguvjryfcxscrs\",\"description\":\"tno\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"m\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"uetoeqfnhmrawm\":\"datanfgfsjptbysvw\",\"ewfjwfkwrthp\":\"datahcdegwtpva\"}},{\"activity\":\"qmtahnimkndujy\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Succeeded\"],\"\":{\"wafslytmttjduco\":\"datam\",\"ovtnfwpmpap\":\"dataxcdh\"}},{\"activity\":\"mpdsvki\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\",\"Completed\"],\"\":{\"dg\":\"datakjfvudigwkyykh\"}}],\"userProperties\":[{\"name\":\"aafjxgo\",\"value\":\"datatiupj\"}],\"\":{\"ncqoy\":\"datansuik\",\"bxywojuxyfp\":\"datahrb\"}}") + 
"{\"type\":\"qlvqkkceb\",\"typeProperties\":{\"mlPipelineId\":\"datawbxa\",\"mlPipelineEndpointId\":\"datajrxvthqjvoydeg\",\"version\":\"datawl\",\"experimentName\":\"datanypkppnz\",\"mlPipelineParameters\":\"datauafxwazfrs\",\"dataPathAssignments\":\"datagvfmbs\",\"mlParentRunId\":\"dataollntvfq\",\"continueOnStepFailure\":\"datafzfhspdsraxz\"},\"linkedServiceName\":{\"referenceName\":\"xkzcfxzcp\",\"parameters\":{\"zlreo\":\"dataqpwe\",\"mftziracz\":\"datasqqcqgnfdimgra\",\"ghznltjxstjge\":\"datalssqv\",\"jq\":\"datanjswnjoni\"}},\"policy\":{\"timeout\":\"dataxswineyjerf\",\"retry\":\"datamlppnmrftnf\",\"retryIntervalInSeconds\":155145237,\"secureInput\":false,\"secureOutput\":true,\"\":{\"gk\":\"datap\",\"zeemsfpmoiykts\":\"datafbfn\",\"cenk\":\"datansnikmwn\"}},\"name\":\"zoc\",\"description\":\"yshoeqpvkk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"vaiolfrce\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"jhocc\":\"datacpsviajksmwrbw\",\"pb\":\"datalayqskkpoufupad\"}},{\"activity\":\"z\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"nozsflnmjschttl\":\"datayzhbtnagkndn\",\"ch\":\"datartwrnuklshrqrh\",\"tvp\":\"datatbdx\"}},{\"activity\":\"drfxqudyadxnrtk\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"jm\":\"datanbrpvos\"}}],\"userProperties\":[{\"name\":\"gvyztoqd\",\"value\":\"datawubuqxsncrsrtqor\"},{\"name\":\"xeuwbfjzwisx\",\"value\":\"dataasgfmrzxzbuhqm\"},{\"name\":\"adyi\",\"value\":\"datakthrd\"}],\"\":{\"gkikb\":\"databogmfetqw\"}}") .toObject(AzureMLExecutePipelineActivity.class); - Assertions.assertEquals("nguvjryfcxscrs", model.name()); - Assertions.assertEquals("tno", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("m", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("aafjxgo", model.userProperties().get(0).name()); - Assertions.assertEquals("lhzme", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2106899128, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("zoc", model.name()); + Assertions.assertEquals("yshoeqpvkk", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("vaiolfrce", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("gvyztoqd", model.userProperties().get(0).name()); + Assertions.assertEquals("xkzcfxzcp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(155145237, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLExecutePipelineActivity model - = new AzureMLExecutePipelineActivity().withName("nguvjryfcxscrs").withDescription("tno") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("m").withDependencyConditions( - 
Arrays.asList(DependencyCondition.SUCCEEDED)).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qmtahnimkndujy") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mpdsvki") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("aafjxgo").withValue("datatiupj"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lhzme") - .withParameters(mapOf("jeqdmolmcyba", "datazhrzeibkuuolul", "qamvdnexqvt", "datakeuraylygclwbu", - "lkny", "datafnhzgtydllauno", "hhcqjahhcbzoary", "datapglgkeaz"))) - .withPolicy(new ActivityPolicy().withTimeout("datamftgmql").withRetry("dataoyxfrrdbdyhjf") - .withRetryIntervalInSeconds(2106899128).withSecureInput(false).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withMlPipelineId("dataggyhpu").withMlPipelineEndpointId("dataclehndbutptyabd").withVersion("datakb") - .withExperimentName("datapemorfzuhvycd").withMlPipelineParameters("dataczmzsfvriskplndd") - .withDataPathAssignments("datacqinvkmkbtp").withMlParentRunId("datathzmqabvwbgsanvd") - .withContinueOnStepFailure("datambxshrae"); + AzureMLExecutePipelineActivity model = new AzureMLExecutePipelineActivity().withName("zoc") + .withDescription("yshoeqpvkk") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("vaiolfrce") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, + DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("z") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("drfxqudyadxnrtk") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("gvyztoqd").withValue("datawubuqxsncrsrtqor"), + new UserProperty().withName("xeuwbfjzwisx").withValue("dataasgfmrzxzbuhqm"), + new UserProperty().withName("adyi").withValue("datakthrd"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xkzcfxzcp") + .withParameters(mapOf("zlreo", "dataqpwe", "mftziracz", "datasqqcqgnfdimgra", "ghznltjxstjge", + "datalssqv", "jq", "datanjswnjoni"))) + .withPolicy(new ActivityPolicy().withTimeout("dataxswineyjerf") + .withRetry("datamlppnmrftnf") + .withRetryIntervalInSeconds(155145237) + .withSecureInput(false) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withMlPipelineId("datawbxa") + .withMlPipelineEndpointId("datajrxvthqjvoydeg") + .withVersion("datawl") + .withExperimentName("datanypkppnz") + .withMlPipelineParameters("datauafxwazfrs") + .withDataPathAssignments("datagvfmbs") + .withMlParentRunId("dataollntvfq") + .withContinueOnStepFailure("datafzfhspdsraxz"); model = BinaryData.fromObject(model).toObject(AzureMLExecutePipelineActivity.class); - Assertions.assertEquals("nguvjryfcxscrs", model.name()); - Assertions.assertEquals("tno", 
model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("m", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("aafjxgo", model.userProperties().get(0).name()); - Assertions.assertEquals("lhzme", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2106899128, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("zoc", model.name()); + Assertions.assertEquals("yshoeqpvkk", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("vaiolfrce", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("gvyztoqd", model.userProperties().get(0).name()); + Assertions.assertEquals("xkzcfxzcp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(155145237, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java index 42c473864efa6..ca2c5dcb7e59e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java @@ -11,17 +11,21 @@ public final class AzureMLExecutePipelineActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLExecutePipelineActivityTypeProperties model = BinaryData.fromString( - "{\"mlPipelineId\":\"datablyeo\",\"mlPipelineEndpointId\":\"datathxkqczmfuhsupi\",\"version\":\"dataizkv\",\"experimentName\":\"datakhrphvmezdfad\",\"mlPipelineParameters\":\"dataok\",\"dataPathAssignments\":\"datambonureklgunpajw\",\"mlParentRunId\":\"dataxctdpj\",\"continueOnStepFailure\":\"dataujxxsmook\"}") + "{\"mlPipelineId\":\"dataqdopxbnrnn\",\"mlPipelineEndpointId\":\"datasso\",\"version\":\"dataypvdbpuywxyg\",\"experimentName\":\"datalqszwcwa\",\"mlPipelineParameters\":\"datas\",\"dataPathAssignments\":\"datatq\",\"mlParentRunId\":\"datamagoqfmks\",\"continueOnStepFailure\":\"dataesgdlskw\"}") .toObject(AzureMLExecutePipelineActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLExecutePipelineActivityTypeProperties model = new AzureMLExecutePipelineActivityTypeProperties() - .withMlPipelineId("datablyeo").withMlPipelineEndpointId("datathxkqczmfuhsupi").withVersion("dataizkv") - .withExperimentName("datakhrphvmezdfad").withMlPipelineParameters("dataok") - .withDataPathAssignments("datambonureklgunpajw").withMlParentRunId("dataxctdpj") - 
.withContinueOnStepFailure("dataujxxsmook"); + AzureMLExecutePipelineActivityTypeProperties model + = new AzureMLExecutePipelineActivityTypeProperties().withMlPipelineId("dataqdopxbnrnn") + .withMlPipelineEndpointId("datasso") + .withVersion("dataypvdbpuywxyg") + .withExperimentName("datalqszwcwa") + .withMlPipelineParameters("datas") + .withDataPathAssignments("datatq") + .withMlParentRunId("datamagoqfmks") + .withContinueOnStepFailure("dataesgdlskw"); model = BinaryData.fromObject(model).toObject(AzureMLExecutePipelineActivityTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java index b3683764964b2..55d5353dc1a75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java @@ -22,69 +22,59 @@ public final class AzureMLUpdateResourceActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLUpdateResourceActivity model = BinaryData.fromString( - "{\"type\":\"AzureMLUpdateResource\",\"typeProperties\":{\"trainedModelName\":\"datapkoezcab\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"ylsuiyvbildwqlx\",\"parameters\":{\"pylpmtwdvdtzdr\":\"dataqei\",\"urwzrx\":\"dataaxswiind\",\"mbtvcdsl\":\"datahacvsj\"}},\"trainedModelFilePath\":\"databv\"},\"linkedServiceName\":{\"referenceName\":\"nxhszrotunnkb\",\"parameters\":{\"lqyzrtawjkjzvvk\":\"datakaoonbzi\",\"seqxwcimam\":\"datahasxjmfh\",\"nrj\":\"dataqfrdfoiqfvczuu\",\"roxvsclmt\":\"datarpxlfyytjm\"}},\"policy\":{\"timeout\":\"datalfcgkdeitphzuazn\",\"retry\":\"datavubbestyymlj\",\"retryIntervalInSeconds\":235961975,\"secureInput\":false,\"secureOutput\":true,\"\":{\"fudranmd\":\"datanyxrizse\"}},\"name\":\"fwawzjhfauu\",\"description\":\"vnaf\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"mviclhommhaxtegr\",\"dependencyConditions\":[\"Completed\"],\"\":{\"jfzbavqmmk\":\"datan\"}},{\"activity\":\"qdfjeuwwq\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"qvywolccxdctkhe\":\"datakzplbzyj\",\"gemspn\":\"databosa\",\"hhexgxnmfodxiyz\":\"dataqo\"}},{\"activity\":\"fottycfostzd\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Completed\"],\"\":{\"xurrhpihtxgjzi\":\"databhahxs\"}},{\"activity\":\"yhujgrbjmzagxjoi\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"d\":\"datacrf\",\"jzquwjgfihlo\":\"datatcf\",\"efxvggkjbhs\":\"dataauorzbk\",\"yajijzrt\":\"datayy\"}}],\"userProperties\":[{\"name\":\"gonhmblkkel\",\"value\":\"datajk\"},{\"name\":\"emneu\",\"value\":\"datapynenca\"},{\"name\":\"kqvcf\",\"value\":\"datargwxgczwxyghs\"}],\"\":{\"hygbe\":\"datavxcrzpdqwa\",\"lmfh\":\"datafiwbtfki\"}}") + 
"{\"type\":\"frgvcoulxhpvursm\",\"typeProperties\":{\"trainedModelName\":\"datanangkcddwm\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"sa\",\"parameters\":{\"rtgrz\":\"datalwpa\",\"yzoiqaijnahelf\":\"datatpqvhkjbgcqqeyt\",\"p\":\"dataha\",\"jcrqatx\":\"dataakqg\"}},\"trainedModelFilePath\":\"dataekidjbsfpaomlgyn\"},\"linkedServiceName\":{\"referenceName\":\"pypsjokjjrj\",\"parameters\":{\"tcjimsge\":\"datalcjtm\"}},\"policy\":{\"timeout\":\"datawwedbcrkepjnyrtl\",\"retry\":\"dataijcxvqjwlid\",\"retryIntervalInSeconds\":1580319500,\"secureInput\":true,\"secureOutput\":false,\"\":{\"wvwluzspkqxbtkwa\":\"datakzqass\",\"zjtudnnswwgrqiq\":\"dataw\",\"bjxo\":\"datao\",\"anvjhoshinljquqe\":\"datamwnrw\"}},\"name\":\"umyxps\",\"description\":\"ypwvhjskkgsf\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"alp\",\"dependencyConditions\":[\"Completed\"],\"\":{\"hjfseh\":\"datawtf\",\"inzukzb\":\"datazjbuz\",\"dhleuabsiqnyjjfj\":\"databc\",\"up\":\"datags\"}}],\"userProperties\":[{\"name\":\"exafql\",\"value\":\"datajmbzph\"},{\"name\":\"krjsgbcroltddify\",\"value\":\"dataxe\"}],\"\":{\"ackfupyivqpczx\":\"databijaqyiyefleju\",\"bperkeyhybc\":\"datazlxowgzt\"}}") .toObject(AzureMLUpdateResourceActivity.class); - Assertions.assertEquals("fwawzjhfauu", model.name()); - Assertions.assertEquals("vnaf", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("umyxps", model.name()); + Assertions.assertEquals("ypwvhjskkgsf", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("mviclhommhaxtegr", model.dependsOn().get(0).activity()); + Assertions.assertEquals("alp", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gonhmblkkel", model.userProperties().get(0).name()); - Assertions.assertEquals("nxhszrotunnkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(235961975, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("ylsuiyvbildwqlx", model.trainedModelLinkedServiceName().referenceName()); + Assertions.assertEquals("exafql", model.userProperties().get(0).name()); + Assertions.assertEquals("pypsjokjjrj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1580319500, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("sa", model.trainedModelLinkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLUpdateResourceActivity model - = new AzureMLUpdateResourceActivity().withName("fwawzjhfauu").withDescription("vnaf") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mviclhommhaxtegr").withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED)).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qdfjeuwwq") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, 
DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("fottycfostzd") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yhujgrbjmzagxjoi") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gonhmblkkel").withValue("datajk"), - new UserProperty().withName("emneu").withValue("datapynenca"), - new UserProperty().withName("kqvcf").withValue("datargwxgczwxyghs"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nxhszrotunnkb") - .withParameters(mapOf("lqyzrtawjkjzvvk", "datakaoonbzi", "seqxwcimam", "datahasxjmfh", "nrj", - "dataqfrdfoiqfvczuu", "roxvsclmt", "datarpxlfyytjm"))) - .withPolicy(new ActivityPolicy().withTimeout("datalfcgkdeitphzuazn").withRetry("datavubbestyymlj") - .withRetryIntervalInSeconds(235961975).withSecureInput(false).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withTrainedModelName("datapkoezcab") - .withTrainedModelLinkedServiceName( - new LinkedServiceReference().withReferenceName("ylsuiyvbildwqlx").withParameters( - mapOf("pylpmtwdvdtzdr", "dataqei", "urwzrx", "dataaxswiind", "mbtvcdsl", "datahacvsj"))) - .withTrainedModelFilePath("databv"); + AzureMLUpdateResourceActivity model = new AzureMLUpdateResourceActivity().withName("umyxps") + .withDescription("ypwvhjskkgsf") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("alp") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("exafql").withValue("datajmbzph"), + new UserProperty().withName("krjsgbcroltddify").withValue("dataxe"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pypsjokjjrj") + .withParameters(mapOf("tcjimsge", "datalcjtm"))) + .withPolicy(new ActivityPolicy().withTimeout("datawwedbcrkepjnyrtl") + .withRetry("dataijcxvqjwlid") + .withRetryIntervalInSeconds(1580319500) + .withSecureInput(true) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withTrainedModelName("datanangkcddwm") + .withTrainedModelLinkedServiceName(new LinkedServiceReference().withReferenceName("sa") + .withParameters(mapOf("rtgrz", "datalwpa", "yzoiqaijnahelf", "datatpqvhkjbgcqqeyt", "p", "dataha", + "jcrqatx", "dataakqg"))) + .withTrainedModelFilePath("dataekidjbsfpaomlgyn"); model = BinaryData.fromObject(model).toObject(AzureMLUpdateResourceActivity.class); - Assertions.assertEquals("fwawzjhfauu", model.name()); - Assertions.assertEquals("vnaf", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("umyxps", model.name()); + Assertions.assertEquals("ypwvhjskkgsf", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("mviclhommhaxtegr", model.dependsOn().get(0).activity()); + Assertions.assertEquals("alp", model.dependsOn().get(0).activity()); 
Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gonhmblkkel", model.userProperties().get(0).name()); - Assertions.assertEquals("nxhszrotunnkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(235961975, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("ylsuiyvbildwqlx", model.trainedModelLinkedServiceName().referenceName()); + Assertions.assertEquals("exafql", model.userProperties().get(0).name()); + Assertions.assertEquals("pypsjokjjrj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1580319500, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("sa", model.trainedModelLinkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java index 8c0fc9771c2d2..7b6b9b60c4cf0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java @@ -15,21 +15,20 @@ public final class AzureMLUpdateResourceActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLUpdateResourceActivityTypeProperties model = BinaryData.fromString( - "{\"trainedModelName\":\"datazsxjrafhdf\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"ukaaw\",\"parameters\":{\"qhefeasmkdguodo\":\"dataijphhuvflgw\",\"vcp\":\"datajpwqbot\",\"qruympov\":\"dataxxpyrtajlydefqfv\",\"fo\":\"dataxbqdwbjhgjzvceyx\"}},\"trainedModelFilePath\":\"datauyk\"}") + "{\"trainedModelName\":\"dataxurdfzynfm\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"jqrnuo\",\"parameters\":{\"crutf\":\"datatzeauifc\"}},\"trainedModelFilePath\":\"datazdobh\"}") .toObject(AzureMLUpdateResourceActivityTypeProperties.class); - Assertions.assertEquals("ukaaw", model.trainedModelLinkedServiceName().referenceName()); + Assertions.assertEquals("jqrnuo", model.trainedModelLinkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLUpdateResourceActivityTypeProperties model - = new AzureMLUpdateResourceActivityTypeProperties().withTrainedModelName("datazsxjrafhdf") - .withTrainedModelLinkedServiceName(new LinkedServiceReference().withReferenceName("ukaaw") - .withParameters(mapOf("qhefeasmkdguodo", "dataijphhuvflgw", "vcp", "datajpwqbot", "qruympov", - "dataxxpyrtajlydefqfv", "fo", "dataxbqdwbjhgjzvceyx"))) - .withTrainedModelFilePath("datauyk"); + AzureMLUpdateResourceActivityTypeProperties model = new AzureMLUpdateResourceActivityTypeProperties() + .withTrainedModelName("dataxurdfzynfm") + .withTrainedModelLinkedServiceName( + new 
LinkedServiceReference().withReferenceName("jqrnuo").withParameters(mapOf("crutf", "datatzeauifc"))) + .withTrainedModelFilePath("datazdobh"); model = BinaryData.fromObject(model).toObject(AzureMLUpdateResourceActivityTypeProperties.class); - Assertions.assertEquals("ukaaw", model.trainedModelLinkedServiceName().referenceName()); + Assertions.assertEquals("jqrnuo", model.trainedModelLinkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java index 80221a82497b2..5241903a6023f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java @@ -15,18 +15,18 @@ public final class AzureMLWebServiceFileTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMLWebServiceFile model = BinaryData.fromString( - "{\"filePath\":\"dataennobjixoqqjbsag\",\"linkedServiceName\":{\"referenceName\":\"lpuqfmrimwlpa\",\"parameters\":{\"s\":\"dataxuiaktnmwlklqhw\",\"r\":\"dataeoefwnjsorhpga\"}}}") + "{\"filePath\":\"datarbjjswzk\",\"linkedServiceName\":{\"referenceName\":\"wtfe\",\"parameters\":{\"eb\":\"datahwtag\"}}}") .toObject(AzureMLWebServiceFile.class); - Assertions.assertEquals("lpuqfmrimwlpa", model.linkedServiceName().referenceName()); + Assertions.assertEquals("wtfe", model.linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMLWebServiceFile model = new AzureMLWebServiceFile().withFilePath("dataennobjixoqqjbsag") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lpuqfmrimwlpa") - .withParameters(mapOf("s", "dataxuiaktnmwlklqhw", "r", "dataeoefwnjsorhpga"))); + AzureMLWebServiceFile model = new AzureMLWebServiceFile().withFilePath("datarbjjswzk") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("wtfe").withParameters(mapOf("eb", "datahwtag"))); model = BinaryData.fromObject(model).toObject(AzureMLWebServiceFile.class); - Assertions.assertEquals("lpuqfmrimwlpa", model.linkedServiceName().referenceName()); + Assertions.assertEquals("wtfe", model.linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java index 5ef3c116dbb02..0ac43bf01d59c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java @@ -11,16 +11,19 @@ public final class AzureMariaDBSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMariaDBSource model = BinaryData.fromString( - 
"{\"type\":\"AzureMariaDBSource\",\"query\":\"dataht\",\"queryTimeout\":\"datauiptudw\",\"additionalColumns\":\"datasrpsjkqfabju\",\"sourceRetryCount\":\"datats\",\"sourceRetryWait\":\"dataupcio\",\"maxConcurrentConnections\":\"datarjdeyfnqanbadkzp\",\"disableMetricsCollection\":\"datatuplpkjexq\",\"\":{\"goeftrbxomaa\":\"datazlal\",\"gvjmllzykalbaumm\":\"datavarfqverxelquqze\",\"r\":\"datadwqiucpj\",\"ftt\":\"databssjtjwzelx\"}}") + "{\"type\":\"grtse\",\"query\":\"datarpjonmins\",\"queryTimeout\":\"datayigfdppgkk\",\"additionalColumns\":\"dataygjldljgd\",\"sourceRetryCount\":\"dataow\",\"sourceRetryWait\":\"dataxkofmtfwcu\",\"maxConcurrentConnections\":\"databnapzfdzmr\",\"disableMetricsCollection\":\"datarbclj\",\"\":{\"skjbasmrdpbmoq\":\"dataaawnzzlfve\",\"apmeomcpvmakdtg\":\"datasvukgfzby\",\"myewbfo\":\"datanyubnw\",\"etj\":\"datawv\"}}") .toObject(AzureMariaDBSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMariaDBSource model - = new AzureMariaDBSource().withSourceRetryCount("datats").withSourceRetryWait("dataupcio") - .withMaxConcurrentConnections("datarjdeyfnqanbadkzp").withDisableMetricsCollection("datatuplpkjexq") - .withQueryTimeout("datauiptudw").withAdditionalColumns("datasrpsjkqfabju").withQuery("dataht"); + AzureMariaDBSource model = new AzureMariaDBSource().withSourceRetryCount("dataow") + .withSourceRetryWait("dataxkofmtfwcu") + .withMaxConcurrentConnections("databnapzfdzmr") + .withDisableMetricsCollection("datarbclj") + .withQueryTimeout("datayigfdppgkk") + .withAdditionalColumns("dataygjldljgd") + .withQuery("datarpjonmins"); model = BinaryData.fromObject(model).toObject(AzureMariaDBSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java index 6753c810e73b9..0b523168b463b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java @@ -19,32 +19,31 @@ public final class AzureMariaDBTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMariaDBTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureMariaDBTable\",\"typeProperties\":{\"tableName\":\"datahvxjuai\"},\"description\":\"znirnygtix\",\"structure\":\"datayob\",\"schema\":\"dataphvd\",\"linkedServiceName\":{\"referenceName\":\"orxzpqdi\",\"parameters\":{\"tkehldopjsxvbb\":\"datacltfcieileem\"}},\"parameters\":{\"bn\":{\"type\":\"Array\",\"defaultValue\":\"datakm\"},\"zwmzhcmrloq\":{\"type\":\"Bool\",\"defaultValue\":\"dataidipwt\"},\"dnmbjqbngzldv\":{\"type\":\"Bool\",\"defaultValue\":\"datatyzavkyjjl\"}},\"annotations\":[\"dataoptythctoxo\"],\"folder\":{\"name\":\"qnerw\"},\"\":{\"pejomeqgxhwisp\":\"datavidsssfzsgzgu\",\"xirppbiichlygkv\":\"dataogdblwjsbaqxaxt\",\"wonkrnizdxywabki\":\"datai\",\"aptgvnaqyjukka\":\"datani\"}}") + 
"{\"type\":\"wapdunhdikatzmtu\",\"typeProperties\":{\"tableName\":\"dataoeqcrjvcjskqsfn\"},\"description\":\"nh\",\"structure\":\"datajkbi\",\"schema\":\"datalwzlv\",\"linkedServiceName\":{\"referenceName\":\"cmcu\",\"parameters\":{\"cfbaobo\":\"datanlodi\"}},\"parameters\":{\"ksaxyeedvpmodkt\":{\"type\":\"Float\",\"defaultValue\":\"datapsvax\"}},\"annotations\":[\"dataydvvgkmorbpcjesf\",\"datavuztnsvmsh\",\"datakg\",\"datagf\"],\"folder\":{\"name\":\"meh\"},\"\":{\"ivrfnztx\":\"datalpkryrcbmjjviu\",\"ohlgrjcx\":\"datamrmgftjvii\"}}") .toObject(AzureMariaDBTableDataset.class); - Assertions.assertEquals("znirnygtix", model.description()); - Assertions.assertEquals("orxzpqdi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("bn").type()); - Assertions.assertEquals("qnerw", model.folder().name()); + Assertions.assertEquals("nh", model.description()); + Assertions.assertEquals("cmcu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ksaxyeedvpmodkt").type()); + Assertions.assertEquals("meh", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMariaDBTableDataset model = new AzureMariaDBTableDataset().withDescription("znirnygtix") - .withStructure("datayob").withSchema("dataphvd") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("orxzpqdi") - .withParameters(mapOf("tkehldopjsxvbb", "datacltfcieileem"))) - .withParameters(mapOf("bn", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakm"), "zwmzhcmrloq", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataidipwt"), - "dnmbjqbngzldv", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datatyzavkyjjl"))) - .withAnnotations(Arrays.asList("dataoptythctoxo")).withFolder(new DatasetFolder().withName("qnerw")) - .withTableName("datahvxjuai"); + AzureMariaDBTableDataset model = new AzureMariaDBTableDataset().withDescription("nh") + .withStructure("datajkbi") + .withSchema("datalwzlv") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("cmcu").withParameters(mapOf("cfbaobo", "datanlodi"))) + .withParameters(mapOf("ksaxyeedvpmodkt", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapsvax"))) + .withAnnotations(Arrays.asList("dataydvvgkmorbpcjesf", "datavuztnsvmsh", "datakg", "datagf")) + .withFolder(new DatasetFolder().withName("meh")) + .withTableName("dataoeqcrjvcjskqsfn"); model = BinaryData.fromObject(model).toObject(AzureMariaDBTableDataset.class); - Assertions.assertEquals("znirnygtix", model.description()); - Assertions.assertEquals("orxzpqdi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("bn").type()); - Assertions.assertEquals("qnerw", model.folder().name()); + Assertions.assertEquals("nh", model.description()); + Assertions.assertEquals("cmcu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ksaxyeedvpmodkt").type()); + Assertions.assertEquals("meh", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java index 0746eb19e1b72..c3693bbbd5b68 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java @@ -11,16 +11,19 @@ public final class AzureMySqlSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMySqlSink model = BinaryData.fromString( - "{\"type\":\"AzureMySqlSink\",\"preCopyScript\":\"datasmgh\",\"writeBatchSize\":\"datatuujcuavctxyrmws\",\"writeBatchTimeout\":\"datazmy\",\"sinkRetryCount\":\"datan\",\"sinkRetryWait\":\"dataajxv\",\"maxConcurrentConnections\":\"dataidlwmewrgu\",\"disableMetricsCollection\":\"dataugpkunvygupgnnvm\",\"\":{\"ekmsn\":\"dataqmxww\",\"jypxcqmdeecdh\":\"datafjbefszfrxfy\",\"mykgrtwh\":\"datajsizyhp\"}}") + "{\"type\":\"kggbmzdnyrmolm\",\"preCopyScript\":\"dataehsnlmdosiyzfdc\",\"writeBatchSize\":\"datakcpumckcbsa\",\"writeBatchTimeout\":\"dataucsscwdqilz\",\"sinkRetryCount\":\"datai\",\"sinkRetryWait\":\"datarqzwypwh\",\"maxConcurrentConnections\":\"databflrpvcgqqxek\",\"disableMetricsCollection\":\"datahpsqvuised\",\"\":{\"btpvwx\":\"datavfjkxxnqrqdx\",\"zss\":\"datalsvicvpagwohkro\",\"jpiezthflgpsal\":\"datamlozjyovrllvhbgk\",\"wzpfbiqjrz\":\"datanan\"}}") .toObject(AzureMySqlSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMySqlSink model = new AzureMySqlSink().withWriteBatchSize("datatuujcuavctxyrmws") - .withWriteBatchTimeout("datazmy").withSinkRetryCount("datan").withSinkRetryWait("dataajxv") - .withMaxConcurrentConnections("dataidlwmewrgu").withDisableMetricsCollection("dataugpkunvygupgnnvm") - .withPreCopyScript("datasmgh"); + AzureMySqlSink model = new AzureMySqlSink().withWriteBatchSize("datakcpumckcbsa") + .withWriteBatchTimeout("dataucsscwdqilz") + .withSinkRetryCount("datai") + .withSinkRetryWait("datarqzwypwh") + .withMaxConcurrentConnections("databflrpvcgqqxek") + .withDisableMetricsCollection("datahpsqvuised") + .withPreCopyScript("dataehsnlmdosiyzfdc"); model = BinaryData.fromObject(model).toObject(AzureMySqlSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java index a783039895e2a..6ce8778711b9d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java @@ -11,15 +11,19 @@ public final class AzureMySqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMySqlSource model = BinaryData.fromString( - 
"{\"type\":\"AzureMySqlSource\",\"query\":\"datarzhjqengopdvnz\",\"queryTimeout\":\"dataliodajxvs\",\"additionalColumns\":\"datayvzm\",\"sourceRetryCount\":\"dataf\",\"sourceRetryWait\":\"datareawhnz\",\"maxConcurrentConnections\":\"datamue\",\"disableMetricsCollection\":\"databhnkles\",\"\":{\"cyrdtrd\":\"datavakqajian\",\"h\":\"datakdmsktuv\",\"toyfbsgrzw\":\"dataxtv\",\"ud\":\"dataw\"}}") + "{\"type\":\"jnnoot\",\"query\":\"datapvnvdzssssn\",\"queryTimeout\":\"dataotx\",\"additionalColumns\":\"databxzhad\",\"sourceRetryCount\":\"datayupaqdoodhnzkmj\",\"sourceRetryWait\":\"databyogwjr\",\"maxConcurrentConnections\":\"datanrykkh\",\"disableMetricsCollection\":\"datawohsj\",\"\":{\"vrixcveser\":\"dataphnlwey\"}}") .toObject(AzureMySqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMySqlSource model = new AzureMySqlSource().withSourceRetryCount("dataf").withSourceRetryWait("datareawhnz") - .withMaxConcurrentConnections("datamue").withDisableMetricsCollection("databhnkles") - .withQueryTimeout("dataliodajxvs").withAdditionalColumns("datayvzm").withQuery("datarzhjqengopdvnz"); + AzureMySqlSource model = new AzureMySqlSource().withSourceRetryCount("datayupaqdoodhnzkmj") + .withSourceRetryWait("databyogwjr") + .withMaxConcurrentConnections("datanrykkh") + .withDisableMetricsCollection("datawohsj") + .withQueryTimeout("dataotx") + .withAdditionalColumns("databxzhad") + .withQuery("datapvnvdzssssn"); model = BinaryData.fromObject(model).toObject(AzureMySqlSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java index c962b4856da71..93960d54b0797 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java @@ -19,31 +19,36 @@ public final class AzureMySqlTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMySqlTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureMySqlTable\",\"typeProperties\":{\"tableName\":\"dataw\",\"table\":\"datazyjj\"},\"description\":\"t\",\"structure\":\"datazql\",\"schema\":\"dataagwiijc\",\"linkedServiceName\":{\"referenceName\":\"qiywhxpsbapial\",\"parameters\":{\"zudegefxlieg\":\"dataydp\",\"smhssfnwh\":\"dataot\",\"nfmkcuft\":\"datakahhec\",\"dvhzfkdn\":\"datadgwuzron\"}},\"parameters\":{\"zfzdjekeb\":{\"type\":\"Object\",\"defaultValue\":\"datacikgxkk\"},\"jwyfi\":{\"type\":\"Array\",\"defaultValue\":\"dataxz\"}},\"annotations\":[\"datagcjf\",\"dataiwu\",\"datapjkakrxifqnf\"],\"folder\":{\"name\":\"xsqtzngxbs\"},\"\":{\"ly\":\"datawguxcmmhipbvskci\"}}") + 
"{\"type\":\"zq\",\"typeProperties\":{\"tableName\":\"dataozfrfawtnnsv\",\"table\":\"dataajynihtibu\"},\"description\":\"luctblf\",\"structure\":\"databgcgyohrcmeqlj\",\"schema\":\"dataumhycxonebld\",\"linkedServiceName\":{\"referenceName\":\"a\",\"parameters\":{\"ufzdtsrpju\":\"datafjwcngkwxjsjquvo\"}},\"parameters\":{\"nsbylgmgbh\":{\"type\":\"String\",\"defaultValue\":\"datarz\"},\"an\":{\"type\":\"Array\",\"defaultValue\":\"datazo\"},\"ztilqbzb\":{\"type\":\"Bool\",\"defaultValue\":\"datalvcxglxoqw\"},\"yodpiovnlhrwy\":{\"type\":\"Array\",\"defaultValue\":\"datawzhbhflj\"}},\"annotations\":[\"datauafapwxsvdeatjio\"],\"folder\":{\"name\":\"rgoextqdnw\"},\"\":{\"setk\":\"datatimznupbmt\",\"zmmmjyvdhdgdi\":\"datadsqvhedaakghc\"}}") .toObject(AzureMySqlTableDataset.class); - Assertions.assertEquals("t", model.description()); - Assertions.assertEquals("qiywhxpsbapial", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("zfzdjekeb").type()); - Assertions.assertEquals("xsqtzngxbs", model.folder().name()); + Assertions.assertEquals("luctblf", model.description()); + Assertions.assertEquals("a", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nsbylgmgbh").type()); + Assertions.assertEquals("rgoextqdnw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureMySqlTableDataset model - = new AzureMySqlTableDataset().withDescription("t").withStructure("datazql").withSchema("dataagwiijc") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qiywhxpsbapial") - .withParameters(mapOf("zudegefxlieg", "dataydp", "smhssfnwh", "dataot", "nfmkcuft", "datakahhec", - "dvhzfkdn", "datadgwuzron"))) - .withParameters(mapOf("zfzdjekeb", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datacikgxkk"), - "jwyfi", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxz"))) - .withAnnotations(Arrays.asList("datagcjf", "dataiwu", "datapjkakrxifqnf")) - .withFolder(new DatasetFolder().withName("xsqtzngxbs")).withTableName("dataw").withTable("datazyjj"); + AzureMySqlTableDataset model = new AzureMySqlTableDataset().withDescription("luctblf") + .withStructure("databgcgyohrcmeqlj") + .withSchema("dataumhycxonebld") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("a") + .withParameters(mapOf("ufzdtsrpju", "datafjwcngkwxjsjquvo"))) + .withParameters(mapOf("nsbylgmgbh", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datarz"), "an", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datazo"), "ztilqbzb", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalvcxglxoqw"), + "yodpiovnlhrwy", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datawzhbhflj"))) + .withAnnotations(Arrays.asList("datauafapwxsvdeatjio")) + .withFolder(new DatasetFolder().withName("rgoextqdnw")) + .withTableName("dataozfrfawtnnsv") + .withTable("dataajynihtibu"); model = BinaryData.fromObject(model).toObject(AzureMySqlTableDataset.class); - Assertions.assertEquals("t", model.description()); - Assertions.assertEquals("qiywhxpsbapial", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("zfzdjekeb").type()); - Assertions.assertEquals("xsqtzngxbs", model.folder().name()); + 
Assertions.assertEquals("luctblf", model.description()); + Assertions.assertEquals("a", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nsbylgmgbh").type()); + Assertions.assertEquals("rgoextqdnw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java index fbb1236dc17fc..4e38858651540 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class AzureMySqlTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureMySqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datawfsaa\",\"table\":\"datafgb\"}") + = BinaryData.fromString("{\"tableName\":\"datalgstmfetq\",\"table\":\"datasjmol\"}") .toObject(AzureMySqlTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureMySqlTableDatasetTypeProperties model - = new AzureMySqlTableDatasetTypeProperties().withTableName("datawfsaa").withTable("datafgb"); + = new AzureMySqlTableDatasetTypeProperties().withTableName("datalgstmfetq").withTable("datasjmol"); model = BinaryData.fromObject(model).toObject(AzureMySqlTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java index b47407f83306a..cb6d50d00315e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java @@ -11,16 +11,19 @@ public final class AzurePostgreSqlSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzurePostgreSqlSink model = BinaryData.fromString( - "{\"type\":\"AzurePostgreSqlSink\",\"preCopyScript\":\"datamhookefdgfexakct\",\"writeBatchSize\":\"datapszdn\",\"writeBatchTimeout\":\"datao\",\"sinkRetryCount\":\"dataqxmdievkmrso\",\"sinkRetryWait\":\"datayiheheimuqqmd\",\"maxConcurrentConnections\":\"datawxfmrm\",\"disableMetricsCollection\":\"dataf\",\"\":{\"xdldhhkdeviwp\":\"dataypmthfvszlaf\",\"nu\":\"datahfxvl\"}}") + "{\"type\":\"b\",\"preCopyScript\":\"datayumqoqw\",\"writeBatchSize\":\"datayeigngrzvegxmx\",\"writeBatchTimeout\":\"datahqxzewlwwdmp\",\"sinkRetryCount\":\"datacpccovzkwhdtf\",\"sinkRetryWait\":\"datafctsfujdap\",\"maxConcurrentConnections\":\"datagamgbnktg\",\"disableMetricsCollection\":\"dataddydbatexkwcoln\",\"\":{\"d\":\"datawsdyvahn\",\"mekgtkojrr\":\"datacpmvnzhdsa\"}}") .toObject(AzurePostgreSqlSink.class); } @org.junit.jupiter.api.Test public void testSerialize() 
throws Exception { - AzurePostgreSqlSink model = new AzurePostgreSqlSink().withWriteBatchSize("datapszdn") - .withWriteBatchTimeout("datao").withSinkRetryCount("dataqxmdievkmrso") - .withSinkRetryWait("datayiheheimuqqmd").withMaxConcurrentConnections("datawxfmrm") - .withDisableMetricsCollection("dataf").withPreCopyScript("datamhookefdgfexakct"); + AzurePostgreSqlSink model = new AzurePostgreSqlSink().withWriteBatchSize("datayeigngrzvegxmx") + .withWriteBatchTimeout("datahqxzewlwwdmp") + .withSinkRetryCount("datacpccovzkwhdtf") + .withSinkRetryWait("datafctsfujdap") + .withMaxConcurrentConnections("datagamgbnktg") + .withDisableMetricsCollection("dataddydbatexkwcoln") + .withPreCopyScript("datayumqoqw"); model = BinaryData.fromObject(model).toObject(AzurePostgreSqlSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java index a102a2399e19f..a866d0ac0dbbf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java @@ -11,16 +11,19 @@ public final class AzurePostgreSqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzurePostgreSqlSource model = BinaryData.fromString( - "{\"type\":\"AzurePostgreSqlSource\",\"query\":\"datahbfttptsdee\",\"queryTimeout\":\"dataovanag\",\"additionalColumns\":\"dataacsfbmb\",\"sourceRetryCount\":\"dataefqku\",\"sourceRetryWait\":\"datayumoamqxwluslxyt\",\"maxConcurrentConnections\":\"databjledjxblobknfpd\",\"disableMetricsCollection\":\"datahzgj\",\"\":{\"ccypxsrhbqlbnufl\":\"datamctbg\",\"xhbpyoqfbj\":\"datazawkkz\",\"jpjnhwwyhx\":\"dataclboi\",\"hhw\":\"dataythxzrvjfsmfk\"}}") + "{\"type\":\"mjazqlmigkx\",\"query\":\"datakditkwokefde\",\"queryTimeout\":\"datafnsy\",\"additionalColumns\":\"dataytrwyojhmgvm\",\"sourceRetryCount\":\"datashadnholkoyxms\",\"sourceRetryWait\":\"datadsqwvzunrqvu\",\"maxConcurrentConnections\":\"dataxs\",\"disableMetricsCollection\":\"dataqzdfjwo\",\"\":{\"hwaepg\":\"dataifrmoftilhoye\",\"qmfvzubmhsxtryy\":\"datadircdtkvorif\",\"uovturdhnnmhriz\":\"datavwm\",\"tnbvueizua\":\"datamptsygq\"}}") .toObject(AzurePostgreSqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzurePostgreSqlSource model - = new AzurePostgreSqlSource().withSourceRetryCount("dataefqku").withSourceRetryWait("datayumoamqxwluslxyt") - .withMaxConcurrentConnections("databjledjxblobknfpd").withDisableMetricsCollection("datahzgj") - .withQueryTimeout("dataovanag").withAdditionalColumns("dataacsfbmb").withQuery("datahbfttptsdee"); + AzurePostgreSqlSource model = new AzurePostgreSqlSource().withSourceRetryCount("datashadnholkoyxms") + .withSourceRetryWait("datadsqwvzunrqvu") + .withMaxConcurrentConnections("dataxs") + .withDisableMetricsCollection("dataqzdfjwo") + .withQueryTimeout("datafnsy") + .withAdditionalColumns("dataytrwyojhmgvm") + .withQuery("datakditkwokefde"); model = BinaryData.fromObject(model).toObject(AzurePostgreSqlSource.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java index 25d682c6c0274..ac895f853a678 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java @@ -19,37 +19,37 @@ public final class AzurePostgreSqlTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzurePostgreSqlTableDataset model = BinaryData.fromString( - "{\"type\":\"AzurePostgreSqlTable\",\"typeProperties\":{\"tableName\":\"datarllld\",\"table\":\"datanbdzwils\",\"schema\":\"datayiqjz\"},\"description\":\"rd\",\"structure\":\"datantpfxxgjahyxfw\",\"schema\":\"datat\",\"linkedServiceName\":{\"referenceName\":\"veyfbkqynlzxeme\",\"parameters\":{\"ehdmvfoyrxxxff\":\"datajck\",\"cgcsapvbcqpf\":\"datamcuanszeerv\",\"vpyr\":\"datasekijhminenkb\",\"fvvcwvurkmjufa\":\"datavorlfqmljewyn\"}},\"parameters\":{\"vrkkfcwxizkstxne\":{\"type\":\"SecureString\",\"defaultValue\":\"databsotmynklnmrznm\"},\"tc\":{\"type\":\"Object\",\"defaultValue\":\"dataipx\"},\"tvsayyaeiiv\":{\"type\":\"Array\",\"defaultValue\":\"dataiuvnfaz\"},\"xqetxtdqius\":{\"type\":\"Float\",\"defaultValue\":\"dataqtjwrvewojoq\"}},\"annotations\":[\"datazljvgjijzqjhljsa\",\"datamjsisfqqhc\",\"dataecagsbfeiirpn\"],\"folder\":{\"name\":\"llfkchhgs\"},\"\":{\"cabaam\":\"datazcajlwmqc\",\"dyoqywsuarpzhry\":\"datakhdhpmkxdujkxpuq\"}}") + "{\"type\":\"jvzmxyrazzstjvc\",\"typeProperties\":{\"tableName\":\"dataxfpzcuudq\",\"table\":\"datadxjvvlyibweuaugt\",\"schema\":\"datazncoqxtvytzqly\"},\"description\":\"bdbrl\",\"structure\":\"datazlty\",\"schema\":\"dataacbibtk\",\"linkedServiceName\":{\"referenceName\":\"iecup\",\"parameters\":{\"krlnrpeylfiiul\":\"datads\",\"hcxwwwvun\":\"datadgiql\",\"atrtcqyfjvifbmo\":\"datansgvxhxrm\",\"bhukdfpknv\":\"datatehqyoytrcoufkq\"}},\"parameters\":{\"hzjlrknckkfxm\":{\"type\":\"Float\",\"defaultValue\":\"datajezchmeo\"},\"fts\":{\"type\":\"Float\",\"defaultValue\":\"dataqkwqphfv\"},\"zgfctuuzow\":{\"type\":\"Bool\",\"defaultValue\":\"datalpxcachdt\"}},\"annotations\":[\"datavuxnx\",\"datauohshzultdbvm\"],\"folder\":{\"name\":\"ypngocbd\"},\"\":{\"zsuzgrzu\":\"dataivptb\",\"aatvogpyceinha\":\"dataekytkzvtv\",\"khwfjudapbq\":\"datahbdxsbypl\"}}") .toObject(AzurePostgreSqlTableDataset.class); - Assertions.assertEquals("rd", model.description()); - Assertions.assertEquals("veyfbkqynlzxeme", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("vrkkfcwxizkstxne").type()); - Assertions.assertEquals("llfkchhgs", model.folder().name()); + Assertions.assertEquals("bdbrl", model.description()); + Assertions.assertEquals("iecup", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hzjlrknckkfxm").type()); + Assertions.assertEquals("ypngocbd", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzurePostgreSqlTableDataset model = new AzurePostgreSqlTableDataset().withDescription("rd") - 
.withStructure("datantpfxxgjahyxfw").withSchema("datat") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("veyfbkqynlzxeme") - .withParameters(mapOf("ehdmvfoyrxxxff", "datajck", "cgcsapvbcqpf", "datamcuanszeerv", "vpyr", - "datasekijhminenkb", "fvvcwvurkmjufa", "datavorlfqmljewyn"))) - .withParameters(mapOf("vrkkfcwxizkstxne", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("databsotmynklnmrznm"), - "tc", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataipx"), - "tvsayyaeiiv", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataiuvnfaz"), - "xqetxtdqius", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqtjwrvewojoq"))) - .withAnnotations(Arrays.asList("datazljvgjijzqjhljsa", "datamjsisfqqhc", "dataecagsbfeiirpn")) - .withFolder(new DatasetFolder().withName("llfkchhgs")).withTableName("datarllld").withTable("datanbdzwils") - .withSchemaTypePropertiesSchema("datayiqjz"); + AzurePostgreSqlTableDataset model = new AzurePostgreSqlTableDataset().withDescription("bdbrl") + .withStructure("datazlty") + .withSchema("dataacbibtk") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("iecup") + .withParameters(mapOf("krlnrpeylfiiul", "datads", "hcxwwwvun", "datadgiql", "atrtcqyfjvifbmo", + "datansgvxhxrm", "bhukdfpknv", "datatehqyoytrcoufkq"))) + .withParameters(mapOf("hzjlrknckkfxm", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datajezchmeo"), "fts", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqkwqphfv"), + "zgfctuuzow", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalpxcachdt"))) + .withAnnotations(Arrays.asList("datavuxnx", "datauohshzultdbvm")) + .withFolder(new DatasetFolder().withName("ypngocbd")) + .withTableName("dataxfpzcuudq") + .withTable("datadxjvvlyibweuaugt") + .withSchemaTypePropertiesSchema("datazncoqxtvytzqly"); model = BinaryData.fromObject(model).toObject(AzurePostgreSqlTableDataset.class); - Assertions.assertEquals("rd", model.description()); - Assertions.assertEquals("veyfbkqynlzxeme", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("vrkkfcwxizkstxne").type()); - Assertions.assertEquals("llfkchhgs", model.folder().name()); + Assertions.assertEquals("bdbrl", model.description()); + Assertions.assertEquals("iecup", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hzjlrknckkfxm").type()); + Assertions.assertEquals("ypngocbd", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java index 9ae4d5e06a53b..2e1f906ca5c99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java @@ -10,15 +10,17 @@ public final class 
AzurePostgreSqlTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzurePostgreSqlTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datazx\",\"table\":\"datads\",\"schema\":\"databfjilbuazccouhw\"}") - .toObject(AzurePostgreSqlTableDatasetTypeProperties.class); + AzurePostgreSqlTableDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"dataez\",\"table\":\"datagdpftfcb\",\"schema\":\"datasrdp\"}") + .toObject(AzurePostgreSqlTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzurePostgreSqlTableDatasetTypeProperties model = new AzurePostgreSqlTableDatasetTypeProperties() - .withTableName("datazx").withTable("datads").withSchema("databfjilbuazccouhw"); + AzurePostgreSqlTableDatasetTypeProperties model + = new AzurePostgreSqlTableDatasetTypeProperties().withTableName("dataez") + .withTable("datagdpftfcb") + .withSchema("datasrdp"); model = BinaryData.fromObject(model).toObject(AzurePostgreSqlTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java index 315be195f4c94..e47e1870657f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java @@ -11,16 +11,18 @@ public final class AzureQueueSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureQueueSink model = BinaryData.fromString( - "{\"type\":\"AzureQueueSink\",\"writeBatchSize\":\"dataiottdawgkaohhtt\",\"writeBatchTimeout\":\"datahypidzjjjfcyskpn\",\"sinkRetryCount\":\"dataxoic\",\"sinkRetryWait\":\"datasmfvltbocqhv\",\"maxConcurrentConnections\":\"datam\",\"disableMetricsCollection\":\"datapvgri\",\"\":{\"fmfkuvybem\":\"datagrlgkoqbzrclarr\"}}") + "{\"type\":\"uxljiqyrwdmgr\",\"writeBatchSize\":\"datavewgvwmybokq\",\"writeBatchTimeout\":\"datahswbpjzoyzydlysz\",\"sinkRetryCount\":\"datapnwzpkisefy\",\"sinkRetryWait\":\"dataaumerkgmgqy\",\"maxConcurrentConnections\":\"datajqkasxiczv\",\"disableMetricsCollection\":\"dataoihcqxexbksa\",\"\":{\"gpszwvooxieyyww\":\"datacwyrtluujyesp\",\"fybktbviaqvzzszc\":\"dataiwiaqrc\",\"rxo\":\"dataw\",\"twwaxx\":\"datavygdefpy\"}}") .toObject(AzureQueueSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureQueueSink model = new AzureQueueSink().withWriteBatchSize("dataiottdawgkaohhtt") - .withWriteBatchTimeout("datahypidzjjjfcyskpn").withSinkRetryCount("dataxoic") - .withSinkRetryWait("datasmfvltbocqhv").withMaxConcurrentConnections("datam") - .withDisableMetricsCollection("datapvgri"); + AzureQueueSink model = new AzureQueueSink().withWriteBatchSize("datavewgvwmybokq") + .withWriteBatchTimeout("datahswbpjzoyzydlysz") + .withSinkRetryCount("datapnwzpkisefy") + .withSinkRetryWait("dataaumerkgmgqy") + .withMaxConcurrentConnections("datajqkasxiczv") + .withDisableMetricsCollection("dataoihcqxexbksa"); model = BinaryData.fromObject(model).toObject(AzureQueueSink.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java index 5b420cf7de533..6b7dea4f849f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java @@ -19,34 +19,32 @@ public final class AzureSearchIndexDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSearchIndexDataset model = BinaryData.fromString( - "{\"type\":\"AzureSearchIndex\",\"typeProperties\":{\"indexName\":\"datalivgti\"},\"description\":\"kqjqjcaj\",\"structure\":\"datafytkhhkemrv\",\"schema\":\"dataeoj\",\"linkedServiceName\":{\"referenceName\":\"dyulglhelwr\",\"parameters\":{\"gaex\":\"datafqfxspxgogypbz\",\"mb\":\"datanskvctvuz\",\"cyxrn\":\"datattmhlvr\",\"pdwzjggkwdep\":\"dataukfaj\"}},\"parameters\":{\"gtrttcuayiqylnk\":{\"type\":\"Float\",\"defaultValue\":\"datayfiqiidxcorjvudy\"},\"gqexowq\":{\"type\":\"Object\",\"defaultValue\":\"datazifb\"},\"kuobpw\":{\"type\":\"Array\",\"defaultValue\":\"datagqrqkkvfy\"},\"ewhpnyjt\":{\"type\":\"String\",\"defaultValue\":\"datapgobothx\"}},\"annotations\":[\"datazyvextchslro\",\"datadowuwhdlri\"],\"folder\":{\"name\":\"zttcbiich\"},\"\":{\"ycymzrlcfgdwzauz\":\"datadsozodwjcfqoyxry\",\"ilykqadfgesv\":\"datadheadnyciwz\",\"bri\":\"dataoha\",\"ljqovqmxqsxo\":\"datamadjrsbga\"}}") + "{\"type\":\"uiygtcyzcjefpub\",\"typeProperties\":{\"indexName\":\"datajyffwflbkjc\"},\"description\":\"djcgldry\",\"structure\":\"datar\",\"schema\":\"databhzirmx\",\"linkedServiceName\":{\"referenceName\":\"aujbfomfbozpj\",\"parameters\":{\"thpsnxebycymp\":\"datafppqcwdnnj\"}},\"parameters\":{\"sebcxno\":{\"type\":\"String\",\"defaultValue\":\"datannp\"},\"gspjlf\":{\"type\":\"Bool\",\"defaultValue\":\"datadyzssjlmykdygj\"}},\"annotations\":[\"datangwqxcrbcrgyoim\"],\"folder\":{\"name\":\"z\"},\"\":{\"kognhtvagwnn\":\"datat\"}}") .toObject(AzureSearchIndexDataset.class); - Assertions.assertEquals("kqjqjcaj", model.description()); - Assertions.assertEquals("dyulglhelwr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("gtrttcuayiqylnk").type()); - Assertions.assertEquals("zttcbiich", model.folder().name()); + Assertions.assertEquals("djcgldry", model.description()); + Assertions.assertEquals("aujbfomfbozpj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("sebcxno").type()); + Assertions.assertEquals("z", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSearchIndexDataset model = new AzureSearchIndexDataset().withDescription("kqjqjcaj") - .withStructure("datafytkhhkemrv").withSchema("dataeoj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dyulglhelwr") - .withParameters(mapOf("gaex", "datafqfxspxgogypbz", "mb", "datanskvctvuz", "cyxrn", "datattmhlvr", - "pdwzjggkwdep", "dataukfaj"))) - .withParameters(mapOf("gtrttcuayiqylnk", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayfiqiidxcorjvudy"), - "gqexowq", new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazifb"), - "kuobpw", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagqrqkkvfy"), - "ewhpnyjt", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datapgobothx"))) - .withAnnotations(Arrays.asList("datazyvextchslro", "datadowuwhdlri")) - .withFolder(new DatasetFolder().withName("zttcbiich")).withIndexName("datalivgti"); + AzureSearchIndexDataset model = new AzureSearchIndexDataset().withDescription("djcgldry") + .withStructure("datar") + .withSchema("databhzirmx") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aujbfomfbozpj") + .withParameters(mapOf("thpsnxebycymp", "datafppqcwdnnj"))) + .withParameters(mapOf("sebcxno", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datannp"), "gspjlf", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datadyzssjlmykdygj"))) + .withAnnotations(Arrays.asList("datangwqxcrbcrgyoim")) + .withFolder(new DatasetFolder().withName("z")) + .withIndexName("datajyffwflbkjc"); model = BinaryData.fromObject(model).toObject(AzureSearchIndexDataset.class); - Assertions.assertEquals("kqjqjcaj", model.description()); - Assertions.assertEquals("dyulglhelwr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("gtrttcuayiqylnk").type()); - Assertions.assertEquals("zttcbiich", model.folder().name()); + Assertions.assertEquals("djcgldry", model.description()); + Assertions.assertEquals("aujbfomfbozpj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("sebcxno").type()); + Assertions.assertEquals("z", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java index d247922da0d53..ed88b8afb249c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class AzureSearchIndexDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzureSearchIndexDatasetTypeProperties model = BinaryData.fromString("{\"indexName\":\"dataxqnkiuokg\"}") + AzureSearchIndexDatasetTypeProperties model = BinaryData.fromString("{\"indexName\":\"datacnvpnyldjdkjv\"}") .toObject(AzureSearchIndexDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureSearchIndexDatasetTypeProperties model - = new AzureSearchIndexDatasetTypeProperties().withIndexName("dataxqnkiuokg"); + = new AzureSearchIndexDatasetTypeProperties().withIndexName("datacnvpnyldjdkjv"); model = BinaryData.fromObject(model).toObject(AzureSearchIndexDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java index 61ab3a7dd34e8..419e5f6b3f9e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java @@ -13,16 +13,19 @@ public final class AzureSearchIndexSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSearchIndexSink model = BinaryData.fromString( - "{\"type\":\"AzureSearchIndexSink\",\"writeBehavior\":\"Merge\",\"writeBatchSize\":\"datayxauw\",\"writeBatchTimeout\":\"dataqofrkfccqjenz\",\"sinkRetryCount\":\"dataxazywijb\",\"sinkRetryWait\":\"dataae\",\"maxConcurrentConnections\":\"dataozbdwflxdwl\",\"disableMetricsCollection\":\"datataieledmiupddlet\",\"\":{\"ktsgcypjlmsta\":\"datadc\",\"k\":\"datagrebecxuuzeuklu\",\"kttxvmbedvvmr\":\"dataejamychwwrvvtj\"}}") + "{\"type\":\"ko\",\"writeBehavior\":\"Merge\",\"writeBatchSize\":\"dataquvf\",\"writeBatchTimeout\":\"dataaaozpcc\",\"sinkRetryCount\":\"datawfqtqbnakmgyd\",\"sinkRetryWait\":\"datakzgwf\",\"maxConcurrentConnections\":\"dataeqz\",\"disableMetricsCollection\":\"datae\",\"\":{\"qwfvxvoqbruyma\":\"datascrkkankjkszud\",\"fofxi\":\"dataj\",\"stuhlwzcn\":\"datajfmvydjax\",\"mnyfhkxcplhqzpw\":\"datavugb\"}}") .toObject(AzureSearchIndexSink.class); Assertions.assertEquals(AzureSearchIndexWriteBehaviorType.MERGE, model.writeBehavior()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSearchIndexSink model = new AzureSearchIndexSink().withWriteBatchSize("datayxauw") - .withWriteBatchTimeout("dataqofrkfccqjenz").withSinkRetryCount("dataxazywijb").withSinkRetryWait("dataae") - .withMaxConcurrentConnections("dataozbdwflxdwl").withDisableMetricsCollection("datataieledmiupddlet") + AzureSearchIndexSink model = new AzureSearchIndexSink().withWriteBatchSize("dataquvf") + .withWriteBatchTimeout("dataaaozpcc") + .withSinkRetryCount("datawfqtqbnakmgyd") + .withSinkRetryWait("datakzgwf") + .withMaxConcurrentConnections("dataeqz") + .withDisableMetricsCollection("datae") .withWriteBehavior(AzureSearchIndexWriteBehaviorType.MERGE); model = BinaryData.fromObject(model).toObject(AzureSearchIndexSink.class); Assertions.assertEquals(AzureSearchIndexWriteBehaviorType.MERGE, model.writeBehavior()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java index aa0fcd5d3bb1d..de68bfb0ca36c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java @@ -19,29 +19,34 @@ public final class AzureSqlDWTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlDWTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureSqlDWTable\",\"typeProperties\":{\"tableName\":\"dataxjfsg\",\"schema\":\"dataspoebnx\",\"table\":\"datacowscuyfqlam\"},\"description\":\"qhsujkafuzp\",\"structure\":\"dataqpwnikxkcajgr\",\"schema\":\"datact\",\"linkedServiceName\":{\"referenceName\":\"vgoo\",\"parameters\":{\"tm\":\"dataazmzlpcx\",\"ic\":\"dataxxr\"}},\"parameters\":{\"hkvpyeyoa\":{\"type\":\"SecureString\",\"defaultValue\":\"datajd\"}},\"annotations\":[\"datampnqup\",\"datakjr\"],\"folder\":{\"name\":\"ky\"},\"\":{\"hqdcclcvqsr\":\"databdx\"}}") + "{\"type\":\"gwlckihbamyqs\",\"typeProperties\":{\"tableName\":\"datax\",\"schema\":\"datazsvmaigb\",\"table\":\"datavhyejthgeecbp\"},\"description\":\"knp\",\"structure\":\"datazjwdizcr\",\"schema\":\"datax\",\"linkedServiceName\":{\"referenceName\":\"ujzkcsexgkrs\",\"parameters\":{\"nwzoknvu\":\"dataykkbxktxbbwl\",\"a\":\"dataddlggb\",\"lkvggcmfnsffet\":\"datalgzubak\",\"gsmepnqvxgvoh\":\"datakmixwewzls\"}},\"parameters\":{\"gannvwxqhp\":{\"type\":\"String\",\"defaultValue\":\"dataxmoevvudeepf\"}},\"annotations\":[\"databohxvzgaybvrh\",\"dataog\"],\"folder\":{\"name\":\"xnwfmzvzt\"},\"\":{\"fcssanybzzghvd\":\"databpamq\",\"jsvcdhlyw\":\"dataeum\"}}") .toObject(AzureSqlDWTableDataset.class); - Assertions.assertEquals("qhsujkafuzp", model.description()); - Assertions.assertEquals("vgoo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("hkvpyeyoa").type()); - Assertions.assertEquals("ky", model.folder().name()); + Assertions.assertEquals("knp", model.description()); + Assertions.assertEquals("ujzkcsexgkrs", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gannvwxqhp").type()); + Assertions.assertEquals("xnwfmzvzt", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlDWTableDataset model = new AzureSqlDWTableDataset().withDescription("qhsujkafuzp") - .withStructure("dataqpwnikxkcajgr").withSchema("datact") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vgoo") - .withParameters(mapOf("tm", "dataazmzlpcx", "ic", "dataxxr"))) - .withParameters(mapOf("hkvpyeyoa", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datajd"))) - .withAnnotations(Arrays.asList("datampnqup", "datakjr")).withFolder(new DatasetFolder().withName("ky")) - .withTableName("dataxjfsg").withSchemaTypePropertiesSchema("dataspoebnx").withTable("datacowscuyfqlam"); + AzureSqlDWTableDataset model = new AzureSqlDWTableDataset().withDescription("knp") + .withStructure("datazjwdizcr") + .withSchema("datax") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ujzkcsexgkrs") + .withParameters(mapOf("nwzoknvu", "dataykkbxktxbbwl", "a", "dataddlggb", "lkvggcmfnsffet", + "datalgzubak", "gsmepnqvxgvoh", "datakmixwewzls"))) + .withParameters(mapOf("gannvwxqhp", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxmoevvudeepf"))) + .withAnnotations(Arrays.asList("databohxvzgaybvrh", "dataog")) + .withFolder(new DatasetFolder().withName("xnwfmzvzt")) + .withTableName("datax") + .withSchemaTypePropertiesSchema("datazsvmaigb") + .withTable("datavhyejthgeecbp"); model = BinaryData.fromObject(model).toObject(AzureSqlDWTableDataset.class); - Assertions.assertEquals("qhsujkafuzp", model.description()); - Assertions.assertEquals("vgoo", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("hkvpyeyoa").type()); - Assertions.assertEquals("ky", model.folder().name()); + Assertions.assertEquals("knp", model.description()); + Assertions.assertEquals("ujzkcsexgkrs", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gannvwxqhp").type()); + Assertions.assertEquals("xnwfmzvzt", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java index f6199118a721e..127e58c2e9e55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class AzureSqlDWTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlDWTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"dataay\",\"schema\":\"datavwbzmfxlrymf\",\"table\":\"datalpiywqnpfydrfbg\"}") + .fromString("{\"tableName\":\"dataookju\",\"schema\":\"dataxezriw\",\"table\":\"dataewapcqksaaa\"}") .toObject(AzureSqlDWTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlDWTableDatasetTypeProperties model = new AzureSqlDWTableDatasetTypeProperties().withTableName("dataay") - .withSchema("datavwbzmfxlrymf").withTable("datalpiywqnpfydrfbg"); + AzureSqlDWTableDatasetTypeProperties model + = new AzureSqlDWTableDatasetTypeProperties().withTableName("dataookju") + .withSchema("dataxezriw") + .withTable("dataewapcqksaaa"); model = BinaryData.fromObject(model).toObject(AzureSqlDWTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java index bb929d0a92e10..5397a8a596a51 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java @@ -19,34 +19,33 @@ public final class AzureSqlMITableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlMITableDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureSqlMITable\",\"typeProperties\":{\"tableName\":\"dataczygpmgfjcu\",\"schema\":\"datajhhy\",\"table\":\"datadevfi\"},\"description\":\"motuzbybwjmtf\",\"structure\":\"datavelni\",\"schema\":\"datapk\",\"linkedServiceName\":{\"referenceName\":\"nstp\",\"parameters\":{\"vswmehfxrtt\":\"dataibjg\",\"ectcxsfmbzdx\":\"databmsennqfabqcama\"}},\"parameters\":{\"zyq\":{\"type\":\"Bool\",\"defaultValue\":\"datakdnnyufxuzms\"},\"ara\":{\"type\":\"Int\",\"defaultValue\":\"datanxhjtlxfikjk\"},\"zpcjcnwjzbqblxr\":{\"type\":\"Bool\",\"defaultValue\":\"datauasnjeglhtrxb\"},\"wsdsorg\":{\"type\":\"Float\",\"defaultValue\":\"datadsvoqiza\"}},\"annotations\":[\"dataxsawooauff\",\"dataxfqk\",\"datawzrdqyoybm\"],\"folder\":{\"name\":\"to\"},\"\":{\"rpqphkvyyzad\":\"datazdaiovrb\",\"yzvelffo\":\"datarxylaypd\"}}") + "{\"type\":\"fmsxjwdy\",\"typeProperties\":{\"tableName\":\"datamcreihu\",\"schema\":\"datarnig\",\"table\":\"datainuwqxungrob\"},\"description\":\"xmvzjow\",\"structure\":\"datageerclbl\",\"schema\":\"datahpwachyeu\",\"linkedServiceName\":{\"referenceName\":\"jwmvwryvdi\",\"parameters\":{\"txsytrtexegwmrq\":\"dataikgpruccwmec\"}},\"parameters\":{\"oqkajwjuriarsbc\":{\"type\":\"String\",\"defaultValue\":\"datahvycfjncindi\"}},\"annotations\":[\"datanhzcknjxizb\"],\"folder\":{\"name\":\"ygzkztxfexwacyy\"},\"\":{\"p\":\"datax\",\"b\":\"datandzkfevuiiui\"}}") .toObject(AzureSqlMITableDataset.class); - Assertions.assertEquals("motuzbybwjmtf", model.description()); - Assertions.assertEquals("nstp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zyq").type()); - Assertions.assertEquals("to", model.folder().name()); + Assertions.assertEquals("xmvzjow", model.description()); + Assertions.assertEquals("jwmvwryvdi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oqkajwjuriarsbc").type()); + Assertions.assertEquals("ygzkztxfexwacyy", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlMITableDataset model = new AzureSqlMITableDataset().withDescription("motuzbybwjmtf") - .withStructure("datavelni").withSchema("datapk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nstp") - .withParameters(mapOf("vswmehfxrtt", "dataibjg", "ectcxsfmbzdx", "databmsennqfabqcama"))) - .withParameters(mapOf("zyq", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datakdnnyufxuzms"), "ara", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datanxhjtlxfikjk"), - "zpcjcnwjzbqblxr", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datauasnjeglhtrxb"), - "wsdsorg", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datadsvoqiza"))) - .withAnnotations(Arrays.asList("dataxsawooauff", "dataxfqk", "datawzrdqyoybm")) - .withFolder(new DatasetFolder().withName("to")).withTableName("dataczygpmgfjcu") - .withSchemaTypePropertiesSchema("datajhhy").withTable("datadevfi"); + AzureSqlMITableDataset model = new AzureSqlMITableDataset().withDescription("xmvzjow") + .withStructure("datageerclbl") + .withSchema("datahpwachyeu") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jwmvwryvdi") + .withParameters(mapOf("txsytrtexegwmrq", "dataikgpruccwmec"))) + .withParameters(mapOf("oqkajwjuriarsbc", + new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datahvycfjncindi"))) + .withAnnotations(Arrays.asList("datanhzcknjxizb")) + .withFolder(new DatasetFolder().withName("ygzkztxfexwacyy")) + .withTableName("datamcreihu") + .withSchemaTypePropertiesSchema("datarnig") + .withTable("datainuwqxungrob"); model = BinaryData.fromObject(model).toObject(AzureSqlMITableDataset.class); - Assertions.assertEquals("motuzbybwjmtf", model.description()); - Assertions.assertEquals("nstp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zyq").type()); - Assertions.assertEquals("to", model.folder().name()); + Assertions.assertEquals("xmvzjow", model.description()); + Assertions.assertEquals("jwmvwryvdi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oqkajwjuriarsbc").type()); + Assertions.assertEquals("ygzkztxfexwacyy", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java index 2e8018d3ed869..c0c3b9ffad296 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class AzureSqlMITableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlMITableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datariwhjdfrwp\",\"schema\":\"datah\",\"table\":\"datankcclpctuog\"}") + .fromString("{\"tableName\":\"datacjy\",\"schema\":\"datadcizeqqfop\",\"table\":\"dataopmotdsf\"}") .toObject(AzureSqlMITableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlMITableDatasetTypeProperties model = new AzureSqlMITableDatasetTypeProperties() - .withTableName("datariwhjdfrwp").withSchema("datah").withTable("datankcclpctuog"); + AzureSqlMITableDatasetTypeProperties model = new AzureSqlMITableDatasetTypeProperties().withTableName("datacjy") + .withSchema("datadcizeqqfop") + .withTable("dataopmotdsf"); model = BinaryData.fromObject(model).toObject(AzureSqlMITableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java index 9709155118c11..525495f82f809 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java @@ -12,20 +12,27 @@ public final class AzureSqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlSource model = BinaryData.fromString( - 
"{\"type\":\"AzureSqlSource\",\"sqlReaderQuery\":\"datak\",\"sqlReaderStoredProcedureName\":\"datayu\",\"storedProcedureParameters\":\"dataemrclsxgpkyetmt\",\"isolationLevel\":\"dataihixisdvy\",\"produceAdditionalTypes\":\"datakeqg\",\"partitionOption\":\"datajsbtosiwcve\",\"partitionSettings\":{\"partitionColumnName\":\"datahbw\",\"partitionUpperBound\":\"dataoc\",\"partitionLowerBound\":\"datazlfhhwdajfth\"},\"queryTimeout\":\"datawuomjd\",\"additionalColumns\":\"dataufqhq\",\"sourceRetryCount\":\"dataio\",\"sourceRetryWait\":\"datahxxbneiobubtpyem\",\"maxConcurrentConnections\":\"datawkryzrdqpkqfuv\",\"disableMetricsCollection\":\"datai\",\"\":{\"jgmfyernckg\":\"databhmy\",\"kafiokeolzizfbun\":\"datawiq\"}}") + "{\"type\":\"hcyno\",\"sqlReaderQuery\":\"dataf\",\"sqlReaderStoredProcedureName\":\"dataks\",\"storedProcedureParameters\":\"dataehrajbatgmxkolt\",\"isolationLevel\":\"dataqjcmkpxbc\",\"produceAdditionalTypes\":\"datarfkwc\",\"partitionOption\":\"datamyowddhtwaxob\",\"partitionSettings\":{\"partitionColumnName\":\"datatqoc\",\"partitionUpperBound\":\"datadjp\",\"partitionLowerBound\":\"datac\"},\"queryTimeout\":\"datahocyvymvnla\",\"additionalColumns\":\"dataitxoibfo\",\"sourceRetryCount\":\"datahbvbqxtk\",\"sourceRetryWait\":\"dataeuapomoofbnbhp\",\"maxConcurrentConnections\":\"dataaljcqp\",\"disableMetricsCollection\":\"datamathiydmkyvsxc\",\"\":{\"mfmkpjoe\":\"dataghajpddgfoz\",\"drrruy\":\"dataozcuhunmfzbmwp\",\"bivgmckxhmxz\":\"datanoiumuxn\",\"biojncgjo\":\"datampoiutaatv\"}}") .toObject(AzureSqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlSource model = new AzureSqlSource().withSourceRetryCount("dataio") - .withSourceRetryWait("datahxxbneiobubtpyem").withMaxConcurrentConnections("datawkryzrdqpkqfuv") - .withDisableMetricsCollection("datai").withQueryTimeout("datawuomjd").withAdditionalColumns("dataufqhq") - .withSqlReaderQuery("datak").withSqlReaderStoredProcedureName("datayu") - .withStoredProcedureParameters("dataemrclsxgpkyetmt").withIsolationLevel("dataihixisdvy") - .withProduceAdditionalTypes("datakeqg").withPartitionOption("datajsbtosiwcve") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datahbw") - .withPartitionUpperBound("dataoc").withPartitionLowerBound("datazlfhhwdajfth")); + AzureSqlSource model = new AzureSqlSource().withSourceRetryCount("datahbvbqxtk") + .withSourceRetryWait("dataeuapomoofbnbhp") + .withMaxConcurrentConnections("dataaljcqp") + .withDisableMetricsCollection("datamathiydmkyvsxc") + .withQueryTimeout("datahocyvymvnla") + .withAdditionalColumns("dataitxoibfo") + .withSqlReaderQuery("dataf") + .withSqlReaderStoredProcedureName("dataks") + .withStoredProcedureParameters("dataehrajbatgmxkolt") + .withIsolationLevel("dataqjcmkpxbc") + .withProduceAdditionalTypes("datarfkwc") + .withPartitionOption("datamyowddhtwaxob") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datatqoc") + .withPartitionUpperBound("datadjp") + .withPartitionLowerBound("datac")); model = BinaryData.fromObject(model).toObject(AzureSqlSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java index 103d2c33d8e40..2f499e517422e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java @@ -19,35 +19,34 @@ public final class AzureSqlTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSqlTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureSqlTable\",\"typeProperties\":{\"tableName\":\"databs\",\"schema\":\"datalonbzaowcahdkmb\",\"table\":\"datamihrijezbfsj\"},\"description\":\"czglkvbgukbsvb\",\"structure\":\"dataotygnbknhjg\",\"schema\":\"dataxaxw\",\"linkedServiceName\":{\"referenceName\":\"ffaspsdzkucsz\",\"parameters\":{\"zrn\":\"dataoaqipmnxclfrs\",\"wvpu\":\"datau\",\"n\":\"datafddtbfmekjcng\",\"aoy\":\"datadv\"}},\"parameters\":{\"nofxlttxoqx\":{\"type\":\"SecureString\",\"defaultValue\":\"datayxzmx\"},\"kcjhmmofbnivd\":{\"type\":\"Float\",\"defaultValue\":\"datazujsjirkrp\"},\"caccptbzetxyg\":{\"type\":\"SecureString\",\"defaultValue\":\"dataykpaxnlsfgny\"},\"eoxmpzzw\":{\"type\":\"Int\",\"defaultValue\":\"dataceecvjwyu\"}},\"annotations\":[\"datardvhaztkxbi\",\"datazfgxmbry\"],\"folder\":{\"name\":\"ibio\"},\"\":{\"wdrtxtfdaglmrco\":\"datasykqfd\",\"hubymfp\":\"datazzertkounzsiy\"}}") + "{\"type\":\"ihwpadhedbfobd\",\"typeProperties\":{\"tableName\":\"datacfddofxnfb\",\"schema\":\"datayyrqaedwov\",\"table\":\"dataytjgoeayokrw\"},\"description\":\"vothmkhjaoz\",\"structure\":\"datawfcn\",\"schema\":\"databpoelhscmyhrhjv\",\"linkedServiceName\":{\"referenceName\":\"zfq\",\"parameters\":{\"joevzzuf\":\"datandwpppqw\",\"krbuoggtdltlcuha\":\"datatdxmlynzlyvap\"}},\"parameters\":{\"tqkrvmhvbvvcpwtq\":{\"type\":\"String\",\"defaultValue\":\"dataaylzeohlps\"},\"ffetevrnt\":{\"type\":\"Array\",\"defaultValue\":\"datapnhmzy\"}},\"annotations\":[\"datawacycsyotctkhf\",\"datafsatvcsx\"],\"folder\":{\"name\":\"nmizhv\"},\"\":{\"obzgott\":\"dataqqwcublehhkp\",\"h\":\"dataksadzi\"}}") .toObject(AzureSqlTableDataset.class); - Assertions.assertEquals("czglkvbgukbsvb", model.description()); - Assertions.assertEquals("ffaspsdzkucsz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("nofxlttxoqx").type()); - Assertions.assertEquals("ibio", model.folder().name()); + Assertions.assertEquals("vothmkhjaoz", model.description()); + Assertions.assertEquals("zfq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tqkrvmhvbvvcpwtq").type()); + Assertions.assertEquals("nmizhv", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlTableDataset model = new AzureSqlTableDataset().withDescription("czglkvbgukbsvb") - .withStructure("dataotygnbknhjg").withSchema("dataxaxw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ffaspsdzkucsz").withParameters( - mapOf("zrn", "dataoaqipmnxclfrs", "wvpu", "datau", "n", "datafddtbfmekjcng", "aoy", "datadv"))) - .withParameters(mapOf("nofxlttxoqx", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datayxzmx"), - "kcjhmmofbnivd", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datazujsjirkrp"), - "caccptbzetxyg", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataykpaxnlsfgny"), - 
"eoxmpzzw", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataceecvjwyu"))) - .withAnnotations(Arrays.asList("datardvhaztkxbi", "datazfgxmbry")) - .withFolder(new DatasetFolder().withName("ibio")).withTableName("databs") - .withSchemaTypePropertiesSchema("datalonbzaowcahdkmb").withTable("datamihrijezbfsj"); + AzureSqlTableDataset model = new AzureSqlTableDataset().withDescription("vothmkhjaoz") + .withStructure("datawfcn") + .withSchema("databpoelhscmyhrhjv") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zfq") + .withParameters(mapOf("joevzzuf", "datandwpppqw", "krbuoggtdltlcuha", "datatdxmlynzlyvap"))) + .withParameters(mapOf("tqkrvmhvbvvcpwtq", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataaylzeohlps"), + "ffetevrnt", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapnhmzy"))) + .withAnnotations(Arrays.asList("datawacycsyotctkhf", "datafsatvcsx")) + .withFolder(new DatasetFolder().withName("nmizhv")) + .withTableName("datacfddofxnfb") + .withSchemaTypePropertiesSchema("datayyrqaedwov") + .withTable("dataytjgoeayokrw"); model = BinaryData.fromObject(model).toObject(AzureSqlTableDataset.class); - Assertions.assertEquals("czglkvbgukbsvb", model.description()); - Assertions.assertEquals("ffaspsdzkucsz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("nofxlttxoqx").type()); - Assertions.assertEquals("ibio", model.folder().name()); + Assertions.assertEquals("vothmkhjaoz", model.description()); + Assertions.assertEquals("zfq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tqkrvmhvbvvcpwtq").type()); + Assertions.assertEquals("nmizhv", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java index 01510dd2350b9..d4d78af047313 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java @@ -10,15 +10,17 @@ public final class AzureSqlTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzureSqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataikze\",\"schema\":\"datannf\",\"table\":\"datatkqowsd\"}") - .toObject(AzureSqlTableDatasetTypeProperties.class); + AzureSqlTableDatasetTypeProperties model = BinaryData + .fromString( + "{\"tableName\":\"datatbtdvu\",\"schema\":\"datavvraabeurdeewlsu\",\"table\":\"datacbwkdwjyj\"}") + .toObject(AzureSqlTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSqlTableDatasetTypeProperties model = new AzureSqlTableDatasetTypeProperties().withTableName("dataikze") - .withSchema("datannf").withTable("datatkqowsd"); + AzureSqlTableDatasetTypeProperties model = new AzureSqlTableDatasetTypeProperties().withTableName("datatbtdvu") + 
.withSchema("datavvraabeurdeewlsu") + .withTable("datacbwkdwjyj"); model = BinaryData.fromObject(model).toObject(AzureSqlTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java index f36e10a03f63b..28c414630b355 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java @@ -18,31 +18,33 @@ public final class AzureSynapseArtifactsLinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureSynapseArtifactsLinkedService model = BinaryData.fromString( - "{\"type\":\"AzureSynapseArtifacts\",\"typeProperties\":{\"endpoint\":\"datahflnlutacsslel\",\"authentication\":\"dataukqurrtcf\",\"workspaceResourceId\":\"datat\"},\"connectVia\":{\"referenceName\":\"yrsleghozsmjj\",\"parameters\":{\"fsxvqqu\":\"datamozryyyvlxmspjq\",\"putizpvvihgxsd\":\"datazoblxxk\",\"lnarjtuoarjir\":\"datalodfodokhaogewd\"}},\"description\":\"cuyqt\",\"parameters\":{\"nvpzdbzpizgau\":{\"type\":\"Object\",\"defaultValue\":\"datajgmgfayvbsia\"},\"xorrceom\":{\"type\":\"String\",\"defaultValue\":\"datauwfy\"},\"n\":{\"type\":\"Float\",\"defaultValue\":\"datarbtrk\"},\"hg\":{\"type\":\"Object\",\"defaultValue\":\"dataijpjiudnustbmox\"}},\"annotations\":[\"datahbhue\"],\"\":{\"frhyxlwq\":\"datarlxzqzj\"}}") + "{\"type\":\"efekfxmgjywwidnr\",\"typeProperties\":{\"endpoint\":\"datagfpqxse\",\"authentication\":\"datazkpdmmowftfrqeb\",\"workspaceResourceId\":\"dataop\"},\"connectVia\":{\"referenceName\":\"dkvhui\",\"parameters\":{\"adokuqnkoskflnj\":\"datahodisypgapfdwhwb\",\"rjee\":\"datay\"}},\"description\":\"bhemrhbzetsspw\",\"parameters\":{\"e\":{\"type\":\"Int\",\"defaultValue\":\"datatb\"},\"cscootfsgilwis\":{\"type\":\"Bool\",\"defaultValue\":\"datarlieeocyarvsfz\"},\"ogknocshmpcjqt\":{\"type\":\"SecureString\",\"defaultValue\":\"datapzitustrtrf\"}},\"annotations\":[\"datairbrvz\",\"datafjqpxydpamctzmwr\",\"dataccdgunsjssre\",\"datajsgkouenpgkxyr\"],\"\":{\"hyekggo\":\"datab\"}}") .toObject(AzureSynapseArtifactsLinkedService.class); - Assertions.assertEquals("yrsleghozsmjj", model.connectVia().referenceName()); - Assertions.assertEquals("cuyqt", model.description()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("nvpzdbzpizgau").type()); + Assertions.assertEquals("dkvhui", model.connectVia().referenceName()); + Assertions.assertEquals("bhemrhbzetsspw", model.description()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("e").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { AzureSynapseArtifactsLinkedService model = new AzureSynapseArtifactsLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("yrsleghozsmjj") - .withParameters(mapOf("fsxvqqu", "datamozryyyvlxmspjq", "putizpvvihgxsd", "datazoblxxk", - "lnarjtuoarjir", "datalodfodokhaogewd"))) - .withDescription("cuyqt") - .withParameters(mapOf("nvpzdbzpizgau", - new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datajgmgfayvbsia"), - "xorrceom", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datauwfy"), - "n", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarbtrk"), "hg", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataijpjiudnustbmox"))) - .withAnnotations(Arrays.asList("datahbhue")).withEndpoint("datahflnlutacsslel") - .withAuthentication("dataukqurrtcf").withWorkspaceResourceId("datat"); + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("dkvhui") + .withParameters(mapOf("adokuqnkoskflnj", "datahodisypgapfdwhwb", "rjee", "datay"))) + .withDescription("bhemrhbzetsspw") + .withParameters(mapOf("e", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatb"), "cscootfsgilwis", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datarlieeocyarvsfz"), + "ogknocshmpcjqt", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datapzitustrtrf"))) + .withAnnotations( + Arrays.asList("datairbrvz", "datafjqpxydpamctzmwr", "dataccdgunsjssre", "datajsgkouenpgkxyr")) + .withEndpoint("datagfpqxse") + .withAuthentication("datazkpdmmowftfrqeb") + .withWorkspaceResourceId("dataop"); model = BinaryData.fromObject(model).toObject(AzureSynapseArtifactsLinkedService.class); - Assertions.assertEquals("yrsleghozsmjj", model.connectVia().referenceName()); - Assertions.assertEquals("cuyqt", model.description()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("nvpzdbzpizgau").type()); + Assertions.assertEquals("dkvhui", model.connectVia().referenceName()); + Assertions.assertEquals("bhemrhbzetsspw", model.description()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("e").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java index 92882ca81c56f..05812a2fc93a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java @@ -10,15 +10,18 @@ public final class AzureSynapseArtifactsLinkedServiceTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzureSynapseArtifactsLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"endpoint\":\"dataousqmer\",\"authentication\":\"datajpl\",\"workspaceResourceId\":\"dataemkyouwmjxhm\"}") + AzureSynapseArtifactsLinkedServiceTypeProperties model = BinaryData + .fromString( + "{\"endpoint\":\"dataw\",\"authentication\":\"datavu\",\"workspaceResourceId\":\"dataqmwqsgy\"}") .toObject(AzureSynapseArtifactsLinkedServiceTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureSynapseArtifactsLinkedServiceTypeProperties model = new AzureSynapseArtifactsLinkedServiceTypeProperties() - 
.withEndpoint("dataousqmer").withAuthentication("datajpl").withWorkspaceResourceId("dataemkyouwmjxhm"); + AzureSynapseArtifactsLinkedServiceTypeProperties model + = new AzureSynapseArtifactsLinkedServiceTypeProperties().withEndpoint("dataw") + .withAuthentication("datavu") + .withWorkspaceResourceId("dataqmwqsgy"); model = BinaryData.fromObject(model).toObject(AzureSynapseArtifactsLinkedServiceTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java index 64345d059b56c..218df2a0d6852 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java @@ -19,30 +19,31 @@ public final class AzureTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureTable\",\"typeProperties\":{\"tableName\":\"dataexvtemasp\"},\"description\":\"nydscdkx\",\"structure\":\"datapwjc\",\"schema\":\"dataaahntofelfh\",\"linkedServiceName\":{\"referenceName\":\"fixoskk\",\"parameters\":{\"ujybsrwz\":\"dataiv\",\"t\":\"datamr\",\"ikesmkwtzgfr\":\"datadhmfppinm\"}},\"parameters\":{\"btqhvmmniiqyhol\":{\"type\":\"String\",\"defaultValue\":\"dataerxlobk\"},\"nq\":{\"type\":\"String\",\"defaultValue\":\"dataskbggi\"}},\"annotations\":[\"datatmwpblxk\"],\"folder\":{\"name\":\"gvxrktjcjigc\"},\"\":{\"efpgeedyyb\":\"datapanbqxasevc\"}}") + "{\"type\":\"v\",\"typeProperties\":{\"tableName\":\"datarbvgwxhlxr\"},\"description\":\"rwynb\",\"structure\":\"datavazoymdvh\",\"schema\":\"datalkhw\",\"linkedServiceName\":{\"referenceName\":\"dkatveqmgk\",\"parameters\":{\"buklvsmfasg\":\"datazeyxryearmhp\"}},\"parameters\":{\"s\":{\"type\":\"String\",\"defaultValue\":\"datapoil\"}},\"annotations\":[\"datamcezevftmhllpok\",\"datayghz\",\"datams\"],\"folder\":{\"name\":\"tpcflcezsw\"},\"\":{\"jtvbfp\":\"dataift\",\"jnqswxd\":\"datahruptsyq\",\"ohclqddnhfkneb\":\"datawumxqukrcd\",\"yzcwy\":\"dataeddp\"}}") .toObject(AzureTableDataset.class); - Assertions.assertEquals("nydscdkx", model.description()); - Assertions.assertEquals("fixoskk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("btqhvmmniiqyhol").type()); - Assertions.assertEquals("gvxrktjcjigc", model.folder().name()); + Assertions.assertEquals("rwynb", model.description()); + Assertions.assertEquals("dkatveqmgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("s").type()); + Assertions.assertEquals("tpcflcezsw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureTableDataset model = new AzureTableDataset().withDescription("nydscdkx").withStructure("datapwjc") - .withSchema("dataaahntofelfh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fixoskk") - .withParameters(mapOf("ujybsrwz", "dataiv", "t", "datamr", "ikesmkwtzgfr", "datadhmfppinm"))) - .withParameters(mapOf("btqhvmmniiqyhol", - new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataerxlobk"), "nq", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataskbggi"))) - .withAnnotations(Arrays.asList("datatmwpblxk")).withFolder(new DatasetFolder().withName("gvxrktjcjigc")) - .withTableName("dataexvtemasp"); + AzureTableDataset model = new AzureTableDataset().withDescription("rwynb") + .withStructure("datavazoymdvh") + .withSchema("datalkhw") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dkatveqmgk") + .withParameters(mapOf("buklvsmfasg", "datazeyxryearmhp"))) + .withParameters( + mapOf("s", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datapoil"))) + .withAnnotations(Arrays.asList("datamcezevftmhllpok", "datayghz", "datams")) + .withFolder(new DatasetFolder().withName("tpcflcezsw")) + .withTableName("datarbvgwxhlxr"); model = BinaryData.fromObject(model).toObject(AzureTableDataset.class); - Assertions.assertEquals("nydscdkx", model.description()); - Assertions.assertEquals("fixoskk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("btqhvmmniiqyhol").type()); - Assertions.assertEquals("gvxrktjcjigc", model.folder().name()); + Assertions.assertEquals("rwynb", model.description()); + Assertions.assertEquals("dkatveqmgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("s").type()); + Assertions.assertEquals("tpcflcezsw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java index c5663f916ec26..ee4f97ac889d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java @@ -10,14 +10,13 @@ public final class AzureTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - AzureTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datauholaemwcgimmri\"}") + AzureTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datasmkaqldqab\"}") .toObject(AzureTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureTableDatasetTypeProperties model - = new AzureTableDatasetTypeProperties().withTableName("datauholaemwcgimmri"); + AzureTableDatasetTypeProperties model = new AzureTableDatasetTypeProperties().withTableName("datasmkaqldqab"); model = BinaryData.fromObject(model).toObject(AzureTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java index b9995d8dad020..535af087ef4ef 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java @@ -11,17 +11,20 @@ public final class AzureTableSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { AzureTableSource model = BinaryData.fromString( - "{\"type\":\"AzureTableSource\",\"azureTableSourceQuery\":\"dataeomufazacwzb\",\"azureTableSourceIgnoreTableNotFound\":\"datapbsspexejhwpnjco\",\"queryTimeout\":\"datajzbovuvmdz\",\"additionalColumns\":\"datatirguajs\",\"sourceRetryCount\":\"dataecbowkhm\",\"sourceRetryWait\":\"datafllpdn\",\"maxConcurrentConnections\":\"dataujxoueqljz\",\"disableMetricsCollection\":\"datancaeykueatztnp\",\"\":{\"gsqwiubgbltjy\":\"datahlniahvl\",\"uwyluktz\":\"datasjs\"}}") + "{\"type\":\"bii\",\"azureTableSourceQuery\":\"dataqeply\",\"azureTableSourceIgnoreTableNotFound\":\"dataad\",\"queryTimeout\":\"datajvyintgkve\",\"additionalColumns\":\"dataeldnmb\",\"sourceRetryCount\":\"databkxiujaagfeiwuux\",\"sourceRetryWait\":\"datamzmsivqeg\",\"maxConcurrentConnections\":\"datafzbrha\",\"disableMetricsCollection\":\"dataptkr\",\"\":{\"yxyoyjasqdhbftt\":\"datapziievcttszca\"}}") .toObject(AzureTableSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - AzureTableSource model = new AzureTableSource().withSourceRetryCount("dataecbowkhm") - .withSourceRetryWait("datafllpdn").withMaxConcurrentConnections("dataujxoueqljz") - .withDisableMetricsCollection("datancaeykueatztnp").withQueryTimeout("datajzbovuvmdz") - .withAdditionalColumns("datatirguajs").withAzureTableSourceQuery("dataeomufazacwzb") - .withAzureTableSourceIgnoreTableNotFound("datapbsspexejhwpnjco"); + AzureTableSource model = new AzureTableSource().withSourceRetryCount("databkxiujaagfeiwuux") + .withSourceRetryWait("datamzmsivqeg") + .withMaxConcurrentConnections("datafzbrha") + .withDisableMetricsCollection("dataptkr") + .withQueryTimeout("datajvyintgkve") + .withAdditionalColumns("dataeldnmb") + .withAzureTableSourceQuery("dataqeply") + .withAzureTableSourceIgnoreTableNotFound("dataad"); model = BinaryData.fromObject(model).toObject(AzureTableSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java index 0378d08709e66..2244b71ffb218 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java @@ -13,15 +13,16 @@ public final class BigDataPoolParametrizationReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BigDataPoolParametrizationReference model - = BinaryData.fromString("{\"type\":\"BigDataPoolReference\",\"referenceName\":\"datacuiipnszrrmq\"}") + = BinaryData.fromString("{\"type\":\"BigDataPoolReference\",\"referenceName\":\"datauycnayhodtugrwp\"}") .toObject(BigDataPoolParametrizationReference.class); 
Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BigDataPoolParametrizationReference model = new BigDataPoolParametrizationReference() - .withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE).withReferenceName("datacuiipnszrrmq"); + BigDataPoolParametrizationReference model + = new BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) + .withReferenceName("datauycnayhodtugrwp"); model = BinaryData.fromObject(model).toObject(BigDataPoolParametrizationReference.class); Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.type()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java index 01fbaac5b8327..47435d323ac13 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java @@ -21,34 +21,37 @@ public final class BinaryDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BinaryDataset model = BinaryData.fromString( - "{\"type\":\"Binary\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datagvp\",\"fileName\":\"datargjjktfinfhoksmm\",\"\":{\"icruo\":\"datawk\",\"qpqsdoctpzpuj\":\"datakrjflsgaojb\"}},\"compression\":{\"type\":\"datalmcds\",\"level\":\"dataceluj\",\"\":{\"wlfxzfw\":\"datalluunxh\",\"kgrvfpsjd\":\"datageupcknece\",\"qpdzf\":\"datangsy\",\"awoxcgzbe\":\"dataxcn\"}}},\"description\":\"fbifopfjxdwdrp\",\"structure\":\"dataq\",\"schema\":\"datarfm\",\"linkedServiceName\":{\"referenceName\":\"wotfcuu\",\"parameters\":{\"nzmec\":\"datajigpgayiawohfmh\",\"gloiovsl\":\"datajkmqenhaidzrpv\",\"pijpkhc\":\"dataivqsuvwtenb\",\"xukuicjuftekio\":\"dataoa\"}},\"parameters\":{\"zubfjzabbwz\":{\"type\":\"Bool\",\"defaultValue\":\"dataewfhvpxjh\"}},\"annotations\":[\"datauaixcdckix\",\"dataps\",\"dataigavk\",\"datavyxzer\"],\"folder\":{\"name\":\"kpzjbyetjxryopt\"},\"\":{\"bpemnrrabovr\":\"datatwhlbecgih\",\"pskpeswyhhmif\":\"datawxywpjhspboxhif\",\"y\":\"datauajxwwvcmmpeg\"}}") + "{\"type\":\"zkucszghdoaqipmn\",\"typeProperties\":{\"location\":{\"type\":\"qyho\",\"folderPath\":\"datajn\",\"fileName\":\"databggicnqwlctmw\",\"\":{\"jigcwtspanb\":\"dataxkrkqgvxrktj\",\"yy\":\"dataxasevchefpgee\"}},\"compression\":{\"type\":\"datauholaemwcgimmri\",\"level\":\"dataabsqqlon\",\"\":{\"ahdkmbjsmihrij\":\"dataow\",\"bgukbsvbwyoty\":\"datazbfsjwfczglk\",\"nhjgclxaxwcffasp\":\"datanb\"}}},\"description\":\"lfrsbzrnmuv\",\"structure\":\"datapu\",\"schema\":\"dataddtbfmekjcnginxd\",\"linkedServiceName\":{\"referenceName\":\"maoyqxfv\",\"parameters\":{\"xoqxtdnzujsj\":\"datazmxynofxlt\",\"ivd\":\"datarkrpskcjhmmofb\",\"accptbzetxy\":\"datatkykpaxnlsfgnys\"}},\"parameters\":{\"pzzwahdrdvhaztk\":{\"type\":\"Int\",\"defaultValue\":\"dataeecvjwyuveox\"}},\"annotations\":[\"datavz\"],\"folder\":{\"name\":\"mbrygmw\"},\"\":{\"fdaglmrcokzzert\":\"dataosiqsykqfdqwdrtx\",\"bym\":\"dataounzsiywh\"}}") .toObject(BinaryDataset.class); - Assertions.assertEquals("fbifopfjxdwdrp", model.description()); - 
Assertions.assertEquals("wotfcuu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); - Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); + Assertions.assertEquals("lfrsbzrnmuv", model.description()); + Assertions.assertEquals("maoyqxfv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("pzzwahdrdvhaztk").type()); + Assertions.assertEquals("mbrygmw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BinaryDataset model - = new BinaryDataset().withDescription("fbifopfjxdwdrp").withStructure("dataq").withSchema("datarfm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wotfcuu") - .withParameters(mapOf("nzmec", "datajigpgayiawohfmh", "gloiovsl", "datajkmqenhaidzrpv", "pijpkhc", - "dataivqsuvwtenb", "xukuicjuftekio", "dataoa"))) - .withParameters(mapOf("zubfjzabbwz", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataewfhvpxjh"))) - .withAnnotations(Arrays.asList("datauaixcdckix", "dataps", "dataigavk", "datavyxzer")) - .withFolder(new DatasetFolder().withName("kpzjbyetjxryopt")) - .withLocation(new DatasetLocation().withFolderPath("datagvp").withFileName("datargjjktfinfhoksmm") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withCompression(new DatasetCompression().withType("datalmcds").withLevel("dataceluj") - .withAdditionalProperties(mapOf())); + BinaryDataset model = new BinaryDataset().withDescription("lfrsbzrnmuv") + .withStructure("datapu") + .withSchema("dataddtbfmekjcnginxd") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("maoyqxfv") + .withParameters(mapOf("xoqxtdnzujsj", "datazmxynofxlt", "ivd", "datarkrpskcjhmmofb", "accptbzetxy", + "datatkykpaxnlsfgnys"))) + .withParameters(mapOf("pzzwahdrdvhaztk", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataeecvjwyuveox"))) + .withAnnotations(Arrays.asList("datavz")) + .withFolder(new DatasetFolder().withName("mbrygmw")) + .withLocation(new DatasetLocation().withFolderPath("datajn") + .withFileName("databggicnqwlctmw") + .withAdditionalProperties(mapOf("type", "qyho"))) + .withCompression(new DatasetCompression().withType("datauholaemwcgimmri") + .withLevel("dataabsqqlon") + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(BinaryDataset.class); - Assertions.assertEquals("fbifopfjxdwdrp", model.description()); - Assertions.assertEquals("wotfcuu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); - Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); + Assertions.assertEquals("lfrsbzrnmuv", model.description()); + Assertions.assertEquals("maoyqxfv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("pzzwahdrdvhaztk").type()); + Assertions.assertEquals("mbrygmw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java index 5b50ce3758d03..976b48ec99511 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java @@ -15,16 +15,18 @@ public final class BinaryDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BinaryDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datavcmlaexbzbquf\",\"fileName\":\"dataezs\",\"\":{\"ime\":\"dataymldrorhyogzm\",\"m\":\"datatcuuwdhtqqhyhn\",\"wnwngh\":\"datatnsugisno\",\"cjixxf\":\"datajovkeyym\"}},\"compression\":{\"type\":\"datapcrtn\",\"level\":\"datauefxxijtebdveywe\",\"\":{\"wepf\":\"datahlolmc\",\"bv\":\"datasv\",\"l\":\"dataqdljnpe\",\"detawevxehue\":\"datan\"}}}") + "{\"location\":{\"type\":\"opikzebqnnfywtkq\",\"folderPath\":\"datasdlkhcz\",\"fileName\":\"datapmgfjculojhhylx\",\"\":{\"bwjmtftcveln\":\"datafiyymotuzb\",\"j\":\"datarupkqnstpaqpi\"}},\"compression\":{\"type\":\"datavswmehfxrtt\",\"level\":\"datamsennqfabqcama\",\"\":{\"kdnnyufxuzms\":\"datatcxsfmbzdxmsyn\",\"brnxhjtlxfikj\":\"datazyq\"}}}") .toObject(BinaryDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { BinaryDatasetTypeProperties model = new BinaryDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datavcmlaexbzbquf").withFileName("dataezs") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withCompression(new DatasetCompression().withType("datapcrtn").withLevel("datauefxxijtebdveywe") + .withLocation(new DatasetLocation().withFolderPath("datasdlkhcz") + .withFileName("datapmgfjculojhhylx") + .withAdditionalProperties(mapOf("type", "opikzebqnnfywtkq"))) + .withCompression(new DatasetCompression().withType("datavswmehfxrtt") + .withLevel("datamsennqfabqcama") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(BinaryDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java index 3628aa6f6f094..02ff7c19cf845 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java @@ -14,14 +14,14 @@ public final class BinaryReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BinaryReadSettings model = BinaryData.fromString( - "{\"type\":\"BinaryReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"wigif\":\"datazpuz\"}},\"\":{\"tlpshxjhanskooal\":\"datays\"}}") + "{\"type\":\"mxdrgimsioff\",\"compressionProperties\":{\"type\":\"zluilzgpghjakzmn\",\"\":{\"pbxvpfyupgo\":\"dataqmajslwmj\",\"frkzgtxwyqkk\":\"datarwpoxuykqyoyjptk\"}},\"\":{\"vfundkhdmyxmsbt\":\"dataonl\"}}") .toObject(BinaryReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { BinaryReadSettings model = new BinaryReadSettings().withCompressionProperties( - new 
CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "zluilzgpghjakzmn"))); model = BinaryData.fromObject(model).toObject(BinaryReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java index 082cd74dd47a6..7eb3cd2c6b018 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java @@ -16,19 +16,25 @@ public final class BinarySinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BinarySink model = BinaryData.fromString( - "{\"type\":\"BinarySink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataoogijiqwxw\",\"disableMetricsCollection\":\"databdhqnprb\",\"copyBehavior\":\"datauhdjzivl\",\"metadata\":[{\"name\":\"datamq\",\"value\":\"datambfpt\"}],\"\":{\"zulmqxficinwij\":\"datamksxxbdtjvvngnvk\",\"er\":\"dataeyxt\",\"ytten\":\"datattobosjxb\",\"lyimhspj\":\"datauditumyycvtya\"}},\"writeBatchSize\":\"dataivxblqvwhjgt\",\"writeBatchTimeout\":\"datareoutqohp\",\"sinkRetryCount\":\"dataqi\",\"sinkRetryWait\":\"databougcwzgdfdrdx\",\"maxConcurrentConnections\":\"datatkgezul\",\"disableMetricsCollection\":\"datatp\",\"\":{\"eoy\":\"dataejxjhlxoljbp\",\"havwhrivvzrc\":\"datayk\",\"eearbbxaneviqk\":\"datayfrxlsypwu\"}}") + "{\"type\":\"jyfbutqlotojfvba\",\"storeSettings\":{\"type\":\"islcfxs\",\"maxConcurrentConnections\":\"datadiqemcghorrj\",\"disableMetricsCollection\":\"datafczb\",\"copyBehavior\":\"datarmvhtmzwg\",\"metadata\":[{\"name\":\"datanzpybrflqv\",\"value\":\"datalqwpmmmhupv\"},{\"name\":\"datahpsugebgboq\",\"value\":\"dataiiiwu\"},{\"name\":\"dataofgfqge\",\"value\":\"dataypxm\"}],\"\":{\"fdsogl\":\"datajonasjdaxe\",\"duwncaifwogq\":\"dataih\",\"fachkzzn\":\"datadxtp\",\"oxhzcmgm\":\"datakmmsfnig\"}},\"writeBatchSize\":\"datawjgjlo\",\"writeBatchTimeout\":\"datahhkxlquupbihui\",\"sinkRetryCount\":\"datay\",\"sinkRetryWait\":\"dataqweptejryvvuktc\",\"maxConcurrentConnections\":\"datatp\",\"disableMetricsCollection\":\"datawjbmrkcqpyxjj\",\"\":{\"lyvqycknqmb\":\"dataovhuif\",\"i\":\"datassjbyneusnncnn\"}}") .toObject(BinarySink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BinarySink model = new BinarySink().withWriteBatchSize("dataivxblqvwhjgt") - .withWriteBatchTimeout("datareoutqohp").withSinkRetryCount("dataqi").withSinkRetryWait("databougcwzgdfdrdx") - .withMaxConcurrentConnections("datatkgezul").withDisableMetricsCollection("datatp") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataoogijiqwxw") - .withDisableMetricsCollection("databdhqnprb").withCopyBehavior("datauhdjzivl") - .withMetadata(Arrays.asList(new MetadataItem().withName("datamq").withValue("datambfpt"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))); + BinarySink model = new BinarySink().withWriteBatchSize("datawjgjlo") + .withWriteBatchTimeout("datahhkxlquupbihui") + .withSinkRetryCount("datay") + .withSinkRetryWait("dataqweptejryvvuktc") + .withMaxConcurrentConnections("datatp") + 
.withDisableMetricsCollection("datawjbmrkcqpyxjj") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datadiqemcghorrj") + .withDisableMetricsCollection("datafczb") + .withCopyBehavior("datarmvhtmzwg") + .withMetadata(Arrays.asList(new MetadataItem().withName("datanzpybrflqv").withValue("datalqwpmmmhupv"), + new MetadataItem().withName("datahpsugebgboq").withValue("dataiiiwu"), + new MetadataItem().withName("dataofgfqge").withValue("dataypxm"))) + .withAdditionalProperties(mapOf("type", "islcfxs"))); model = BinaryData.fromObject(model).toObject(BinarySink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java index 7820561f1cb07..89a35c111fed4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java @@ -16,20 +16,21 @@ public final class BinarySourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BinarySource model = BinaryData.fromString( - "{\"type\":\"BinarySource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataliteenaheecsf\",\"disableMetricsCollection\":\"datasubzfuhjnmdcyrbz\",\"\":{\"dxtwyxpkwwdkkvd\":\"dataqgvt\",\"iluexvml\":\"datavdvkeyqxjchdnlx\",\"lbqkguchd\":\"dataxqpsqpfxjwt\"}},\"formatSettings\":{\"type\":\"BinaryReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"gddkujvqzcuqculw\":\"datadjikiqtzu\",\"tjcrp\":\"dataxryy\"}},\"\":{\"dldm\":\"dataxlfxsetvdz\",\"iuxotbvflgkkiu\":\"datafqftywbbanzhd\",\"fobpyeo\":\"datahop\"}},\"sourceRetryCount\":\"datataevqafdhpkiiu\",\"sourceRetryWait\":\"datarobckelo\",\"maxConcurrentConnections\":\"datamrvdtqhrtnqssqy\",\"disableMetricsCollection\":\"datapskitokphamefzz\",\"\":{\"fgq\":\"dataoauedmdpndouylf\",\"mzassr\":\"datanaokxouknz\",\"btxtdmutdrrqq\":\"dataqzuk\",\"twqjft\":\"datajhklttliuwd\"}}") + "{\"type\":\"pyavcbmzembv\",\"storeSettings\":{\"type\":\"vhf\",\"maxConcurrentConnections\":\"dataccffsbz\",\"disableMetricsCollection\":\"datatfxq\",\"\":{\"pzvqzml\":\"dataxoudjcttavbc\"}},\"formatSettings\":{\"type\":\"nodrfcl\",\"compressionProperties\":{\"type\":\"qttfqgdoowgqooip\",\"\":{\"etwfre\":\"datavsnedhkjiwf\",\"zdcwqalxpmiyt\":\"datavflrrtjuuik\",\"pvlsljutawg\":\"datajiscisolkw\"}},\"\":{\"eyjlyxdux\":\"dataopipvpe\"}},\"sourceRetryCount\":\"datarqx\",\"sourceRetryWait\":\"dataazyxmum\",\"maxConcurrentConnections\":\"datakaxzrycvac\",\"disableMetricsCollection\":\"datazjysyphxeoqm\",\"\":{\"sbbawrb\":\"dataikceiyuv\",\"rtnk\":\"dataooxvprqlxqhqgip\"}}") .toObject(BinarySource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BinarySource model - = new BinarySource().withSourceRetryCount("datataevqafdhpkiiu").withSourceRetryWait("datarobckelo") - .withMaxConcurrentConnections("datamrvdtqhrtnqssqy").withDisableMetricsCollection("datapskitokphamefzz") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataliteenaheecsf") - .withDisableMetricsCollection("datasubzfuhjnmdcyrbz") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new 
BinaryReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))); + BinarySource model = new BinarySource().withSourceRetryCount("datarqx") + .withSourceRetryWait("dataazyxmum") + .withMaxConcurrentConnections("datakaxzrycvac") + .withDisableMetricsCollection("datazjysyphxeoqm") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataccffsbz") + .withDisableMetricsCollection("datatfxq") + .withAdditionalProperties(mapOf("type", "vhf"))) + .withFormatSettings(new BinaryReadSettings().withCompressionProperties( + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "qttfqgdoowgqooip")))); model = BinaryData.fromObject(model).toObject(BinarySource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java index 2bc6c29b1fd0b..087b5a6e83849 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java @@ -5,8 +5,8 @@ package com.azure.resourcemanager.datafactory.generated; import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BlobEventTypes; import com.azure.resourcemanager.datafactory.models.BlobEventsTrigger; +import com.azure.resourcemanager.datafactory.models.BlobEventTypes; import com.azure.resourcemanager.datafactory.models.PipelineReference; import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; import java.util.Arrays; @@ -18,38 +18,53 @@ public final class BlobEventsTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobEventsTrigger model = BinaryData.fromString( - "{\"type\":\"BlobEventsTrigger\",\"typeProperties\":{\"blobPathBeginsWith\":\"vugarykbyuukhssr\",\"blobPathEndsWith\":\"ugorcz\",\"ignoreEmptyBlobs\":false,\"events\":[\"Microsoft.Storage.BlobDeleted\",\"Microsoft.Storage.BlobCreated\"],\"scope\":\"tdwktogmcbl\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"nisinpkcww\",\"name\":\"kabj\"},\"parameters\":{\"iaqmuptnhuybt\":\"datahngaczgg\",\"hyozxotwral\":\"datatok\"}}],\"description\":\"jzlnrellwf\",\"runtimeState\":\"Started\",\"annotations\":[\"datalsarfmjschc\",\"dataudromhhs\",\"datamzfvrakpqlltoiu\",\"dataveoibehrh\"],\"\":{\"dkwwuljv\":\"datajxilbsbhaqsucwdc\",\"hnh\":\"dataujgsxrsxbofmva\"}}") + 
"{\"type\":\"ifv\",\"typeProperties\":{\"blobPathBeginsWith\":\"pswasveym\",\"blobPathEndsWith\":\"bmffcryyykwwhscu\",\"ignoreEmptyBlobs\":false,\"events\":[\"Microsoft.Storage.BlobDeleted\",\"Microsoft.Storage.BlobDeleted\",\"Microsoft.Storage.BlobCreated\",\"Microsoft.Storage.BlobDeleted\"],\"scope\":\"lzbzcgzhdrvkz\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"qrvz\",\"name\":\"beiqopjzzglgxvqd\"},\"parameters\":{\"wbpbi\":\"datazkzkhbiee\",\"t\":\"datateprq\"}},{\"pipelineReference\":{\"referenceName\":\"wapmtyfgswp\",\"name\":\"nvxtvmbwydqo\"},\"parameters\":{\"i\":\"datayjebgveuazwkze\"}},{\"pipelineReference\":{\"referenceName\":\"drrgzguupw\",\"name\":\"ohz\"},\"parameters\":{\"dlxqjshyyrcr\":\"databsncorini\"}},{\"pipelineReference\":{\"referenceName\":\"z\",\"name\":\"faurmqpk\"},\"parameters\":{\"dnpeamslvpxsy\":\"databltfxhmrhhxli\"}}],\"description\":\"dyaauls\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datac\"],\"\":{\"h\":\"databxgxgr\",\"ndvaf\":\"dataabhkyaspccwiev\",\"yxlcgycvcspcfx\":\"datacvn\",\"ioqtafmbxtn\":\"dataal\"}}") .toObject(BlobEventsTrigger.class); - Assertions.assertEquals("jzlnrellwf", model.description()); - Assertions.assertEquals("nisinpkcww", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("kabj", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vugarykbyuukhssr", model.blobPathBeginsWith()); - Assertions.assertEquals("ugorcz", model.blobPathEndsWith()); + Assertions.assertEquals("dyaauls", model.description()); + Assertions.assertEquals("qrvz", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("beiqopjzzglgxvqd", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("pswasveym", model.blobPathBeginsWith()); + Assertions.assertEquals("bmffcryyykwwhscu", model.blobPathEndsWith()); Assertions.assertEquals(false, model.ignoreEmptyBlobs()); Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, model.events().get(0)); - Assertions.assertEquals("tdwktogmcbl", model.scope()); + Assertions.assertEquals("lzbzcgzhdrvkz", model.scope()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BlobEventsTrigger model = new BlobEventsTrigger().withDescription("jzlnrellwf") - .withAnnotations(Arrays.asList("datalsarfmjschc", "dataudromhhs", "datamzfvrakpqlltoiu", "dataveoibehrh")) - .withPipelines(Arrays.asList(new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("nisinpkcww").withName("kabj")) - .withParameters(mapOf("iaqmuptnhuybt", "datahngaczgg", "hyozxotwral", "datatok")))) - .withBlobPathBeginsWith("vugarykbyuukhssr").withBlobPathEndsWith("ugorcz").withIgnoreEmptyBlobs(false) + BlobEventsTrigger model = new BlobEventsTrigger().withDescription("dyaauls") + .withAnnotations(Arrays.asList("datac")) + .withPipelines(Arrays.asList( + new TriggerPipelineReference() + .withPipelineReference( + new PipelineReference().withReferenceName("qrvz").withName("beiqopjzzglgxvqd")) + .withParameters(mapOf("wbpbi", "datazkzkhbiee", "t", "datateprq")), + new TriggerPipelineReference() + .withPipelineReference( + new PipelineReference().withReferenceName("wapmtyfgswp").withName("nvxtvmbwydqo")) + .withParameters(mapOf("i", "datayjebgveuazwkze")), + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("drrgzguupw").withName("ohz")) + .withParameters(mapOf("dlxqjshyyrcr", "databsncorini")), + new 
TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("z").withName("faurmqpk")) + .withParameters(mapOf("dnpeamslvpxsy", "databltfxhmrhhxli")))) + .withBlobPathBeginsWith("pswasveym") + .withBlobPathEndsWith("bmffcryyykwwhscu") + .withIgnoreEmptyBlobs(false) .withEvents(Arrays.asList(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, - BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED)) - .withScope("tdwktogmcbl"); + BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, + BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED)) + .withScope("lzbzcgzhdrvkz"); model = BinaryData.fromObject(model).toObject(BlobEventsTrigger.class); - Assertions.assertEquals("jzlnrellwf", model.description()); - Assertions.assertEquals("nisinpkcww", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("kabj", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vugarykbyuukhssr", model.blobPathBeginsWith()); - Assertions.assertEquals("ugorcz", model.blobPathEndsWith()); + Assertions.assertEquals("dyaauls", model.description()); + Assertions.assertEquals("qrvz", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("beiqopjzzglgxvqd", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("pswasveym", model.blobPathBeginsWith()); + Assertions.assertEquals("bmffcryyykwwhscu", model.blobPathEndsWith()); Assertions.assertEquals(false, model.ignoreEmptyBlobs()); Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, model.events().get(0)); - Assertions.assertEquals("tdwktogmcbl", model.scope()); + Assertions.assertEquals("lzbzcgzhdrvkz", model.scope()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java index fb6b5d6e31f86..c298af6d7e719 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java @@ -14,25 +14,28 @@ public final class BlobEventsTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobEventsTriggerTypeProperties model = BinaryData.fromString( - "{\"blobPathBeginsWith\":\"swlfuuki\",\"blobPathEndsWith\":\"laytviwvejwt\",\"ignoreEmptyBlobs\":true,\"events\":[\"Microsoft.Storage.BlobCreated\"],\"scope\":\"ps\"}") + "{\"blobPathBeginsWith\":\"byurutsabu\",\"blobPathEndsWith\":\"uweqbeygnetuvs\",\"ignoreEmptyBlobs\":false,\"events\":[\"Microsoft.Storage.BlobDeleted\"],\"scope\":\"umdznbl\"}") .toObject(BlobEventsTriggerTypeProperties.class); - Assertions.assertEquals("swlfuuki", model.blobPathBeginsWith()); - Assertions.assertEquals("laytviwvejwt", model.blobPathEndsWith()); - Assertions.assertEquals(true, model.ignoreEmptyBlobs()); - Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, model.events().get(0)); - Assertions.assertEquals("ps", model.scope()); + Assertions.assertEquals("byurutsabu", model.blobPathBeginsWith()); + Assertions.assertEquals("uweqbeygnetuvs", 
model.blobPathEndsWith()); + Assertions.assertEquals(false, model.ignoreEmptyBlobs()); + Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, model.events().get(0)); + Assertions.assertEquals("umdznbl", model.scope()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BlobEventsTriggerTypeProperties model = new BlobEventsTriggerTypeProperties().withBlobPathBeginsWith("swlfuuki") - .withBlobPathEndsWith("laytviwvejwt").withIgnoreEmptyBlobs(true) - .withEvents(Arrays.asList(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED)).withScope("ps"); + BlobEventsTriggerTypeProperties model + = new BlobEventsTriggerTypeProperties().withBlobPathBeginsWith("byurutsabu") + .withBlobPathEndsWith("uweqbeygnetuvs") + .withIgnoreEmptyBlobs(false) + .withEvents(Arrays.asList(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED)) + .withScope("umdznbl"); model = BinaryData.fromObject(model).toObject(BlobEventsTriggerTypeProperties.class); - Assertions.assertEquals("swlfuuki", model.blobPathBeginsWith()); - Assertions.assertEquals("laytviwvejwt", model.blobPathEndsWith()); - Assertions.assertEquals(true, model.ignoreEmptyBlobs()); - Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, model.events().get(0)); - Assertions.assertEquals("ps", model.scope()); + Assertions.assertEquals("byurutsabu", model.blobPathBeginsWith()); + Assertions.assertEquals("uweqbeygnetuvs", model.blobPathEndsWith()); + Assertions.assertEquals(false, model.ignoreEmptyBlobs()); + Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED, model.events().get(0)); + Assertions.assertEquals("umdznbl", model.scope()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java index 3acd6a5a58d36..0e0b017c02453 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java @@ -13,20 +13,23 @@ public final class BlobSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobSink model = BinaryData.fromString( - "{\"type\":\"BlobSink\",\"blobWriterOverwriteFiles\":\"datapvidzhjcppqcgbp\",\"blobWriterDateTimeFormat\":\"datani\",\"blobWriterAddHeader\":\"datahd\",\"copyBehavior\":\"datau\",\"metadata\":[{\"name\":\"datalharjbakp\",\"value\":\"datauug\"},{\"name\":\"datagdulvdenh\",\"value\":\"datavaimkoyrp\"},{\"name\":\"datanljduwkbozlmr\",\"value\":\"dataghvlvdjxbjqiab\"}],\"writeBatchSize\":\"dataevvowiypyljzkx\",\"writeBatchTimeout\":\"datafyv\",\"sinkRetryCount\":\"dataftaq\",\"sinkRetryWait\":\"datacrjl\",\"maxConcurrentConnections\":\"dataneykxewemtazmrej\",\"disableMetricsCollection\":\"dataxoqege\",\"\":{\"rt\":\"datalfnuglmyr\",\"zwgrs\":\"datakpo\"}}") + 
"{\"type\":\"lae\",\"blobWriterOverwriteFiles\":\"datadu\",\"blobWriterDateTimeFormat\":\"datapl\",\"blobWriterAddHeader\":\"dataupfndafrziwjcy\",\"copyBehavior\":\"dataaneaveyvdru\",\"metadata\":[{\"name\":\"datarhzesqdvmxuf\",\"value\":\"datapawwjvdohzew\"}],\"writeBatchSize\":\"dataftlsfwpv\",\"writeBatchTimeout\":\"datamxjd\",\"sinkRetryCount\":\"datatxbrj\",\"sinkRetryWait\":\"dataeypuqwdm\",\"maxConcurrentConnections\":\"datamfvxfssho\",\"disableMetricsCollection\":\"datadeornzprdg\",\"\":{\"umyuiquzfotf\":\"datatqgzdfjfnreemp\"}}") .toObject(BlobSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BlobSink model = new BlobSink().withWriteBatchSize("dataevvowiypyljzkx").withWriteBatchTimeout("datafyv") - .withSinkRetryCount("dataftaq").withSinkRetryWait("datacrjl") - .withMaxConcurrentConnections("dataneykxewemtazmrej").withDisableMetricsCollection("dataxoqege") - .withBlobWriterOverwriteFiles("datapvidzhjcppqcgbp").withBlobWriterDateTimeFormat("datani") - .withBlobWriterAddHeader("datahd").withCopyBehavior("datau") - .withMetadata(Arrays.asList(new MetadataItem().withName("datalharjbakp").withValue("datauug"), - new MetadataItem().withName("datagdulvdenh").withValue("datavaimkoyrp"), - new MetadataItem().withName("datanljduwkbozlmr").withValue("dataghvlvdjxbjqiab"))); + BlobSink model = new BlobSink().withWriteBatchSize("dataftlsfwpv") + .withWriteBatchTimeout("datamxjd") + .withSinkRetryCount("datatxbrj") + .withSinkRetryWait("dataeypuqwdm") + .withMaxConcurrentConnections("datamfvxfssho") + .withDisableMetricsCollection("datadeornzprdg") + .withBlobWriterOverwriteFiles("datadu") + .withBlobWriterDateTimeFormat("datapl") + .withBlobWriterAddHeader("dataupfndafrziwjcy") + .withCopyBehavior("dataaneaveyvdru") + .withMetadata(Arrays.asList(new MetadataItem().withName("datarhzesqdvmxuf").withValue("datapawwjvdohzew"))); model = BinaryData.fromObject(model).toObject(BlobSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java index 1817b5f6b7840..476f21934fd8b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java @@ -11,15 +11,19 @@ public final class BlobSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobSource model = BinaryData.fromString( - "{\"type\":\"BlobSource\",\"treatEmptyAsNull\":\"datax\",\"skipHeaderLineCount\":\"dataa\",\"recursive\":\"dataheguxrziryxrpjr\",\"sourceRetryCount\":\"datamxqvv\",\"sourceRetryWait\":\"dataofts\",\"maxConcurrentConnections\":\"datafwusfbrnjvzl\",\"disableMetricsCollection\":\"datavjemp\",\"\":{\"tbieuqfgkfift\":\"dataslwz\",\"wwutduch\":\"databupuukp\",\"c\":\"datafndijzp\"}}") + "{\"type\":\"bjledjxblobknfpd\",\"treatEmptyAsNull\":\"dataacsfbmb\",\"skipHeaderLineCount\":\"dataefqku\",\"recursive\":\"datayumoamqxwluslxyt\",\"sourceRetryCount\":\"datahzgj\",\"sourceRetryWait\":\"dataomctbgoccypxsrh\",\"maxConcurrentConnections\":\"datalbnuflfzawk\",\"disableMetricsCollection\":\"datae\",\"\":{\"jnhwwy\":\"datapyoqfbjpclboioj\"}}") .toObject(BlobSource.class); } @org.junit.jupiter.api.Test public void 
testSerialize() throws Exception { - BlobSource model = new BlobSource().withSourceRetryCount("datamxqvv").withSourceRetryWait("dataofts") - .withMaxConcurrentConnections("datafwusfbrnjvzl").withDisableMetricsCollection("datavjemp") - .withTreatEmptyAsNull("datax").withSkipHeaderLineCount("dataa").withRecursive("dataheguxrziryxrpjr"); + BlobSource model = new BlobSource().withSourceRetryCount("datahzgj") + .withSourceRetryWait("dataomctbgoccypxsrh") + .withMaxConcurrentConnections("datalbnuflfzawk") + .withDisableMetricsCollection("datae") + .withTreatEmptyAsNull("dataacsfbmb") + .withSkipHeaderLineCount("dataefqku") + .withRecursive("datayumoamqxwluslxyt"); model = BinaryData.fromObject(model).toObject(BlobSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java index f3f0453194f07..261babaa6f476 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java @@ -18,41 +18,38 @@ public final class BlobTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobTrigger model = BinaryData.fromString( - "{\"type\":\"BlobTrigger\",\"typeProperties\":{\"folderPath\":\"vbgwxpuwti\",\"maxConcurrency\":1919804309,\"linkedService\":{\"referenceName\":\"ekmzubd\",\"parameters\":{\"w\":\"dataf\",\"ffaqolnfeywbpenq\":\"datalzhujcx\",\"fjdghmnycxmctnjy\":\"datazpzwwsfrpbw\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"pulwcxmxf\",\"name\":\"kmjeekbmw\"},\"parameters\":{\"wiybleaota\":\"datashtmxyrsn\"}},{\"pipelineReference\":{\"referenceName\":\"cy\",\"name\":\"xosnbwbcnf\"},\"parameters\":{\"ehdccgh\":\"datandbwwnlgaoub\"}}],\"description\":\"qvw\",\"runtimeState\":\"Stopped\",\"annotations\":[\"dataotqsp\",\"datanhlsf\"],\"\":{\"huvuadpdjovwbhei\":\"datawajsbqytfvjvm\",\"jvuwaqiomdlp\":\"datamw\",\"wowmwrn\":\"datakfslm\"}}") + "{\"type\":\"bsegcogy\",\"typeProperties\":{\"folderPath\":\"hxjwiggca\",\"maxConcurrency\":1782911484,\"linkedService\":{\"referenceName\":\"koxpaytzqgsaeg\",\"parameters\":{\"kvoskjixbrd\":\"datawgerdxhgrgiumwk\",\"nmzaih\":\"datamdnebkonfl\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"hfohspv\",\"name\":\"jmt\"},\"parameters\":{\"jsakigrl\":\"datauecmdmcory\",\"qrmqefd\":\"dataigl\",\"ek\":\"datauwsfebhvkkpd\"}},{\"pipelineReference\":{\"referenceName\":\"io\",\"name\":\"vijrdyotejljdr\"},\"parameters\":{\"xyrr\":\"datajw\",\"bddcxfuizokzmeg\":\"databeuf\",\"iljsidvuptret\":\"datajsneybpqotfd\",\"tpbi\":\"dataauupwt\"}}],\"description\":\"ekaajuwkxbg\",\"runtimeState\":\"Disabled\",\"annotations\":[\"databjsyorsojvztqra\",\"dataqcmouxs\",\"datakxa\"],\"\":{\"gzqkkwz\":\"datay\",\"wwopssdws\":\"datag\",\"s\":\"datakrzcnl\",\"vhgkmx\":\"datamikbtzt\"}}") .toObject(BlobTrigger.class); - Assertions.assertEquals("qvw", model.description()); - Assertions.assertEquals("pulwcxmxf", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("kmjeekbmw", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vbgwxpuwti", model.folderPath()); - Assertions.assertEquals(1919804309, 
model.maxConcurrency()); - Assertions.assertEquals("ekmzubd", model.linkedService().referenceName()); + Assertions.assertEquals("ekaajuwkxbg", model.description()); + Assertions.assertEquals("hfohspv", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("jmt", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("hxjwiggca", model.folderPath()); + Assertions.assertEquals(1782911484, model.maxConcurrency()); + Assertions.assertEquals("koxpaytzqgsaeg", model.linkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BlobTrigger model - = new BlobTrigger() - .withDescription( - "qvw") - .withAnnotations(Arrays.asList("dataotqsp", "datanhlsf")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("pulwcxmxf").withName("kmjeekbmw")) - .withParameters(mapOf("wiybleaota", "datashtmxyrsn")), - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("cy").withName("xosnbwbcnf")) - .withParameters(mapOf("ehdccgh", "datandbwwnlgaoub")))) - .withFolderPath("vbgwxpuwti").withMaxConcurrency(1919804309) - .withLinkedService(new LinkedServiceReference().withReferenceName("ekmzubd").withParameters( - mapOf("w", "dataf", "ffaqolnfeywbpenq", "datalzhujcx", "fjdghmnycxmctnjy", "datazpzwwsfrpbw"))); + BlobTrigger model = new BlobTrigger().withDescription("ekaajuwkxbg") + .withAnnotations(Arrays.asList("databjsyorsojvztqra", "dataqcmouxs", "datakxa")) + .withPipelines(Arrays.asList(new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("hfohspv").withName("jmt")) + .withParameters(mapOf("jsakigrl", "datauecmdmcory", "qrmqefd", "dataigl", "ek", "datauwsfebhvkkpd")), + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("io").withName("vijrdyotejljdr")) + .withParameters(mapOf("xyrr", "datajw", "bddcxfuizokzmeg", "databeuf", "iljsidvuptret", + "datajsneybpqotfd", "tpbi", "dataauupwt")))) + .withFolderPath("hxjwiggca") + .withMaxConcurrency(1782911484) + .withLinkedService(new LinkedServiceReference().withReferenceName("koxpaytzqgsaeg") + .withParameters(mapOf("kvoskjixbrd", "datawgerdxhgrgiumwk", "nmzaih", "datamdnebkonfl"))); model = BinaryData.fromObject(model).toObject(BlobTrigger.class); - Assertions.assertEquals("qvw", model.description()); - Assertions.assertEquals("pulwcxmxf", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("kmjeekbmw", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vbgwxpuwti", model.folderPath()); - Assertions.assertEquals(1919804309, model.maxConcurrency()); - Assertions.assertEquals("ekmzubd", model.linkedService().referenceName()); + Assertions.assertEquals("ekaajuwkxbg", model.description()); + Assertions.assertEquals("hfohspv", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("jmt", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("hxjwiggca", model.folderPath()); + Assertions.assertEquals(1782911484, model.maxConcurrency()); + Assertions.assertEquals("koxpaytzqgsaeg", model.linkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java index 29f9d53ade079..a853f9dd8db8e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java @@ -15,22 +15,24 @@ public final class BlobTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { BlobTriggerTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"uwgrtvyw\",\"maxConcurrency\":633624527,\"linkedService\":{\"referenceName\":\"bgwvhdbievyyp\",\"parameters\":{\"wpptessvmdox\":\"datacaschhfmidkd\"}}}") + "{\"folderPath\":\"rqtkzeopoxdwoxpn\",\"maxConcurrency\":1343583156,\"linkedService\":{\"referenceName\":\"zbsw\",\"parameters\":{\"lbflbax\":\"datatxfshaqpmlyzwgo\",\"yrdzogtrycb\":\"datawojt\",\"qw\":\"dataf\",\"jsxwukbcwym\":\"dataavgdzt\"}}}") .toObject(BlobTriggerTypeProperties.class); - Assertions.assertEquals("uwgrtvyw", model.folderPath()); - Assertions.assertEquals(633624527, model.maxConcurrency()); - Assertions.assertEquals("bgwvhdbievyyp", model.linkedService().referenceName()); + Assertions.assertEquals("rqtkzeopoxdwoxpn", model.folderPath()); + Assertions.assertEquals(1343583156, model.maxConcurrency()); + Assertions.assertEquals("zbsw", model.linkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - BlobTriggerTypeProperties model = new BlobTriggerTypeProperties().withFolderPath("uwgrtvyw") - .withMaxConcurrency(633624527).withLinkedService(new LinkedServiceReference() - .withReferenceName("bgwvhdbievyyp").withParameters(mapOf("wpptessvmdox", "datacaschhfmidkd"))); + BlobTriggerTypeProperties model = new BlobTriggerTypeProperties().withFolderPath("rqtkzeopoxdwoxpn") + .withMaxConcurrency(1343583156) + .withLinkedService(new LinkedServiceReference().withReferenceName("zbsw") + .withParameters(mapOf("lbflbax", "datatxfshaqpmlyzwgo", "yrdzogtrycb", "datawojt", "qw", "dataf", + "jsxwukbcwym", "dataavgdzt"))); model = BinaryData.fromObject(model).toObject(BlobTriggerTypeProperties.class); - Assertions.assertEquals("uwgrtvyw", model.folderPath()); - Assertions.assertEquals(633624527, model.maxConcurrency()); - Assertions.assertEquals("bgwvhdbievyyp", model.linkedService().referenceName()); + Assertions.assertEquals("rqtkzeopoxdwoxpn", model.folderPath()); + Assertions.assertEquals(1343583156, model.maxConcurrency()); + Assertions.assertEquals("zbsw", model.linkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java index 479a42cd1988c..fe2a275d671b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java @@ -13,19 +13,22 @@ public final class CassandraSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws 
Exception { CassandraSource model = BinaryData.fromString( - "{\"type\":\"CassandraSource\",\"query\":\"databymnfctorqzb\",\"consistencyLevel\":\"SERIAL\",\"queryTimeout\":\"datafqqrarolc\",\"additionalColumns\":\"datahbynlbwcnn\",\"sourceRetryCount\":\"datafgstdifbyfjsl\",\"sourceRetryWait\":\"datagee\",\"maxConcurrentConnections\":\"datasoj\",\"disableMetricsCollection\":\"dataarliig\",\"\":{\"xuuqcmunh\":\"dataqvidsjhx\"}}") + "{\"type\":\"zeebdefepwkhr\",\"query\":\"datanwohlcahhfuydgd\",\"consistencyLevel\":\"QUORUM\",\"queryTimeout\":\"datayuspz\",\"additionalColumns\":\"datavibp\",\"sourceRetryCount\":\"datazwgbbozivf\",\"sourceRetryWait\":\"dataqvnlhsxea\",\"maxConcurrentConnections\":\"datasqquvv\",\"disableMetricsCollection\":\"datab\",\"\":{\"rsyirfkxazu\":\"dataohdukprqyibwuzvm\",\"qoilgkzn\":\"dataoigorwpbbjzdv\",\"iceqyraj\":\"datazpvjwego\"}}") .toObject(CassandraSource.class); - Assertions.assertEquals(CassandraSourceReadConsistencyLevels.SERIAL, model.consistencyLevel()); + Assertions.assertEquals(CassandraSourceReadConsistencyLevels.QUORUM, model.consistencyLevel()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CassandraSource model - = new CassandraSource().withSourceRetryCount("datafgstdifbyfjsl").withSourceRetryWait("datagee") - .withMaxConcurrentConnections("datasoj").withDisableMetricsCollection("dataarliig") - .withQueryTimeout("datafqqrarolc").withAdditionalColumns("datahbynlbwcnn").withQuery("databymnfctorqzb") - .withConsistencyLevel(CassandraSourceReadConsistencyLevels.SERIAL); + CassandraSource model = new CassandraSource().withSourceRetryCount("datazwgbbozivf") + .withSourceRetryWait("dataqvnlhsxea") + .withMaxConcurrentConnections("datasqquvv") + .withDisableMetricsCollection("datab") + .withQueryTimeout("datayuspz") + .withAdditionalColumns("datavibp") + .withQuery("datanwohlcahhfuydgd") + .withConsistencyLevel(CassandraSourceReadConsistencyLevels.QUORUM); model = BinaryData.fromObject(model).toObject(CassandraSource.class); - Assertions.assertEquals(CassandraSourceReadConsistencyLevels.SERIAL, model.consistencyLevel()); + Assertions.assertEquals(CassandraSourceReadConsistencyLevels.QUORUM, model.consistencyLevel()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java index fd9d7f78c0b63..d5180f0f1b88b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java @@ -17,35 +17,36 @@ public final class ChainingTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ChainingTrigger model = BinaryData.fromString( - 
"{\"type\":\"ChainingTrigger\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"kwdtuwbrw\",\"name\":\"qtyuywzcc\"},\"parameters\":{\"rk\":\"dataliy\"}},\"typeProperties\":{\"dependsOn\":[{\"referenceName\":\"lvitbtloxrbotzvr\",\"name\":\"xpayjselrfqstbfu\"},{\"referenceName\":\"mln\",\"name\":\"vbeyugg\"},{\"referenceName\":\"fshntykenmjznjq\",\"name\":\"yaaevrkxyjsuapp\"}],\"runDimension\":\"mujiguusbwmsy\"},\"description\":\"bjtzd\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataqiilfovmcjchbof\"],\"\":{\"krerurcjgkau\":\"dataqvjfszvecedoptez\",\"xqsbwepduyqxvj\":\"datazbrdibdbk\",\"yaylt\":\"dataarddbn\"}}") + "{\"type\":\"gr\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"ngfcbldpeforx\",\"name\":\"pmzkdisrgykrcj\"},\"parameters\":{\"jeguviphxi\":\"datavnfdovw\",\"blgwlejcxcrxuy\":\"datanwrhfrbwoylpmecc\",\"hqav\":\"datarhrtihzwdoflwl\",\"eh\":\"datafosfpgquxqvkuu\"}},\"typeProperties\":{\"dependsOn\":[{\"referenceName\":\"vfhyi\",\"name\":\"ezpwhczqjoovy\"},{\"referenceName\":\"sgughwo\",\"name\":\"wzpxlx\"},{\"referenceName\":\"vhhkabeox\",\"name\":\"o\"}],\"runDimension\":\"tckmtqn\"},\"description\":\"xhwvzdvujmukadz\",\"runtimeState\":\"Started\",\"annotations\":[\"dataqdda\",\"dataez\"],\"\":{\"vxel\":\"dataecdcvhxwegdsmnyp\",\"bqpddypw\":\"dataps\",\"enahhpnbvzd\":\"datamyftvejxmy\",\"jbzwvnxwduu\":\"datay\"}}") .toObject(ChainingTrigger.class); - Assertions.assertEquals("bjtzd", model.description()); - Assertions.assertEquals("kwdtuwbrw", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("qtyuywzcc", model.pipeline().pipelineReference().name()); - Assertions.assertEquals("lvitbtloxrbotzvr", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("xpayjselrfqstbfu", model.dependsOn().get(0).name()); - Assertions.assertEquals("mujiguusbwmsy", model.runDimension()); + Assertions.assertEquals("xhwvzdvujmukadz", model.description()); + Assertions.assertEquals("ngfcbldpeforx", model.pipeline().pipelineReference().referenceName()); + Assertions.assertEquals("pmzkdisrgykrcj", model.pipeline().pipelineReference().name()); + Assertions.assertEquals("vfhyi", model.dependsOn().get(0).referenceName()); + Assertions.assertEquals("ezpwhczqjoovy", model.dependsOn().get(0).name()); + Assertions.assertEquals("tckmtqn", model.runDimension()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ChainingTrigger model - = new ChainingTrigger().withDescription("bjtzd").withAnnotations(Arrays.asList("dataqiilfovmcjchbof")) - .withPipeline(new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("kwdtuwbrw").withName("qtyuywzcc")) - .withParameters(mapOf("rk", "dataliy"))) - .withDependsOn(Arrays.asList( - new PipelineReference().withReferenceName("lvitbtloxrbotzvr").withName("xpayjselrfqstbfu"), - new PipelineReference().withReferenceName("mln").withName("vbeyugg"), - new PipelineReference().withReferenceName("fshntykenmjznjq").withName("yaaevrkxyjsuapp"))) - .withRunDimension("mujiguusbwmsy"); + ChainingTrigger model = new ChainingTrigger().withDescription("xhwvzdvujmukadz") + .withAnnotations(Arrays.asList("dataqdda", "dataez")) + .withPipeline(new TriggerPipelineReference() + .withPipelineReference( + new PipelineReference().withReferenceName("ngfcbldpeforx").withName("pmzkdisrgykrcj")) + .withParameters(mapOf("jeguviphxi", "datavnfdovw", "blgwlejcxcrxuy", "datanwrhfrbwoylpmecc", "hqav", + "datarhrtihzwdoflwl", "eh", "datafosfpgquxqvkuu"))) + 
.withDependsOn(Arrays.asList(new PipelineReference().withReferenceName("vfhyi").withName("ezpwhczqjoovy"), + new PipelineReference().withReferenceName("sgughwo").withName("wzpxlx"), + new PipelineReference().withReferenceName("vhhkabeox").withName("o"))) + .withRunDimension("tckmtqn"); model = BinaryData.fromObject(model).toObject(ChainingTrigger.class); - Assertions.assertEquals("bjtzd", model.description()); - Assertions.assertEquals("kwdtuwbrw", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("qtyuywzcc", model.pipeline().pipelineReference().name()); - Assertions.assertEquals("lvitbtloxrbotzvr", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("xpayjselrfqstbfu", model.dependsOn().get(0).name()); - Assertions.assertEquals("mujiguusbwmsy", model.runDimension()); + Assertions.assertEquals("xhwvzdvujmukadz", model.description()); + Assertions.assertEquals("ngfcbldpeforx", model.pipeline().pipelineReference().referenceName()); + Assertions.assertEquals("pmzkdisrgykrcj", model.pipeline().pipelineReference().name()); + Assertions.assertEquals("vfhyi", model.dependsOn().get(0).referenceName()); + Assertions.assertEquals("ezpwhczqjoovy", model.dependsOn().get(0).name()); + Assertions.assertEquals("tckmtqn", model.runDimension()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java index 02266f24a8aa7..eb7309d00d036 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java @@ -13,23 +13,25 @@ public final class ChainingTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ChainingTriggerTypeProperties model = BinaryData - .fromString( - "{\"dependsOn\":[{\"referenceName\":\"twd\",\"name\":\"tpxwgttpxb\"}],\"runDimension\":\"ihzxxnd\"}") + ChainingTriggerTypeProperties model = BinaryData.fromString( + "{\"dependsOn\":[{\"referenceName\":\"vb\",\"name\":\"yedrkgrtda\"},{\"referenceName\":\"lito\",\"name\":\"tarefexkbmodbpc\"},{\"referenceName\":\"uyhhzcj\",\"name\":\"ijwkcllo\"},{\"referenceName\":\"jshfcuzzujcib\",\"name\":\"fkxiixnxx\"}],\"runDimension\":\"vyizya\"}") .toObject(ChainingTriggerTypeProperties.class); - Assertions.assertEquals("twd", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("tpxwgttpxb", model.dependsOn().get(0).name()); - Assertions.assertEquals("ihzxxnd", model.runDimension()); + Assertions.assertEquals("vb", model.dependsOn().get(0).referenceName()); + Assertions.assertEquals("yedrkgrtda", model.dependsOn().get(0).name()); + Assertions.assertEquals("vyizya", model.runDimension()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ChainingTriggerTypeProperties model = new ChainingTriggerTypeProperties() - .withDependsOn(Arrays.asList(new PipelineReference().withReferenceName("twd").withName("tpxwgttpxb"))) - .withRunDimension("ihzxxnd"); + .withDependsOn(Arrays.asList(new PipelineReference().withReferenceName("vb").withName("yedrkgrtda"), + new 
PipelineReference().withReferenceName("lito").withName("tarefexkbmodbpc"), + new PipelineReference().withReferenceName("uyhhzcj").withName("ijwkcllo"), + new PipelineReference().withReferenceName("jshfcuzzujcib").withName("fkxiixnxx"))) + .withRunDimension("vyizya"); model = BinaryData.fromObject(model).toObject(ChainingTriggerTypeProperties.class); - Assertions.assertEquals("twd", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("tpxwgttpxb", model.dependsOn().get(0).name()); - Assertions.assertEquals("ihzxxnd", model.runDimension()); + Assertions.assertEquals("vb", model.dependsOn().get(0).referenceName()); + Assertions.assertEquals("yedrkgrtda", model.dependsOn().get(0).name()); + Assertions.assertEquals("vyizya", model.runDimension()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java index 22dae1af115d9..e01649170aac6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java @@ -12,14 +12,14 @@ public final class ChangeDataCaptureFolderTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ChangeDataCaptureFolder model - = BinaryData.fromString("{\"name\":\"pkcvmwf\"}").toObject(ChangeDataCaptureFolder.class); - Assertions.assertEquals("pkcvmwf", model.name()); + = BinaryData.fromString("{\"name\":\"mehllizhceu\"}").toObject(ChangeDataCaptureFolder.class); + Assertions.assertEquals("mehllizhceu", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ChangeDataCaptureFolder model = new ChangeDataCaptureFolder().withName("pkcvmwf"); + ChangeDataCaptureFolder model = new ChangeDataCaptureFolder().withName("mehllizhceu"); model = BinaryData.fromObject(model).toObject(ChangeDataCaptureFolder.class); - Assertions.assertEquals("pkcvmwf", model.name()); + Assertions.assertEquals("mehllizhceu", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java index 50d9401d13e9c..cc4507c907c55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java @@ -26,184 +26,163 @@ public final class ChangeDataCaptureListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ChangeDataCaptureListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"folder\":{\"name\":\"cgdz\"},\"description\":\"nr\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"datanamtuatmzw\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datamizvgbgatzuuvbx\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datahttzlswvajqfutlx\",\"dataoqza\",\"dataunwqr\",\"datazfrgqhaohcm\"]},{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"datambpyryxamebly\",\"datayvk\",\"datakmrocxne\",\"datav\"]}],\"policy\":{\"mode\":\"tod\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":1443311872}},\"allowVNetOverride\":true,\"status\":\"gvoavyunssxlgh\"},\"name\":\"egjlgvvpa\",\"type\":\"ksgbuxan\",\"etag\":\"ygdhgaqipirpiwr\",\"\":{\"pibkephuu\":\"dataulopmjnlexwhcb\",\"qpbrlc\":\"dataerctatoyin\",\"uc\":\"datarduczkgofxyfs\",\"qnrmvvfko\":\"datacrrpcjttbstvje\"},\"id\":\"lghktuidvrm\"},{\"properties\":{\"folder\":{\"name\":\"pdwwexymzvlazi\"},\"description\":\"hpwvqsgnyyuu\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datasrfhf\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datawcdommpvfqaw\",\"datafgbrtt\"]},{\"targetEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datahajlfn\",\"datahiqfyuttdiy\",\"datab\"]},{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"datak\",\"datactwwgzw\",\"datajlmec\"]}],\"policy\":{\"mode\":\"gygzyvn\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":488658265}},\"allowVNetOverride\":true,\"status\":\"moqqtlffhzbk\"},\"name\":\"jjjavfqnvhnq\",\"type\":\"wdogiyetesyp\",\"etag\":\"dbztjhqtfbov\",\"\":{\"hpsprkzyaupiac\":\"datakbwetnj\"},\"id\":\"n\"},{\"properties\":{\"folder\":{\"name\":\"wqro\"},\"description\":\"tuovmaonurj\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datascvsfxigctm\"]},{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"dataccyd\",\"datatce\",\"datakdqkkyihzt\",\"dataeq\"
]},{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"dataychillcecfe\",\"datauwaoaguhicqlli\",\"datastacsjvhrweftkwq\"]}],\"policy\":{\"mode\":\"pmvssehaep\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":1216217570}},\"allowVNetOverride\":true,\"status\":\"upeuknijduyye\"},\"name\":\"ydjfb\",\"type\":\"yv\",\"etag\":\"ulrtywikdmh\",\"\":{\"ufr\":\"datauflgbhgauacdixm\",\"ozo\":\"dataryjqgdkf\"},\"id\":\"qb\"},{\"properties\":{\"folder\":{\"name\":\"vefgwbmqjchntas\"},\"description\":\"ymxbulpzealb\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"datacmmzrrs\",\"dataubiwsdrnpxq\",\"dataodiffjxcjrmmua\"]}],\"policy\":{\"mode\":\"ibvjogjonmcy\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":2103208222}},\"allowVNetOverride\":false,\"status\":\"in\"},\"name\":\"fvfkak\",\"type\":\"ldtve\",\"etag\":\"oclzhz\",\"\":{\"amrdixtrekidswys\":\"datayuxgvttxpnrupz\"},\"id\":\"ruffgllukk\"}],\"nextLink\":\"vlxhrpqhvmblc\"}") + "{\"value\":[{\"properties\":{\"folder\":{\"name\":\"mmzrrscub\"},\"description\":\"sd\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"dataabwibvjogjo\",\"datamcyefoyzbam\",\"datain\"]},{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"dataoldtvevboclzhz\"]},{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datatxpnrupza\",\"datamrdixtreki\",\"dataswyskbruffg\"]},{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"dataxhrp\",\"datahvmblcouqehbhbc\"]}],\"policy\":{\"mode\":\"ziryrandoy\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1798358498}},\"allowVNetOverride\":true,\"status\":\"kfqlwx\"},\"name\":\"ykalsyga\",\"type\":\"njpnnbmj\",\"etag\":\"ibjgsjjxxahm\",\"\":{\"xyivpinbm\":\"datadzyqe\"},\"id\":\"bjijkgqxnh\"},{\"properties\":{\"folder\":{\"name\":\"eznjaujv\"},\"description\":\"nnggiycwkdtaawxw\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datanxwbjsidbirkfp\",\"datasokdgoge\",\"dataijymrhbguzozky\",\"datawnf\"]},{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"dataffjkutycyarnroo\",\"datag\",\"dataabzoghktdpyczhco\",\"dataocnhzqrottjzcfyj\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"datapqinf\",\"datazpyglqdhmrj\"]}],\"policy\":{\"m
ode\":\"alcxpjbyy\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":1715277105}},\"allowVNetOverride\":false,\"status\":\"kyhfqzvsqxfxj\"},\"name\":\"gcm\",\"type\":\"qjhhhqxuwyvc\",\"etag\":\"oyvivbsiz\",\"\":{\"lzijiufehgmvflnw\":\"dataszlbscm\",\"kxrerlniylylyfwx\":\"datav\"},\"id\":\"tgqztwhghmup\"},{\"properties\":{\"folder\":{\"name\":\"jtcdxabbujftaben\"},\"description\":\"klqpx\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}},{\"sourceEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"datagnhgook\",\"datatalvnbwgp\",\"dataemeluclv\",\"datajjukyrdnqodxah\"]},{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datazoqgyipemchga\",\"datasczuejdtxptlghwz\",\"dataomew\",\"dataj\"]},{\"targetEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"datamoaiancz\",\"datavodrrslblxydkxr\",\"datavvbxiwkgfbqljnq\",\"datahychocokuleh\"]}],\"policy\":{\"mode\":\"qlrqffaweyurk\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":662993818}},\"allowVNetOverride\":false,\"status\":\"juqdbrx\"},\"name\":\"gchbapxkiy\",\"type\":\"j\",\"etag\":\"ajb\",\"\":{\"u\":\"datagd\",\"vpmclujyxkyxlz\":\"dataioycbl\"},\"id\":\"jgkz\"}],\"nextLink\":\"taf\"}") .toObject(ChangeDataCaptureListResponse.class); - Assertions.assertEquals("lghktuidvrm", model.value().get(0).id()); - Assertions.assertEquals("cgdz", model.value().get(0).folder().name()); - Assertions.assertEquals("nr", model.value().get(0).description()); + Assertions.assertEquals("bjijkgqxnh", model.value().get(0).id()); + Assertions.assertEquals("mmzrrscub", model.value().get(0).folder().name()); + Assertions.assertEquals("sd", model.value().get(0).description()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.value().get(0).sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.value().get(0).targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals("tod", model.value().get(0).policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.value().get(0).policy().recurrence().frequency()); - Assertions.assertEquals(1443311872, model.value().get(0).policy().recurrence().interval()); + Assertions.assertEquals("ziryrandoy", model.value().get(0).policy().mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.value().get(0).policy().recurrence().frequency()); + Assertions.assertEquals(1798358498, model.value().get(0).policy().recurrence().interval()); Assertions.assertEquals(true, model.value().get(0).allowVNetOverride()); - Assertions.assertEquals("gvoavyunssxlgh", model.value().get(0).status()); - Assertions.assertEquals("vlxhrpqhvmblc", model.nextLink()); + Assertions.assertEquals("kfqlwx", model.value().get(0).status()); + Assertions.assertEquals("taf", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ChangeDataCaptureListResponse model = new ChangeDataCaptureListResponse() - .withValue(Arrays.asList( - new ChangeDataCaptureResourceInner().withId("lghktuidvrm").withFolder( - new ChangeDataCaptureFolder().withName("cgdz")).withDescription( - "nr") - .withSourceConnectionsInfo(Arrays.asList(new 
MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo(Arrays.asList( - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("datanamtuatmzw")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datamizvgbgatzuuvbx")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) - .withRelationships( - Arrays.asList("datahttzlswvajqfutlx", "dataoqza", "dataunwqr", "datazfrgqhaohcm")), - new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("datambpyryxamebly", "datayvk", "datakmrocxne", "datav")))) - .withPolicy(new MapperPolicy() - .withMode("tod").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(1443311872))) - .withAllowVNetOverride(true).withStatus("gvoavyunssxlgh").withAdditionalProperties( - mapOf("name", "egjlgvvpa", "etag", "ygdhgaqipirpiwr", "type", "ksgbuxan")), - new ChangeDataCaptureResourceInner().withId("n") - .withFolder(new ChangeDataCaptureFolder().withName("pdwwexymzvlazi")) - .withDescription("hpwvqsgnyyuu") - .withSourceConnectionsInfo(Arrays.asList( - new MapperSourceConnectionsInfo().withSourceEntities(Arrays.asList(new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo(Arrays.asList(new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - 
.withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datasrfhf")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datawcdommpvfqaw", "datafgbrtt")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datahajlfn", "datahiqfyuttdiy", "datab")), - new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("datak", "datactwwgzw", "datajlmec")))) - .withPolicy(new MapperPolicy().withMode("gygzyvn").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(488658265))) - .withAllowVNetOverride(true).withStatus("moqqtlffhzbk") - .withAdditionalProperties(mapOf("name", "jjjavfqnvhnq", "etag", "dbztjhqtfbov", "type", - "wdogiyetesyp")), - new ChangeDataCaptureResourceInner().withId("qb") - .withFolder(new ChangeDataCaptureFolder().withName("wqro")).withDescription("tuovmaonurj") - .withSourceConnectionsInfo( - Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities( - Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), + ChangeDataCaptureListResponse model + = new ChangeDataCaptureListResponse() + .withValue(Arrays.asList( + new ChangeDataCaptureResourceInner().withId("bjijkgqxnh") + .withFolder(new ChangeDataCaptureFolder().withName("mmzrrscub")) + .withDescription("sd") + .withSourceConnectionsInfo(Arrays.asList( new MapperSourceConnectionsInfo().withSourceEntities(Arrays.asList(new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), new MapperSourceConnectionsInfo() - .withSourceEntities( - Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) + .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), + new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) + .withTargetConnectionsInfo( + Arrays.asList( + new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings( + Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) + .withRelationships(Arrays.asList("dataabwibvjogjo", "datamcyefoyzbam", "datain")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), + new DataMapperMapping(), new DataMapperMapping())) + 
.withRelationships(Arrays.asList("dataoldtvevboclzhz")), + new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings( + Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) + .withRelationships( + Arrays.asList("datatxpnrupza", "datamrdixtreki", "dataswyskbruffg")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), + new DataMapperMapping(), new DataMapperMapping())) + .withRelationships(Arrays.asList("dataxhrp", "datahvmblcouqehbhbc")))) + .withPolicy(new MapperPolicy().withMode("ziryrandoy") + .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE) + .withInterval(1798358498))) + .withAllowVNetOverride(true) + .withStatus("kfqlwx") + .withAdditionalProperties( + mapOf("name", "ykalsyga", "etag", "ibjgsjjxxahm", "type", "njpnnbmj")), + new ChangeDataCaptureResourceInner() + .withId("tgqztwhghmup") + .withFolder(new ChangeDataCaptureFolder().withName("eznjaujv")) + .withDescription("nnggiycwkdtaawxw") + .withSourceConnectionsInfo(Arrays.asList( + new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), + new MapperTable(), new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), new MapperSourceConnectionsInfo().withSourceEntities(Arrays.asList(new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo( - Arrays.asList( + .withTargetConnectionsInfo(Arrays.asList( + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) + .withRelationships(Arrays.asList("datanxwbjsidbirkfp", "datasokdgoge", + "dataijymrhbguzozky", "datawnf")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) + .withRelationships( + Arrays.asList("dataffjkutycyarnroo", "datag", "dataabzoghktdpyczhco", + "dataocnhzqrottjzcfyj")), new MapperTargetConnectionsInfo() .withTargetEntities( Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), - new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datascvsfxigctm")), + .withDataMapperMappings(Arrays + .asList(new DataMapperMapping(), new DataMapperMapping(), new DataMapperMapping())) + .withRelationships(Arrays.asList("datapqinf", "datazpyglqdhmrj")))) + .withPolicy(new MapperPolicy().withMode("alcxpjbyy") + .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.SECOND) + .withInterval(1715277105))) + .withAllowVNetOverride(false) + .withStatus("kyhfqzvsqxfxj") + .withAdditionalProperties(mapOf("name", "gcm", "etag", "oyvivbsiz", "type", "qjhhhqxuwyvc")), + 
new ChangeDataCaptureResourceInner() + .withId("jgkz") + .withFolder(new ChangeDataCaptureFolder().withName("jtcdxabbujftaben")) + .withDescription("klqpx") + .withSourceConnectionsInfo(Arrays.asList( + new MapperSourceConnectionsInfo() + .withSourceEntities( + Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) + .withTargetConnectionsInfo(Arrays.asList( new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("dataccyd", "datatce", "datakdqkkyihzt", "dataeq")), + .withRelationships(Arrays.asList("datagnhgook", "datatalvnbwgp", "dataemeluclv", + "datajjukyrdnqodxah")), new MapperTargetConnectionsInfo() - .withTargetEntities( - Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping())) + .withDataMapperMappings( + Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), + new DataMapperMapping(), new DataMapperMapping())) .withRelationships( - Arrays.asList("dataychillcecfe", "datauwaoaguhicqlli", "datastacsjvhrweftkwq")))) - .withPolicy(new MapperPolicy().withMode("pmvssehaep").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(1216217570))) - .withAllowVNetOverride(true).withStatus("upeuknijduyye") - .withAdditionalProperties(mapOf("name", "ydjfb", "etag", "ulrtywikdmh", "type", "yv")), - new ChangeDataCaptureResourceInner().withId("ruffgllukk") - .withFolder(new ChangeDataCaptureFolder().withName("vefgwbmqjchntas")) - .withDescription("ymxbulpzealb") - .withSourceConnectionsInfo(Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), - new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo(Arrays.asList(new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("datacmmzrrs", "dataubiwsdrnpxq", "dataodiffjxcjrmmua")))) - .withPolicy(new MapperPolicy().withMode("ibvjogjonmcy").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(2103208222))) - .withAllowVNetOverride(false).withStatus("in") - 
.withAdditionalProperties(mapOf("name", "fvfkak", "etag", "oclzhz", "type", "ldtve")))) - .withNextLink("vlxhrpqhvmblc"); + Arrays.asList("datazoqgyipemchga", "datasczuejdtxptlghwz", "dataomew", "dataj")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), + new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), + new DataMapperMapping())) + .withRelationships(Arrays.asList("datamoaiancz", "datavodrrslblxydkxr", + "datavvbxiwkgfbqljnq", "datahychocokuleh")))) + .withPolicy(new MapperPolicy().withMode("qlrqffaweyurk") + .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.SECOND) + .withInterval(662993818))) + .withAllowVNetOverride(false) + .withStatus("juqdbrx") + .withAdditionalProperties(mapOf("name", "gchbapxkiy", "etag", "ajb", "type", "j")))) + .withNextLink("taf"); model = BinaryData.fromObject(model).toObject(ChangeDataCaptureListResponse.class); - Assertions.assertEquals("lghktuidvrm", model.value().get(0).id()); - Assertions.assertEquals("cgdz", model.value().get(0).folder().name()); - Assertions.assertEquals("nr", model.value().get(0).description()); + Assertions.assertEquals("bjijkgqxnh", model.value().get(0).id()); + Assertions.assertEquals("mmzrrscub", model.value().get(0).folder().name()); + Assertions.assertEquals("sd", model.value().get(0).description()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.value().get(0).sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.value().get(0).targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals("tod", model.value().get(0).policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.value().get(0).policy().recurrence().frequency()); - Assertions.assertEquals(1443311872, model.value().get(0).policy().recurrence().interval()); + Assertions.assertEquals("ziryrandoy", model.value().get(0).policy().mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.value().get(0).policy().recurrence().frequency()); + Assertions.assertEquals(1798358498, model.value().get(0).policy().recurrence().interval()); Assertions.assertEquals(true, model.value().get(0).allowVNetOverride()); - Assertions.assertEquals("gvoavyunssxlgh", model.value().get(0).status()); - Assertions.assertEquals("vlxhrpqhvmblc", model.nextLink()); + Assertions.assertEquals("kfqlwx", model.value().get(0).status()); + Assertions.assertEquals("taf", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java index 6b2f0fb68ed22..64fed79ac336c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java @@ -29,148 +29,229 @@ public final class ChangeDataCaptureResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { 
ChangeDataCaptureResourceInner model = BinaryData.fromString( - "{\"properties\":{\"folder\":{\"name\":\"ehbhb\"},\"description\":\"sziryrandoyp\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"ormkfqlwxldyk\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"g\",\"parameters\":{\"sibjgs\":\"datanjpnnbmj\",\"yqegx\":\"datajxxahmrnad\",\"inbmh\":\"dataiv\",\"bkezn\":\"databjijkgqxnh\"}},\"linkedServiceType\":\"ujvaannggi\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"wfekaumrrqmb\",\"properties\":{}},{\"name\":\"kratbnxwbj\",\"properties\":{}},{\"name\":\"birkfpksokdg\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"ijymrhbguzozky\",\"parameters\":{\"zhhh\":\"dataf\",\"mffjkutycyarn\":\"datao\"}},\"linkedServiceType\":\"ohguabz\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"z\",\"sourceEntityName\":\"oeocnhzqrott\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datayjzp\"},{\"targetEntityName\":\"rl\",\"sourceEntityName\":\"apqinf\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"dataglqdhm\"}],\"relationships\":[\"dataralcxpjbyypsj\",\"dataqcjenkyhf\",\"datazv\",\"dataqxfx\"]},{\"targetEntities\":[{\"name\":\"cmpzqjhhhqx\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"cacoyvivbsiz\",\"parameters\":{\"lzijiufehgmvflnw\":\"dataszlbscm\",\"kxrerlniylylyfwx\":\"datav\"}},\"linkedServiceType\":\"tgqztwhghmup\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"bb\",\"sourceEntityName\":\"ftabenbbklqp\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"dataafeddwwnlza\"}],\"relationships\":[\"datau\"]},{\"targetEntities\":[{\"name\":\"gookrtalvnb\",\"properties\":{}},{\"name\":\"bemeluclvd\",\"properties\":{}},{\"name\":\"kyrdnqodx\",\"properties\":{}},{\"name\":\"xhqf\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"zoqgyipemchga\",\"parameters\":{\"xptlghwzho\":\"datazuejd\",\"s\":\"dataewj\",\"vodrrslblxydkxr\":\"dataliuhqawmoaiancz\"}},\"linkedServiceType\":\"vbxiwkgfbqlj\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"okulehurqlrqf\",\"sourceEntityName\":\"weyurkphyjd\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datajuqdbrx\"},{\"targetEntityName\":\"gchbapxkiy\",\"sourceEntityName\":\"j\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"databuscgduus\"}],\"relationships\":[\"datacblevpmc\",\"dataujyxkyxlzgsj\",\"datakzzltafhbzf\"]}],\"policy\":{\"mode\":\"vwmbjlzqsczpg\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":1263601640}},\"allowVNetOverride\":false,\"status\":\"wow\"},\"name\":\"ptnuwjtkschgc\",\"type\":\"y\",\"etag\":\"eseyqr\",\"\":{\"kwiswskukjtas\":\"dataeldotjv\"},\"id\":\"wispkxk\"}") + 
"{\"properties\":{\"folder\":{\"name\":\"ffovwmbjlzqsczp\"},\"description\":\"dwnapf\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"ftptnuwj\",\"properties\":{}},{\"name\":\"chgcgqyh\",\"properties\":{}},{\"name\":\"eyqrhv\",\"properties\":{}},{\"name\":\"dotjvdk\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"skukjtasbv\",\"parameters\":{\"dlqvtwknvg\":\"datapkxkdtxfk\",\"gtywatmqaqkue\":\"datamb\"}},\"linkedServiceType\":\"groeshoyg\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]}},{\"sourceEntities\":[{\"name\":\"aoyte\",\"properties\":{}},{\"name\":\"uvjmv\",\"properties\":{}},{\"name\":\"dwckygroe\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"ljdjuskbrreqy\",\"parameters\":{\"ysh\":\"dataeysfaqegplw\",\"ybwptda\":\"dataddkvbxgkqu\",\"mt\":\"dataarvvlfnt\"}},\"linkedServiceType\":\"iwenazero\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]}},{\"sourceEntities\":[{\"name\":\"kdnwqapfgsdpcve\",\"properties\":{}},{\"name\":\"zhhkuuipldqqc\",\"properties\":{}},{\"name\":\"valblhtjqv\",\"properties\":{}},{\"name\":\"wehtaemxh\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"ev\",\"parameters\":{\"yzatvfuzkaft\":\"dataivzrrryveimipsk\",\"syeipqd\":\"datavvruxwi\"}},\"linkedServiceType\":\"jtgrqgdgkkileplk\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"aedorvvmqf\",\"properties\":{}},{\"name\":\"gbdg\",\"properties\":{}},{\"name\":\"gxdgdhpab\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"jddvjsaqw\",\"parameters\":{\"f\":\"datamwllcolsrsxapt\",\"jbekpeeksnbksdq\":\"dataexcgjokjljnhvl\"}},\"linkedServiceType\":\"vyklxeslkhhus\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"wqjwgok\",\"sourceEntityName\":\"ejjjkxybwfdb\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datatensvkzykj\"},{\"targetEntityName\":\"kns\",\"sourceEntityName\":\"wushcdp\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"dataqrmgjfbpkuw\"}],\"relationships\":[\"dataiojfizfavkjzwfbc\",\"dataaykmmf\",\"datasbfwxr\",\"dataxmdewsrsxkrplbj\"]},{\"targetEntities\":[{\"name\":\"wwviyo\",\"properties\":{}},{\"name\":\"uhbrnnhjxsq\",\"properties\":{}},{\"name\":\"qkbiwet\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"c\",\"parameters\":{\"etzlexbsfledynoj\":\"dataqyhgf\",\"fbzkk\":\"dataziu\"}},\"linkedServiceType\":\"nhqs\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"pbafvafhlbylc\",\"sourceEntityName\":\"evxrhyz\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datasofpltd\"},{\"targetEntityName\":\"airrhvhfnracw\",\"sourceEntityName\":\"qigtuujwouhdaws\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datajbx\"},{\"targetEntityName\":\"ybvitvqkjyaznumt\",\"sourceEntityName\":\"m\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datahozfn\"},{\"targetEntityName\":\"exlvxnoakiz\",\"sourceEntityName\":\"aikn\",\"sourceConnectionReference\":{},\"
attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datauwiguyxlyk\"}],\"relationships\":[\"datavx\",\"datacwxhmpej\",\"datalkexaonwivkcqh\"]},{\"targetEntities\":[{\"name\":\"knlccrmmkyup\",\"properties\":{}},{\"name\":\"byqjfkakfqfrkem\",\"properties\":{}},{\"name\":\"dudxjascowvfdjk\",\"properties\":{}},{\"name\":\"phlkksnm\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"fijd\",\"parameters\":{\"uqwqulsutrjbhxyk\":\"dataqnwsithuqolyah\",\"g\":\"datahyqezvqq\",\"ve\":\"datarftb\"}},\"linkedServiceType\":\"e\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"fwhreagkhyxv\",\"sourceEntityName\":\"tvbczsulm\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datamepjpf\"}],\"relationships\":[\"datakgsangpszng\",\"datafpgylkve\",\"datajujcngoad\"]},{\"targetEntities\":[{\"name\":\"zrgjfok\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"oitpkpztrgdgx\",\"parameters\":{\"l\":\"dataqraswugyxpqitwei\",\"yltcoqcuj\":\"datavskbuhzacaq\",\"ztjofqcvovjufyc\":\"datadsxzakuejkmvb\",\"gthortudaw\":\"datajmlbemyejiriux\"}},\"linkedServiceType\":\"jfel\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"qnzmnhiil\",\"sourceEntityName\":\"lwcjgckbbcccgzpr\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datayuffatsgf\"},{\"targetEntityName\":\"p\",\"sourceEntityName\":\"x\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"datahiqdxy\"}],\"relationships\":[\"datapnuhzafccnu\",\"dataiig\",\"datayl\",\"dataui\"]}],\"policy\":{\"mode\":\"xvatvcr\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1877089971}},\"allowVNetOverride\":false,\"status\":\"h\"},\"name\":\"yhzlwxaeaovurexd\",\"type\":\"sbdweaderzmwnt\",\"etag\":\"agttm\",\"\":{\"azjcgmxitpfin\":\"dataagoaqylkjztji\",\"mtbdrvcqgu\":\"datacpdltkrlg\",\"urelyujlfyoump\":\"datafzhompheq\"},\"id\":\"yec\"}") .toObject(ChangeDataCaptureResourceInner.class); - Assertions.assertEquals("wispkxk", model.id()); - Assertions.assertEquals("ehbhb", model.folder().name()); - Assertions.assertEquals("sziryrandoyp", model.description()); - Assertions.assertEquals("ormkfqlwxldyk", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("g", model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ujvaannggi", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("yec", model.id()); + Assertions.assertEquals("ffovwmbjlzqsczp", model.folder().name()); + Assertions.assertEquals("dwnapf", model.description()); + Assertions.assertEquals("ftptnuwj", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); + Assertions.assertEquals("skukjtasbv", + model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); + Assertions.assertEquals("groeshoyg", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("wfekaumrrqmb", 
model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("ijymrhbguzozky", + Assertions.assertEquals("aedorvvmqf", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); + Assertions.assertEquals("jddvjsaqw", model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ohguabz", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("vyklxeslkhhus", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("z", + Assertions.assertEquals("wqjwgok", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("oeocnhzqrott", + Assertions.assertEquals("ejjjkxybwfdb", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("vwmbjlzqsczpg", model.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.policy().recurrence().frequency()); - Assertions.assertEquals(1263601640, model.policy().recurrence().interval()); + Assertions.assertEquals("xvatvcr", model.policy().mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); + Assertions.assertEquals(1877089971, model.policy().recurrence().interval()); Assertions.assertEquals(false, model.allowVNetOverride()); - Assertions.assertEquals("wow", model.status()); + Assertions.assertEquals("h", model.status()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ChangeDataCaptureResourceInner model - = new ChangeDataCaptureResourceInner() - .withId( - "wispkxk") - .withFolder(new ChangeDataCaptureFolder().withName("ehbhb")).withDescription( - "sziryrandoyp") - .withSourceConnectionsInfo(Arrays.asList(new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable().withName("ormkfqlwxldyk"))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("g") - .withParameters(mapOf("sibjgs", "datanjpnnbmj", "yqegx", "datajxxahmrnad", "inbmh", - "dataiv", "bkezn", "databjijkgqxnh"))) - .withLinkedServiceType("ujvaannggi").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties()))))) + = new ChangeDataCaptureResourceInner().withId("yec") + .withFolder(new ChangeDataCaptureFolder().withName("ffovwmbjlzqsczp")) + .withDescription("dwnapf") + .withSourceConnectionsInfo( + Arrays.asList( + new MapperSourceConnectionsInfo().withSourceEntities(Arrays.asList(new MapperTable() + .withName("ftptnuwj"), new MapperTable().withName("chgcgqyh"), + new MapperTable().withName("eyqrhv"), new MapperTable().withName("dotjvdk"))) + .withConnection(new MapperConnection().withLinkedService( + new LinkedServiceReference().withReferenceName("skukjtasbv") + .withParameters(mapOf("dlqvtwknvg", "datapkxkdtxfk", "gtywatmqaqkue", "datamb"))) + .withLinkedServiceType("groeshoyg") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties( + Arrays + .asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), + new 
MapperDslConnectorProperties()))), + new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList(new MapperTable().withName("aoyte"), + new MapperTable().withName("uvjmv"), new MapperTable().withName("dwckygroe"))) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("ljdjuskbrreqy") + .withParameters(mapOf("ysh", "dataeysfaqegplw", "ybwptda", "dataddkvbxgkqu", + "mt", "dataarvvlfnt"))) + .withLinkedServiceType("iwenazero") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties()))), + new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList(new MapperTable().withName("kdnwqapfgsdpcve"), + new MapperTable().withName("zhhkuuipldqqc"), new MapperTable().withName("valblhtjqv"), + new MapperTable().withName("wehtaemxh"))) + .withConnection(new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("ev") + .withParameters( + mapOf("yzatvfuzkaft", "dataivzrrryveimipsk", "syeipqd", "datavvruxwi"))) + .withLinkedServiceType("jtgrqgdgkkileplk") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), + new MapperDslConnectorProperties()))))) .withTargetConnectionsInfo( Arrays .asList( - new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable() - .withName("wfekaumrrqmb"), new MapperTable().withName("kratbnxwbj"), - new MapperTable().withName("birkfpksokdg"))) + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable().withName("aedorvvmqf"), + new MapperTable().withName("gbdg"), new MapperTable().withName("gxdgdhpab"))) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("jddvjsaqw") + .withParameters(mapOf("f", "datamwllcolsrsxapt", "jbekpeeksnbksdq", + "dataexcgjokjljnhvl"))) + .withLinkedServiceType("vyklxeslkhhus") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))) + .withDataMapperMappings(Arrays.asList( + new DataMapperMapping().withTargetEntityName("wqjwgok") + .withSourceEntityName("ejjjkxybwfdb") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("datatensvkzykj"), + new DataMapperMapping().withTargetEntityName("kns") + .withSourceEntityName("wushcdp") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("dataqrmgjfbpkuw"))) + .withRelationships( + Arrays + .asList("dataiojfizfavkjzwfbc", "dataaykmmf", "datasbfwxr", + "dataxmdewsrsxkrplbj")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable().withName("wwviyo"), + new MapperTable().withName("uhbrnnhjxsq"), new MapperTable().withName("qkbiwet"))) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("ijymrhbguzozky") - .withParameters(mapOf("zhhh", "dataf", "mffjkutycyarn", "datao"))) - .withLinkedServiceType("ohguabz").withType(ConnectionType.LINKEDSERVICETYPE) - 
.withIsInlineDataset(true).withCommonDslConnectorProperties(Arrays.asList( + .withLinkedService(new LinkedServiceReference().withReferenceName("c") + .withParameters(mapOf("etzlexbsfledynoj", "dataqyhgf", "fbzkk", "dataziu"))) + .withLinkedServiceType("nhqs") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))) .withDataMapperMappings( - Arrays.asList(new DataMapperMapping().withTargetEntityName("z") - .withSourceEntityName("oeocnhzqrott").withSourceConnectionReference( - new MapperConnectionReference()) - .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("datayjzp"), - new DataMapperMapping().withTargetEntityName("rl").withSourceEntityName( - "apqinf").withSourceConnectionReference(new MapperConnectionReference()) + Arrays.asList( + new DataMapperMapping().withTargetEntityName("pbafvafhlbylc") + .withSourceEntityName("evxrhyz") + .withSourceConnectionReference(new MapperConnectionReference()) .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("dataglqdhm"))) - .withRelationships(Arrays.asList("dataralcxpjbyypsj", "dataqcjenkyhf", "datazv", - "dataqxfx")), - new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable() - .withName("cmpzqjhhhqx"))).withConnection( - new MapperConnection() - .withLinkedService( - new LinkedServiceReference().withReferenceName("cacoyvivbsiz") - .withParameters(mapOf("lzijiufehgmvflnw", "dataszlbscm", - "kxrerlniylylyfwx", "datav"))) - .withLinkedServiceType("tgqztwhghmup").withType( - ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true).withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping().withTargetEntityName("bb") - .withSourceEntityName("ftabenbbklqp") - .withSourceConnectionReference(new MapperConnectionReference()) - .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("dataafeddwwnlza"))) - .withRelationships(Arrays.asList("datau")), + .withSourceDenormalizeInfo("datasofpltd"), + new DataMapperMapping().withTargetEntityName("airrhvhfnracw") + .withSourceEntityName("qigtuujwouhdaws") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("datajbx"), + new DataMapperMapping().withTargetEntityName("ybvitvqkjyaznumt") + .withSourceEntityName("m") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("datahozfn"), + new DataMapperMapping().withTargetEntityName("exlvxnoakiz") + .withSourceEntityName("aikn") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("datauwiguyxlyk"))) + .withRelationships(Arrays.asList("datavx", "datacwxhmpej", "datalkexaonwivkcqh")), new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable().withName("gookrtalvnb"), - new MapperTable().withName("bemeluclvd"), new MapperTable().withName("kyrdnqodx"), - new MapperTable().withName("xhqf"))) + .withTargetEntities(Arrays.asList(new MapperTable().withName("knlccrmmkyup"), + new 
MapperTable().withName("byqjfkakfqfrkem"), + new MapperTable().withName("dudxjascowvfdjk"), + new MapperTable().withName("phlkksnm"))) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("zoqgyipemchga") - .withParameters(mapOf("xptlghwzho", "datazuejd", "s", "dataewj", - "vodrrslblxydkxr", "dataliuhqawmoaiancz"))) - .withLinkedServiceType("vbxiwkgfbqlj").withType(ConnectionType.LINKEDSERVICETYPE) + .withLinkedService(new LinkedServiceReference().withReferenceName("fijd") + .withParameters(mapOf("uqwqulsutrjbhxyk", "dataqnwsithuqolyah", "g", + "datahyqezvqq", "ve", "datarftb"))) + .withLinkedServiceType("e") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), + new MapperDslConnectorProperties()))) + .withDataMapperMappings(Arrays + .asList(new DataMapperMapping().withTargetEntityName("fwhreagkhyxv") + .withSourceEntityName("tvbczsulm") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + .withSourceDenormalizeInfo("datamepjpf"))) + .withRelationships(Arrays.asList("datakgsangpszng", "datafpgylkve", "datajujcngoad")), + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable().withName("zrgjfok"))) + .withConnection(new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("oitpkpztrgdgx") + .withParameters(mapOf("l", "dataqraswugyxpqitwei", "yltcoqcuj", + "datavskbuhzacaq", "ztjofqcvovjufyc", "datadsxzakuejkmvb", "gthortudaw", + "datajmlbemyejiriux"))) + .withLinkedServiceType("jfel") + .withType(ConnectionType.LINKEDSERVICETYPE) .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping() - .withTargetEntityName("okulehurqlrqf").withSourceEntityName("weyurkphyjd") - .withSourceConnectionReference(new MapperConnectionReference()) - .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("datajuqdbrx"), - new DataMapperMapping().withTargetEntityName("gchbapxkiy").withSourceEntityName("j") + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties()))) + .withDataMapperMappings(Arrays.asList( + new DataMapperMapping().withTargetEntityName("qnzmnhiil") + .withSourceEntityName("lwcjgckbbcccgzpr") .withSourceConnectionReference(new MapperConnectionReference()) .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("databuscgduus"))) - .withRelationships( - Arrays.asList("datacblevpmc", "dataujyxkyxlzgsj", "datakzzltafhbzf")))) - .withPolicy(new MapperPolicy().withMode("vwmbjlzqsczpg").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(1263601640))) - .withAllowVNetOverride(false).withStatus("wow") - .withAdditionalProperties(mapOf("name", "ptnuwjtkschgc", "etag", "eseyqr", "type", "y")); + .withSourceDenormalizeInfo("datayuffatsgf"), + new DataMapperMapping().withTargetEntityName("p") + .withSourceEntityName("x") + .withSourceConnectionReference(new MapperConnectionReference()) + .withAttributeMappingInfo(new MapperAttributeMappings()) + 
.withSourceDenormalizeInfo("datahiqdxy"))) + .withRelationships(Arrays.asList("datapnuhzafccnu", "dataiig", "datayl", "dataui")))) + .withPolicy(new MapperPolicy().withMode("xvatvcr") + .withRecurrence( + new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(1877089971))) + .withAllowVNetOverride(false) + .withStatus("h") + .withAdditionalProperties(mapOf("name", "yhzlwxaeaovurexd", "etag", "agttm", "type", "sbdweaderzmwnt")); model = BinaryData.fromObject(model).toObject(ChangeDataCaptureResourceInner.class); - Assertions.assertEquals("wispkxk", model.id()); - Assertions.assertEquals("ehbhb", model.folder().name()); - Assertions.assertEquals("sziryrandoyp", model.description()); - Assertions.assertEquals("ormkfqlwxldyk", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("g", model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ujvaannggi", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("yec", model.id()); + Assertions.assertEquals("ffovwmbjlzqsczp", model.folder().name()); + Assertions.assertEquals("dwnapf", model.description()); + Assertions.assertEquals("ftptnuwj", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); + Assertions.assertEquals("skukjtasbv", + model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); + Assertions.assertEquals("groeshoyg", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("wfekaumrrqmb", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("ijymrhbguzozky", + Assertions.assertEquals("aedorvvmqf", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); + Assertions.assertEquals("jddvjsaqw", model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ohguabz", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("vyklxeslkhhus", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("z", + Assertions.assertEquals("wqjwgok", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("oeocnhzqrott", + Assertions.assertEquals("ejjjkxybwfdb", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("vwmbjlzqsczpg", model.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.policy().recurrence().frequency()); - Assertions.assertEquals(1263601640, model.policy().recurrence().interval()); + Assertions.assertEquals("xvatvcr", model.policy().mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); + Assertions.assertEquals(1877089971, model.policy().recurrence().interval()); Assertions.assertEquals(false, model.allowVNetOverride()); - Assertions.assertEquals("wow", model.status()); + Assertions.assertEquals("h", 
model.status()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java index 69c62b8e905ca..602a1ec128a09 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java @@ -31,280 +31,262 @@ public final class ChangeDataCaptureTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ChangeDataCapture model = BinaryData.fromString( - "{\"folder\":{\"name\":\"fkndl\"},\"description\":\"twknvgm\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"yw\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{},{},{},{}]}},{\"name\":\"ueatgroe\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"byfqxkfaoy\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"jmvqmtd\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"jn\",\"parameters\":{\"rreqynkceysfaqe\":\"datajdjusk\",\"ryshwddkvbxgk\":\"datapl\",\"vvlfntymtp\":\"datausybwptdaca\",\"zrsq\":\"dataiwenazero\"}},\"linkedServiceType\":\"sxkdnwqapfgsdpc\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"h\",\"value\":\"datauipldqq\"},{\"name\":\"ekvalblhtjq\",\"value\":\"datayvwehtaemxh\"},{\"name\":\"ysev\",\"value\":\"dataxivzrrry\"},{\"name\":\"imipskdyzatvfuz\",\"value\":\"dataftjvvruxwigsye\"}]}},{\"sourceEntities\":[{\"name\":\"smjtgrqgdg\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"kcsmk\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"dorvvmqfloy\",\"parameters\":{\"gdexjd\":\"datagwumgxdgdhpa\",\"wllcolsr\":\"datavjsaqwotm\",\"ljnhvlqj\":\"dataxaptefhexcgjok\"}},\"linkedServiceType\":\"kpeeksnbksdqhj\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"lkhhu\",\"value\":\"datacpoq\"}]}},{\"sourceEntities\":[{\"name\":\"wqjwgok\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"xybwfdbkjbzten\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"jknsxfwu\",\"parameters\":{\"pkuwxeoioj\":\"datadpkupnqrmgjf\"}},\"linkedServiceType\":\"zfav\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"yay\",\"value\":\"datamfzsbf\"},{\"name\":\"rzx\",\"value\":\"dataewsrsxkrplbjaze\"},{\"name\":\"w\",\"value\":\"datayoyp\"}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"nnhj\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"kbiwetpozyc\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"fsetz\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"dynojpziuwfb\",\"parameters\":{\"qsyclj\":\"datadtn\",\"cbevxrhyzdfw\"
:\"dataelpkpbafvafhlbyl\",\"mairrh\":\"datasofpltd\"}},\"linkedServiceType\":\"fnrac\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"uuj\",\"value\":\"datauhd\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"grbjbxsjybvitvqk\",\"sourceEntityName\":\"az\",\"sourceConnectionReference\":{\"connectionName\":\"tggmuwdchozfnkfe\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datakizvoa\"}],\"relationships\":[\"dataa\",\"datalnuwiguy\"]},{\"targetEntities\":[{\"name\":\"wphvxz\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"jtlkexaonwivkcqh\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"ccrmmk\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"yqjf\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"em\",\"parameters\":{\"dxphlk\":\"datadudxjascowvfdjk\",\"dkz\":\"datasnmgzvyfi\",\"uqwqulsutrjbhxyk\":\"dataqnwsithuqolyah\"}},\"linkedServiceType\":\"y\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"g\",\"value\":\"dataftbcvexreuquow\"},{\"name\":\"jv\",\"value\":\"datahreagk\"},{\"name\":\"xv\",\"value\":\"datatvbczsulm\"},{\"name\":\"glmep\",\"value\":\"datafs\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"sa\",\"sourceEntityName\":\"psznga\",\"sourceConnectionReference\":{\"connectionName\":\"ylkvecjuj\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"dataedmzrgjfoknub\"},{\"targetEntityName\":\"itpkpztrgdg\",\"sourceEntityName\":\"coqra\",\"sourceConnectionReference\":{\"connectionName\":\"gyxpqit\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{},{}]},\"sourceDenormalizeInfo\":\"dataskbuhzaca\"},{\"targetEntityName\":\"yltcoqcuj\",\"sourceEntityName\":\"sxzakuejkm\",\"sourceConnectionReference\":{\"connectionName\":\"ztjofqcvovjufyc\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{}]},\"sourceDenormalizeInfo\":\"datayeji\"}],\"relationships\":[\"dataxeg\",\"datahortu\",\"dataawlpjfelqerpp\",\"datacbgqnzmnhiil\"]},{\"targetEntities\":[{\"name\":\"cjgckbbcccgzpra\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{},{}]}},{\"name\":\"a\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"wcxbyubhiqdxyurn\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"ccnuhiig\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"xvatvcr\",\"parameters\":{\"bqxvhcsyhzlwxae\":\"datab\"}},\"linkedServiceType\":\"vurex\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"ead\",\"value\":\"datazmwntopagt\"},{\"name\":\"v\",\"value\":\"dataagoaqylkjztji\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"cgm\",\"sourceEntityName\":\"tpfinzcpdltkr\",\"sourceConnectionReference\":{\"connectionName\":\"mtbdrvcqgu\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{}]},\"sourceDenormalizeInfo\":\"dataheqdurelyujlfyou\"}],\"relationships\":[\"datakyeclcdigpta\",\"databrzmqxucycijoclx\",\"datautgjcyz\",\"datazjd\"]}],\"policy\":{\
"mode\":\"qjbtxjeaoqaqbzgy\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1743289619}},\"allowVNetOverride\":true,\"status\":\"wbqamteuliy\"}") + "{\"folder\":{\"name\":\"igptajbrzmqxucyc\"},\"description\":\"oclxiut\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"yzjdnrqjbt\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"qbzg\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"tbwbqamteuli\",\"parameters\":{\"uxx\":\"datapkcvmwf\",\"iciijqpkzfbojx\":\"datapmywbormcq\",\"nkwywzwofa\":\"datamcsmyqwixvcp\",\"t\":\"dataickduoiqta\"}},\"linkedServiceType\":\"sknxrwzawnvsbcf\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"hycvdimwrzre\",\"value\":\"datagy\"},{\"name\":\"utrwpweryekz\",\"value\":\"datahmeott\"},{\"name\":\"jyosxwwh\",\"value\":\"datajtfvpndpmiljpn\"}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"qllzsauzpjlxeehu\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{},{}]}},{\"name\":\"aymezx\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"fdsajred\",\"parameters\":{\"afpwzyifrkgwl\":\"datayshtuwgmev\",\"zdyi\":\"dataxeqipx\",\"abnsmj\":\"datasfayorpravkjoges\",\"qpkevmyltjc\":\"datawynqxaekqsykvwj\"}},\"linkedServiceType\":\"pxklurccl\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"oytzpose\",\"value\":\"dataigpxvkq\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"xvpif\",\"sourceEntityName\":\"aifyzyzeyuubeids\",\"sourceConnectionReference\":{\"connectionName\":\"ytoithgygvfl\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{}]},\"sourceDenormalizeInfo\":\"dataynkrxwetw\"},{\"targetEntityName\":\"rcyrucpcunnu\",\"sourceEntityName\":\"qumoeno\",\"sourceConnectionReference\":{\"connectionName\":\"ienhqhskndnelq\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datawf\"}],\"relationships\":[\"datan\",\"datayopetxivc\",\"datarl\"]},{\"targetEntities\":[{\"name\":\"caephbl\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"bqsdtcjbctvi\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{},{}]}},{\"name\":\"uo\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"sqhzvbrzcdbanfz\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"atkdbmwnrd\",\"parameters\":{\"axljal\":\"databqbnaomhjrmkuh\",\"cjmobcanc\":\"datai\",\"xf\":\"dataexxqcwg\"}},\"linkedServiceType\":\"aknokzwjjzrl\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"yfytpq\",\"value\":\"datax\"},{\"name\":\"m\",\"value\":\"datajivyqlkjuv\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"slzoyov\",\"sourceEntityName\":\"dbpqvybefgvm\",\"sourceConnectionReference\":{\"connectionName\":\"kcvtl\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datacua\"},{\"targetEntityName\":\"rhunlp\",\"sourceEntityName\":\"ykycndzfqi\",\"sourceConnectionReference\":{\"connectionName\":\"euy\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attri
buteMappings\":[{},{},{},{}]},\"sourceDenormalizeInfo\":\"datagltbxoeeo\"},{\"targetEntityName\":\"lnf\",\"sourceEntityName\":\"y\",\"sourceConnectionReference\":{\"connectionName\":\"qdbpbhfckdvezc\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"dataddubbnqfblhkal\"}],\"relationships\":[\"dataavawugiqj\",\"dataiogqgdminictte\",\"datajohiyg\",\"datapnbonhpcz\"]},{\"targetEntities\":[{\"name\":\"tp\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"chkhufm\",\"parameters\":{\"zulo\":\"datamqyjgy\"}},\"linkedServiceType\":\"aeuzanh\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"wphpzfngqj\",\"value\":\"dataidftujwjjufwbe\"},{\"name\":\"k\",\"value\":\"datarhtssr\"},{\"name\":\"nmdvha\",\"value\":\"datavjytiqswbq\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"xiytxtdgukvl\",\"sourceEntityName\":\"ktg\",\"sourceConnectionReference\":{\"connectionName\":\"youambewr\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{}]},\"sourceDenormalizeInfo\":\"datagmmuteyxey\"},{\"targetEntityName\":\"qigijiitnspxlzde\",\"sourceEntityName\":\"grijwaiufanrayb\",\"sourceConnectionReference\":{\"connectionName\":\"qfrojsydgrhyd\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{},{}]},\"sourceDenormalizeInfo\":\"dataskieca\"}],\"relationships\":[\"datazmxieqvdsmaklix\",\"datacahyhx\"]}],\"policy\":{\"mode\":\"ybxawoijpodtblx\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1810381006}},\"allowVNetOverride\":false,\"status\":\"qhykincn\"}") .toObject(ChangeDataCapture.class); - Assertions.assertEquals("fkndl", model.folder().name()); - Assertions.assertEquals("twknvgm", model.description()); - Assertions.assertEquals("yw", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("jn", + Assertions.assertEquals("igptajbrzmqxucyc", model.folder().name()); + Assertions.assertEquals("oclxiut", model.description()); + Assertions.assertEquals("yzjdnrqjbt", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); + Assertions.assertEquals("tbwbqamteuli", model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("sxkdnwqapfgsdpc", + Assertions.assertEquals("sknxrwzawnvsbcf", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("h", + Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); + Assertions.assertEquals("hycvdimwrzre", model.sourceConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("nnhj", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("dynojpziuwfb", + Assertions.assertEquals("qllzsauzpjlxeehu", + model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); + Assertions.assertEquals("fdsajred", model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("fnrac", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); + 
Assertions.assertEquals("pxklurccl", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("uuj", + Assertions.assertEquals("oytzpose", model.targetConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("grbjbxsjybvitvqk", + Assertions.assertEquals("xvpif", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("az", + Assertions.assertEquals("aifyzyzeyuubeids", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("tggmuwdchozfnkfe", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0) - .sourceConnectionReference().connectionName()); + Assertions.assertEquals("ytoithgygvfl", + model.targetConnectionsInfo() + .get(0) + .dataMapperMappings() + .get(0) + .sourceConnectionReference() + .connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("qjbtxjeaoqaqbzgy", model.policy().mode()); + Assertions.assertEquals("ybxawoijpodtblx", model.policy().mode()); Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); - Assertions.assertEquals(1743289619, model.policy().recurrence().interval()); - Assertions.assertEquals(true, model.allowVNetOverride()); - Assertions.assertEquals("wbqamteuliy", model.status()); + Assertions.assertEquals(1810381006, model.policy().recurrence().interval()); + Assertions.assertEquals(false, model.allowVNetOverride()); + Assertions.assertEquals("qhykincn", model.status()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ChangeDataCapture model - = new ChangeDataCapture().withFolder(new ChangeDataCaptureFolder().withName("fkndl")) - .withDescription("twknvgm") - .withSourceConnectionsInfo(Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList( - new MapperTable().withName("yw") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())), - new MapperTable().withName("ueatgroe").withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("byfqxkfaoy") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("jmvqmtd").withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("jn") - .withParameters(mapOf("rreqynkceysfaqe", "datajdjusk", "ryshwddkvbxgk", "datapl", - "vvlfntymtp", "datausybwptdaca", "zrsq", "dataiwenazero"))) - 
.withLinkedServiceType("sxkdnwqapfgsdpc").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true).withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("h").withValue("datauipldqq"), - new MapperDslConnectorProperties().withName("ekvalblhtjq") - .withValue("datayvwehtaemxh"), - new MapperDslConnectorProperties().withName("ysev").withValue("dataxivzrrry"), - new MapperDslConnectorProperties().withName("imipskdyzatvfuz") - .withValue("dataftjvvruxwigsye")))), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList( - new MapperTable().withName("smjtgrqgdg") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("kcsmk") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("dorvvmqfloy") - .withParameters(mapOf("gdexjd", "datagwumgxdgdhpa", "wllcolsr", "datavjsaqwotm", - "ljnhvlqj", "dataxaptefhexcgjok"))) - .withLinkedServiceType("kpeeksnbksdqhj").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties().withName("lkhhu").withValue("datacpoq")))), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList( - new MapperTable().withName("wqjwgok") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("xybwfdbkjbzten") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("jknsxfwu") - .withParameters(mapOf("pkuwxeoioj", "datadpkupnqrmgjf"))) - .withLinkedServiceType("zfav").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("yay").withValue("datamfzsbf"), - new MapperDslConnectorProperties().withName("rzx").withValue("dataewsrsxkrplbjaze"), - new MapperDslConnectorProperties().withName("w").withValue("datayoyp")))))) + = new ChangeDataCapture().withFolder(new ChangeDataCaptureFolder().withName("igptajbrzmqxucyc")) + .withDescription("oclxiut") + .withSourceConnectionsInfo(Arrays.asList(new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList( + new MapperTable().withName("yzjdnrqjbt") + .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), + new MapperTableSchema(), new MapperTableSchema())) + .withDslConnectorProperties( + Arrays.asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties())), + new MapperTable().withName("qbzg") + .withSchema(Arrays.asList(new MapperTableSchema())) + .withDslConnectorProperties(Arrays.asList(new 
MapperDslConnectorProperties())))) + .withConnection(new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("tbwbqamteuli") + .withParameters(mapOf("uxx", "datapkcvmwf", "iciijqpkzfbojx", "datapmywbormcq", + "nkwywzwofa", "datamcsmyqwixvcp", "t", "dataickduoiqta"))) + .withLinkedServiceType("sknxrwzawnvsbcf") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("hycvdimwrzre").withValue("datagy"), + new MapperDslConnectorProperties().withName("utrwpweryekz").withValue("datahmeott"), + new MapperDslConnectorProperties().withName("jyosxwwh").withValue("datajtfvpndpmiljpn")))))) .withTargetConnectionsInfo(Arrays.asList( new MapperTargetConnectionsInfo() .withTargetEntities(Arrays.asList( - new MapperTable().withName("nnhj").withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("kbiwetpozyc") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("fsetz") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) - .withConnection( - new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("dynojpziuwfb") - .withParameters(mapOf("qsyclj", "datadtn", "cbevxrhyzdfw", "dataelpkpbafvafhlbyl", - "mairrh", "datasofpltd"))) - .withLinkedServiceType("fnrac").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties().withName("uuj").withValue("datauhd")))) - .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("grbjbxsjybvitvqk").withSourceEntityName("az") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("tggmuwdchozfnkfe").withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings( - Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping(), - new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datakizvoa"))) - .withRelationships(Arrays.asList("dataa", "datalnuwiguy")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList( - new MapperTable().withName("wphvxz") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("jtlkexaonwivkcqh") - .withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("ccrmmk") + new MapperTable().withName("qllzsauzpjlxeehu") .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), new MapperTableSchema(), new MapperTableSchema())) .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new 
MapperDslConnectorProperties(), new MapperDslConnectorProperties())), - new MapperTable().withName("yqjf") + new MapperTable().withName("aymezx") .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), new MapperTableSchema(), new MapperTableSchema())) .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("em") - .withParameters(mapOf("dxphlk", "datadudxjascowvfdjk", "dkz", "datasnmgzvyfi", - "uqwqulsutrjbhxyk", "dataqnwsithuqolyah"))) - .withLinkedServiceType("y").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("g").withValue("dataftbcvexreuquow"), - new MapperDslConnectorProperties().withName("jv").withValue("datahreagk"), - new MapperDslConnectorProperties().withName("xv").withValue("datatvbczsulm"), - new MapperDslConnectorProperties().withName("glmep").withValue("datafs")))) - .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("sa").withSourceEntityName("psznga") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("ylkvecjuj").withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), - new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("dataedmzrgjfoknub"), - new DataMapperMapping().withTargetEntityName("itpkpztrgdg").withSourceEntityName("coqra") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("gyxpqit").withType(ConnectionType.LINKEDSERVICETYPE)) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("fdsajred") + .withParameters(mapOf("afpwzyifrkgwl", "datayshtuwgmev", "zdyi", "dataxeqipx", + "abnsmj", "datasfayorpravkjoges", "qpkevmyltjc", "datawynqxaekqsykvwj"))) + .withLinkedServiceType("pxklurccl") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("oytzpose").withValue("dataigpxvkq")))) + .withDataMapperMappings( + Arrays.asList( + new DataMapperMapping().withTargetEntityName("xvpif") + .withSourceEntityName("aifyzyzeyuubeids") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("ytoithgygvfl") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings( + Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping()))) + .withSourceDenormalizeInfo("dataynkrxwetw"), + new DataMapperMapping().withTargetEntityName("rcyrucpcunnu") + .withSourceEntityName("qumoeno") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("ienhqhskndnelq") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo(new MapperAttributeMappings() + .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), + new MapperAttributeMapping(), new MapperAttributeMapping()))) + .withSourceDenormalizeInfo("datawf"))) + .withRelationships(Arrays.asList("datan", "datayopetxivc", "datarl")), + new MapperTargetConnectionsInfo() + 
.withTargetEntities( + Arrays.asList( + new MapperTable().withName("caephbl") + .withSchema(Arrays.asList(new MapperTableSchema())) + .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), + new MapperTable().withName("bqsdtcjbctvi") + .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), + new MapperTableSchema(), new MapperTableSchema())) + .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), + new MapperDslConnectorProperties())), + new MapperTable().withName("uo") + .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), + new MapperTableSchema(), new MapperTableSchema())) + .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), + new MapperTable().withName("sqhzvbrzcdbanfz") + .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), + new MapperTableSchema())) + .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), + new MapperDslConnectorProperties())))) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("atkdbmwnrd") + .withParameters(mapOf("axljal", "databqbnaomhjrmkuh", "cjmobcanc", "datai", "xf", + "dataexxqcwg"))) + .withLinkedServiceType("aknokzwjjzrl") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("yfytpq").withValue("datax"), + new MapperDslConnectorProperties().withName("m").withValue("datajivyqlkjuv")))) + .withDataMapperMappings(Arrays.asList(new DataMapperMapping().withTargetEntityName("slzoyov") + .withSourceEntityName("dbpqvybefgvm") + .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("kcvtl") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo(new MapperAttributeMappings() + .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), + new MapperAttributeMapping(), new MapperAttributeMapping()))) + .withSourceDenormalizeInfo("datacua"), + new DataMapperMapping().withTargetEntityName("rhunlp") + .withSourceEntityName("ykycndzfqi") + .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("euy") + .withType(ConnectionType.LINKEDSERVICETYPE)) .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings( Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping(), new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("dataskbuhzaca"), - new DataMapperMapping().withTargetEntityName("yltcoqcuj").withSourceEntityName("sxzakuejkm") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("ztjofqcvovjufyc").withType(ConnectionType.LINKEDSERVICETYPE)) + .withSourceDenormalizeInfo("datagltbxoeeo"), + new DataMapperMapping().withTargetEntityName("lnf") + .withSourceEntityName("y") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("qdbpbhfckdvezc") + .withType(ConnectionType.LINKEDSERVICETYPE)) .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datayeji"))) + .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), + new MapperAttributeMapping(), new 
MapperAttributeMapping()))) + .withSourceDenormalizeInfo("dataddubbnqfblhkal"))) .withRelationships( - Arrays.asList("dataxeg", "datahortu", "dataawlpjfelqerpp", "datacbgqnzmnhiil")), + Arrays.asList("dataavawugiqj", "dataiogqgdminictte", "datajohiyg", "datapnbonhpcz")), new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList( - new MapperTable().withName("cjgckbbcccgzpra") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())), - new MapperTable().withName("a") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("wcxbyubhiqdxyurn") + .withTargetEntities( + Arrays.asList(new MapperTable().withName("tp") .withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("ccnuhiig").withSchema(Arrays.asList(new MapperTableSchema())) .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) + new MapperDslConnectorProperties())))) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("xvatvcr") - .withParameters(mapOf("bqxvhcsyhzlwxae", "datab"))) - .withLinkedServiceType("vurex").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) + .withLinkedService(new LinkedServiceReference().withReferenceName("chkhufm") + .withParameters(mapOf("zulo", "datamqyjgy"))) + .withLinkedServiceType("aeuzanh") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("ead").withValue("datazmwntopagt"), - new MapperDslConnectorProperties().withName("v").withValue("dataagoaqylkjztji")))) + new MapperDslConnectorProperties().withName("wphpzfngqj") + .withValue("dataidftujwjjufwbe"), + new MapperDslConnectorProperties().withName("k").withValue("datarhtssr"), + new MapperDslConnectorProperties().withName("nmdvha").withValue("datavjytiqswbq")))) .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("cgm").withSourceEntityName("tpfinzcpdltkr") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("mtbdrvcqgu").withType(ConnectionType.LINKEDSERVICETYPE)) + new DataMapperMapping().withTargetEntityName("xiytxtdgukvl") + .withSourceEntityName("ktg") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("youambewr") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo(new MapperAttributeMappings() + .withAttributeMappings(Arrays.asList(new MapperAttributeMapping()))) + .withSourceDenormalizeInfo("datagmmuteyxey"), + new DataMapperMapping().withTargetEntityName("qigijiitnspxlzde") + .withSourceEntityName("grijwaiufanrayb") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("qfrojsydgrhyd") + .withType(ConnectionType.LINKEDSERVICETYPE)) .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings( - Arrays.asList(new MapperAttributeMapping(), 
new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("dataheqdurelyujlfyou"))) - .withRelationships( - Arrays.asList("datakyeclcdigpta", "databrzmqxucycijoclx", "datautgjcyz", "datazjd")))) - .withPolicy(new MapperPolicy().withMode("qjbtxjeaoqaqbzgy").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(1743289619))) - .withAllowVNetOverride(true).withStatus("wbqamteuliy"); + Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping(), + new MapperAttributeMapping(), new MapperAttributeMapping()))) + .withSourceDenormalizeInfo("dataskieca"))) + .withRelationships(Arrays.asList("datazmxieqvdsmaklix", "datacahyhx")))) + .withPolicy(new MapperPolicy().withMode("ybxawoijpodtblx") + .withRecurrence( + new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(1810381006))) + .withAllowVNetOverride(false) + .withStatus("qhykincn"); model = BinaryData.fromObject(model).toObject(ChangeDataCapture.class); - Assertions.assertEquals("fkndl", model.folder().name()); - Assertions.assertEquals("twknvgm", model.description()); - Assertions.assertEquals("yw", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("jn", + Assertions.assertEquals("igptajbrzmqxucyc", model.folder().name()); + Assertions.assertEquals("oclxiut", model.description()); + Assertions.assertEquals("yzjdnrqjbt", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); + Assertions.assertEquals("tbwbqamteuli", model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("sxkdnwqapfgsdpc", + Assertions.assertEquals("sknxrwzawnvsbcf", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("h", + Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); + Assertions.assertEquals("hycvdimwrzre", model.sourceConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("nnhj", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("dynojpziuwfb", + Assertions.assertEquals("qllzsauzpjlxeehu", + model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); + Assertions.assertEquals("fdsajred", model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("fnrac", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("pxklurccl", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("uuj", + Assertions.assertEquals("oytzpose", model.targetConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("grbjbxsjybvitvqk", + Assertions.assertEquals("xvpif", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("az", + Assertions.assertEquals("aifyzyzeyuubeids", 
model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("tggmuwdchozfnkfe", model.targetConnectionsInfo().get(0).dataMapperMappings().get(0) - .sourceConnectionReference().connectionName()); + Assertions.assertEquals("ytoithgygvfl", + model.targetConnectionsInfo() + .get(0) + .dataMapperMappings() + .get(0) + .sourceConnectionReference() + .connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("qjbtxjeaoqaqbzgy", model.policy().mode()); + Assertions.assertEquals("ybxawoijpodtblx", model.policy().mode()); Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); - Assertions.assertEquals(1743289619, model.policy().recurrence().interval()); - Assertions.assertEquals(true, model.allowVNetOverride()); - Assertions.assertEquals("wbqamteuliy", model.status()); + Assertions.assertEquals(1810381006, model.policy().recurrence().interval()); + Assertions.assertEquals(false, model.allowVNetOverride()); + Assertions.assertEquals("qhykincn", model.status()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java index cb44ea2e81510..b68c682c73ad0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java @@ -6,11 +6,9 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; @@ -25,107 +23,123 @@ import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; import com.azure.resourcemanager.datafactory.models.MapperTable; import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"folder\":{\"name\":\"dizjd\"},\"description\":\"nehdmywnl\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"ovlufkygkykl\"},\"linkedServiceType\":\"kdblpeutahm\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"bhiwiktglaux\"},\"linkedServiceType\":\"l\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"hqgkfcspo\"},\"linkedServiceType\":\"gnbkixit\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"woxtevonmq\"},\"linkedServiceType\":\"ib\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"phgaa\"},\"linkedServiceType\":\"hzg\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"datajhfifdvxbd\",\"datallvmqyplp\",\"datayzaoqf\",\"dataashugvezulyd\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"gkljukrn\"},\"linkedServiceType\":\"wccq\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"datamez\"]}],\"policy\":{\"mode\":\"if\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":820437174}},\"allowVNetOverride\":false,\"status\":\"au\"},\"name\":\"rrznuljodvept\",\"type\":\"sncpdtgzkxttiv\",\"etag\":\"tmtwrentmj\",\"\":{\"hlgxiwtehsab\":\"dataaaetiwgbdhv\",\"dpvsauvi\":\"databfbikruvray\"},\"id\":\"svwrcbmxmbutxtt\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"properties\":{\"folder\":{\"name\":\"szisd\"},\"description\":\"pimzwzxtshv\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"lkofxwbtlfxhhg\"},\"linkedServiceType\":\"cehzuozvu\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"nqzzadydnoetbku\"},\"linkedServiceType\":\"utwsabryo\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"etam\"},\"linkedServiceType\":\"dcwdpqzminxipozz\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"mzura\"},\"linkedServiceType\":\"ekpetxywerylj\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"jknxxuw\"},\"linkedServiceType\":\"fjldshvaentatu\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datauponspv\",\"datajjm\"]},{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"cgelhy\"},\"linkedServiceType\":\"cwriibvtqq\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"dataunwnuzktmlkofm\",\"dataviq\"]},{\"targetEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"dztpcrgpyz\"},\"linkedServiceType\":\"rscbproevodo\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"datadfuptppdzad\",\"datazx\",\"datamyulgqf\"]},{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"itzwyjklynxxgc\"},\"linkedServiceType\":\"okkiqccshf\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{},{}],\"relationships\":[\"dataqoaeshnzidczx\",\"dataxqgy\"]}],\"policy\":{\"mode\":\"bufhn\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":2006538711}},\"allowVNetOverride\":true,\"status\":\"eezsxyl\"},\"name\":\"fqy\",\"type\":\"bpjtypeinqptfh\",\"etag\":\"flrhssmfsbg\",\"\":{\"rs\":\"datawptay\",\"glsfzhvzyhqqhioi\":\"datarrkmucrg\",\"dwciewoixkfxguis\":\"dataeaimvezltzpyj\",\"de\":\"dataojqgxeywaee\"},\"id\":\"bzrf\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); ChangeDataCaptureResource response - = manager.changeDataCaptures().define("wlheukb").withExistingFactory("jjxqofpdutnkykbm", "kugzchsxsnipnl") 
+ = manager.changeDataCaptures() + .define("wvphp") + .withExistingFactory("u", "pxekl") .withSourceConnectionsInfo( Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) + new MapperSourceConnectionsInfo().withSourceEntities(Arrays.asList(new MapperTable())) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("d")) - .withLinkedServiceType("vesepclpzwogawz").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties()))), + .withLinkedService(new LinkedServiceReference().withReferenceName("pazlbn")) + .withLinkedServiceType("ksvramim") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties()))), new MapperSourceConnectionsInfo() .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), new MapperTable())) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("cokrugrpj")) - .withLinkedServiceType("uxv").withType(ConnectionType.LINKEDSERVICETYPE) + .withLinkedService(new LinkedServiceReference().withReferenceName("atimalsh")) + .withLinkedServiceType("dna") + .withType(ConnectionType.LINKEDSERVICETYPE) .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties()))), + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties()))), + new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable())) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("wocugfllcai")) + .withLinkedServiceType("ktqfp") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties()))), new MapperSourceConnectionsInfo() .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), new MapperTable())) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("l")) - .withLinkedServiceType("c").withType(ConnectionType.LINKEDSERVICETYPE) + .withLinkedService(new LinkedServiceReference().withReferenceName("thw")) + .withLinkedServiceType("nglsrlwcfmhz") + .withType(ConnectionType.LINKEDSERVICETYPE) .withIsInlineDataset(false) .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))))) - .withTargetConnectionsInfo(Arrays.asList(new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("vscob")) - .withLinkedServiceType("ekrownnxqx").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new 
DataMapperMapping())) - .withRelationships(Arrays.asList("datainqi", "dataaklsvvggxl", "datadkiwkmbvuedio")))) - .withPolicy(new MapperPolicy().withMode("bcwvutsgmkkdydsj").withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(2132778650))) - .withFolder(new ChangeDataCaptureFolder().withName("dnz")).withDescription("zathxzfqjwvcxcx") - .withAllowVNetOverride(true).withStatus("ew").withIfMatch("txjrhatpebewli").create(); + new MapperDslConnectorProperties()))))) + .withTargetConnectionsInfo(Arrays.asList( + new MapperTargetConnectionsInfo() + .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) + .withConnection(new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("rclc")) + .withLinkedServiceType("kejncogooqwavrzi") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), + new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))) + .withDataMapperMappings( + Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), new DataMapperMapping())) + .withRelationships( + Arrays.asList("dataplgtqumuytavachs", "datakejuahevtfvvnin", "datapbkg", "datagwmb")), + new MapperTargetConnectionsInfo().withTargetEntities(Arrays.asList(new MapperTable())) + .withConnection(new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("mseswflyktrbgt")) + .withLinkedServiceType("imldceuhhludjwf") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties()))) + .withDataMapperMappings( + Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), new DataMapperMapping())) + .withRelationships(Arrays.asList("datauryim", "datanhugttypgdlzuz")))) + .withPolicy(new MapperPolicy().withMode("kcqparvylu") + .withRecurrence( + new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(2032913621))) + .withFolder(new ChangeDataCaptureFolder().withName("ryvqwqanhfekk")) + .withDescription("erbfuxbdtgjrssrb") + .withAllowVNetOverride(true) + .withStatus("qsx") + .withIfMatch("wyzclashtgzlwqer") + .create(); - Assertions.assertEquals("svwrcbmxmbutxtt", response.id()); - Assertions.assertEquals("dizjd", response.folder().name()); - Assertions.assertEquals("nehdmywnl", response.description()); - Assertions.assertEquals("ovlufkygkykl", + Assertions.assertEquals("bzrf", response.id()); + Assertions.assertEquals("szisd", response.folder().name()); + Assertions.assertEquals("pimzwzxtshv", response.description()); + Assertions.assertEquals("lkofxwbtlfxhhg", response.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("kdblpeutahm", - response.sourceConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("cehzuozvu", response.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, response.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("phgaa", + Assertions.assertEquals(false, response.sourceConnectionsInfo().get(0).connection().isInlineDataset()); + Assertions.assertEquals("jknxxuw", 
response.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("hzg", response.targetConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("fjldshvaentatu", + response.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, response.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("if", response.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, response.policy().recurrence().frequency()); - Assertions.assertEquals(820437174, response.policy().recurrence().interval()); - Assertions.assertEquals(false, response.allowVNetOverride()); - Assertions.assertEquals("au", response.status()); + Assertions.assertEquals(false, response.targetConnectionsInfo().get(0).connection().isInlineDataset()); + Assertions.assertEquals("bufhn", response.policy().mode()); + Assertions.assertEquals(FrequencyType.MINUTE, response.policy().recurrence().frequency()); + Assertions.assertEquals(2006538711, response.policy().recurrence().interval()); + Assertions.assertEquals(true, response.allowVNetOverride()); + Assertions.assertEquals("eezsxyl", response.status()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java index f87476f006def..45e516187450b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - 
Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.changeDataCaptures().deleteWithResponse("syizabjix", "ftl", "jesxbovnjswyj", - com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .deleteWithResponse("plhzmezcsm", "kfgjqgbhulsqlets", "vmbpmdxbiueus", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java index f78f568a9e27f..967143a54aab6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java @@ -6,73 +6,56 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; import com.azure.resourcemanager.datafactory.models.ConnectionType; import com.azure.resourcemanager.datafactory.models.FrequencyType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"folder\":{\"name\":\"himegpgmug\"},\"description\":\"ds\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"owa\"},\"linkedServiceType\":\"gqmzxyitfbfp\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"ktauqav\"},\"linkedServiceType\":\"lyntvou\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"dataiquodafliefhfj\"]}],\"policy\":{\"mode\":\"msqmbdkvsmwbml\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":1820521308}},\"allowVNetOverride\":false,\"status\":\"ltfxpzw\"},\"name\":\"tnkkjzjihndqz\",\"type\":\"dhuqlevty\",\"etag\":\"dehdveflkjqxsad\",\"\":{\"lnekyz\":\"datannoulmeqfyuorq\"},\"id\":\"jyzd\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ChangeDataCaptureResource response = manager.changeDataCaptures().getWithResponse("hlqpnddhzmckd", - "wmsmhnzmvruqxk", "lo", "pguckpwewepqdco", com.azure.core.util.Context.NONE).getValue(); - - Assertions.assertEquals("jyzd", response.id()); - Assertions.assertEquals("himegpgmug", response.folder().name()); - Assertions.assertEquals("ds", response.description()); - Assertions.assertEquals("owa", + = 
"{\"properties\":{\"folder\":{\"name\":\"kgrdmlf\"},\"description\":\"jveysui\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"pmyrkpi\"},\"linkedServiceType\":\"wcninypuoze\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"k\"},\"linkedServiceType\":\"iytnivtorqkvr\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"byakicpdwi\"},\"linkedServiceType\":\"nugil\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"dataqvgfqrpocieehf\"]},{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"djeqyrca\"},\"linkedServiceType\":\"q\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datanr\",\"databf\",\"datauej\",\"dataqwsxsovhgtaelgfo\"]}],\"policy\":{\"mode\":\"huyawsyxsjprc\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":1515513738}},\"allowVNetOverride\":false,\"status\":\"mooa\"},\"name\":\"glnkrkoub\",\"type\":\"aeafkf\",\"etag\":\"unlgddbeoidm\",\"\":{\"rawpnlagqgwdk\":\"datadpkzsqzwicecdkw\",\"fyhymwghdh\":\"datasqzahhydyerrf\"},\"id\":\"lb\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + ChangeDataCaptureResource response = manager.changeDataCaptures() + .getWithResponse("dhslrtndbch", "gupwxy", "tnsck", "vshiiecsvdtibgts", com.azure.core.util.Context.NONE) + .getValue(); + + Assertions.assertEquals("lb", response.id()); + Assertions.assertEquals("kgrdmlf", response.folder().name()); + Assertions.assertEquals("jveysui", response.description()); + Assertions.assertEquals("pmyrkpi", response.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("gqmzxyitfbfp", + Assertions.assertEquals("wcninypuoze", response.sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, response.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("ktauqav", + Assertions.assertEquals("byakicpdwi", response.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("lyntvou", response.targetConnectionsInfo().get(0).connection().linkedServiceType()); + Assertions.assertEquals("nugil", response.targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(false, response.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("msqmbdkvsmwbml", response.policy().mode()); + 
Assertions.assertEquals("huyawsyxsjprc", response.policy().mode()); Assertions.assertEquals(FrequencyType.SECOND, response.policy().recurrence().frequency()); - Assertions.assertEquals(1820521308, response.policy().recurrence().interval()); + Assertions.assertEquals(1515513738, response.policy().recurrence().interval()); Assertions.assertEquals(false, response.allowVNetOverride()); - Assertions.assertEquals("ltfxpzw", response.status()); + Assertions.assertEquals("mooa", response.status()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java index e4da1b26022ef..4aa1bc8f3b096 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java @@ -6,77 +6,59 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; import com.azure.resourcemanager.datafactory.models.ConnectionType; import com.azure.resourcemanager.datafactory.models.FrequencyType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"value\":[{\"properties\":{\"folder\":{\"name\":\"whohwsm\"},\"description\":\"qvximgjksbpudjh\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"vceudxhnwg\"},\"linkedServiceType\":\"dxwbh\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"hyjex\"},\"linkedServiceType\":\"wrwu\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"databuiuspbeqs\",\"databscahrfhxrvar\",\"datatbmanqdbuf\"]},{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"hxngqpbbybhjozn\"},\"linkedServiceType\":\"uvqnopupxbviemy\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datazgsxqv\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"fzyrleihkndedhm\"},\"linkedServiceType\":\"pjnvkp\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"dataiiecig\"]},{\"targetEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"hzft\"},\"linkedServiceType\":\"zsacmtcevwzai\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datakwwotrqhqahfz\",\"dataihgm\"]}],\"policy\":{\"mode\":\"dq\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1802582831}},\"allowVNetOverride\":false,\"status\":\"jjiecc\"},\"name\":\"iklmcrtcscjufpc\",\"type\":\"ykyxjhqti\",\"etag\":\"osuq\",\"\":{\"ikkuhrrpx\":\"datacxwhcydnaqam\"},\"id\":\"j\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"value\":[{\"properties\":{\"folder\":{\"name\":\"icuc\"},\"description\":\"lhdwmqmdivx\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"qpczcimpdjn\"},\"linkedServiceType\":\"w\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"gjptabdyvjhant\"},\"linkedServiceType\":\"zkiowzifhl\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"datadhxopjvqgwr\",\"datacpatpxrwsqfnuqeh\",\"dataypixghahz\"]},{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"ciewbywblgja\"},\"linkedServiceType\":\"ggvxjwevnrknb\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"datawqvu\"]}],\"policy\":{\"mode\":\"jctwtgozaivfa\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":1938405349}},\"allowVNetOverride\":true,\"status\":\"pyllswjzhsfuvo\"},\"name\":\"h\",\"type\":\"lnb\",\"etag\":\"pehjgeqfpzh\",\"\":{\"lqt\":\"datalbziud\",\"rpzubem\":\"datacrverpvyusxx\",\"mqhujsb\":\"datadpryvnr\"},\"id\":\"bauljmnp\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - PagedIterable response = manager.changeDataCaptures().listByFactory("dszpoewfanfejk", - "utnaavtjhikcp", com.azure.core.util.Context.NONE); + PagedIterable response + = manager.changeDataCaptures().listByFactory("xpcma", "ao", com.azure.core.util.Context.NONE); - Assertions.assertEquals("j", response.iterator().next().id()); - Assertions.assertEquals("whohwsm", response.iterator().next().folder().name()); - Assertions.assertEquals("qvximgjksbpudjh", response.iterator().next().description()); - Assertions.assertEquals("vceudxhnwg", + Assertions.assertEquals("bauljmnp", response.iterator().next().id()); + Assertions.assertEquals("icuc", response.iterator().next().folder().name()); + Assertions.assertEquals("lhdwmqmdivx", response.iterator().next().description()); + Assertions.assertEquals("qpczcimpdjn", response.iterator().next().sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("dxwbh", + Assertions.assertEquals("w", response.iterator().next().sourceConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.iterator().next().sourceConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(true, response.iterator().next().sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("hyjex", + Assertions.assertEquals("gjptabdyvjhant", response.iterator().next().targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - 
Assertions.assertEquals("wrwu", + Assertions.assertEquals("zkiowzifhl", response.iterator().next().targetConnectionsInfo().get(0).connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, response.iterator().next().targetConnectionsInfo().get(0).connection().type()); Assertions.assertEquals(false, response.iterator().next().targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("dq", response.iterator().next().policy().mode()); - Assertions.assertEquals(FrequencyType.MINUTE, response.iterator().next().policy().recurrence().frequency()); - Assertions.assertEquals(1802582831, response.iterator().next().policy().recurrence().interval()); - Assertions.assertEquals(false, response.iterator().next().allowVNetOverride()); - Assertions.assertEquals("jjiecc", response.iterator().next().status()); + Assertions.assertEquals("jctwtgozaivfa", response.iterator().next().policy().mode()); + Assertions.assertEquals(FrequencyType.SECOND, response.iterator().next().policy().recurrence().frequency()); + Assertions.assertEquals(1938405349, response.iterator().next().policy().recurrence().interval()); + Assertions.assertEquals(true, response.iterator().next().allowVNetOverride()); + Assertions.assertEquals("pyllswjzhsfuvo", response.iterator().next().status()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java index 86b63f4baee77..30eef459c740c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java @@ -6,47 +6,30 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesStartWithResponseMockTests { @Test public void testStartWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - 
Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.changeDataCaptures().startWithResponse("jgwrn", "dxab", "xqpsjtipvszn", - com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .startWithResponse("yucdmakwbybgvbfd", "ssylzsuxvwbkahd", "mpisqjwidkhud", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java index 2692b7af662d1..30d124aee920f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java @@ -6,49 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesStatusWithResponseMockTests { @Test public void testStatusWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "\"ej\""; - String responseStr = "\"bthmrfytxvxurm\""; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - 
Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); String response = manager.changeDataCaptures() - .statusWithResponse("hcwkocdq", "zlbqkjqk", "hs", com.azure.core.util.Context.NONE).getValue(); + .statusWithResponse("izhqabtbvjgwrbk", "kjhxiwcksinmhz", "r", com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("bthmrfytxvxurm", response); + Assertions.assertEquals("ej", response); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java index 71505c088e8ed..bd4052dd167fe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ChangeDataCapturesStopWithResponseMockTests { @Test public void testStopWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = 
DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.changeDataCaptures().stopWithResponse("sbpueymimicjjy", "tupqtlgzuykt", "bbvbsl", - com.azure.core.util.Context.NONE); + manager.changeDataCaptures() + .stopWithResponse("ffdtzhldswt", "mlptiflyoqgymvft", "zfdzj", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java index 5bddc381a80ca..ade33497f0c6d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java @@ -12,14 +12,14 @@ public final class CmkIdentityDefinitionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CmkIdentityDefinition model - = BinaryData.fromString("{\"userAssignedIdentity\":\"lexxbczwtru\"}").toObject(CmkIdentityDefinition.class); - Assertions.assertEquals("lexxbczwtru", model.userAssignedIdentity()); + = BinaryData.fromString("{\"userAssignedIdentity\":\"ied\"}").toObject(CmkIdentityDefinition.class); + Assertions.assertEquals("ied", model.userAssignedIdentity()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CmkIdentityDefinition model = new CmkIdentityDefinition().withUserAssignedIdentity("lexxbczwtru"); + CmkIdentityDefinition model = new CmkIdentityDefinition().withUserAssignedIdentity("ied"); model = BinaryData.fromObject(model).toObject(CmkIdentityDefinition.class); - Assertions.assertEquals("lexxbczwtru", model.userAssignedIdentity()); + Assertions.assertEquals("ied", model.userAssignedIdentity()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java index 6ba79f78b3136..82a97008e25bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java @@ -19,34 +19,40 @@ public final class CommonDataServiceForAppsEntityDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CommonDataServiceForAppsEntityDataset model = BinaryData.fromString( - 
"{\"type\":\"CommonDataServiceForAppsEntity\",\"typeProperties\":{\"entityName\":\"datagxnwfmzvztau\"},\"description\":\"pamqxfcssanybz\",\"structure\":\"datahvdfe\",\"schema\":\"datayj\",\"linkedServiceName\":{\"referenceName\":\"v\",\"parameters\":{\"nzxezriwgo\":\"datalywkhookj\"}},\"parameters\":{\"ap\":{\"type\":\"Bool\",\"defaultValue\":\"dataqksa\"},\"benwsdfp\":{\"type\":\"SecureString\",\"defaultValue\":\"datacit\"},\"pmvzpireszya\":{\"type\":\"Int\",\"defaultValue\":\"dataahlfrcqk\"},\"cjjlwkyeahhhut\":{\"type\":\"Float\",\"defaultValue\":\"datamlbmfggeokfe\"}},\"annotations\":[\"datanrfcqu\",\"datam\",\"dataihpinow\"],\"folder\":{\"name\":\"jpxp\"},\"\":{\"lgbbfjmdgjvxlh\":\"datadwyqqidqi\",\"eftyaphqeofytl\":\"datapm\",\"qvjfdgfqpmquxpjh\":\"datanlowmcmcqixuanc\",\"dcio\":\"datafaar\"}}") + "{\"type\":\"duvtvod\",\"typeProperties\":{\"entityName\":\"dataizme\"},\"description\":\"xpqhmlq\",\"structure\":\"datatbl\",\"schema\":\"datavrj\",\"linkedServiceName\":{\"referenceName\":\"znotd\",\"parameters\":{\"bmitaftazgcxsvq\":\"datavpbqsdqkpsbqs\",\"ow\":\"datacqufylam\"}},\"parameters\":{\"xiknsgofuns\":{\"type\":\"Object\",\"defaultValue\":\"datayutehlkarvtipquk\"},\"xn\":{\"type\":\"Object\",\"defaultValue\":\"datacekggvmfnnb\"},\"cvgifwsunj\":{\"type\":\"SecureString\",\"defaultValue\":\"datakkedeetxtp\"},\"zsnfzyviiwsuanzh\":{\"type\":\"Bool\",\"defaultValue\":\"dataaciwmmpdtqdonb\"}},\"annotations\":[\"datauifkzqqhb\"],\"folder\":{\"name\":\"oilmkfbeoiipjpng\"},\"\":{\"dlpsx\":\"datauoi\",\"wimqnryclocfm\":\"datatug\"}}") .toObject(CommonDataServiceForAppsEntityDataset.class); - Assertions.assertEquals("pamqxfcssanybz", model.description()); - Assertions.assertEquals("v", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ap").type()); - Assertions.assertEquals("jpxp", model.folder().name()); + Assertions.assertEquals("xpqhmlq", model.description()); + Assertions.assertEquals("znotd", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("xiknsgofuns").type()); + Assertions.assertEquals("oilmkfbeoiipjpng", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CommonDataServiceForAppsEntityDataset model = new CommonDataServiceForAppsEntityDataset() - .withDescription("pamqxfcssanybz").withStructure("datahvdfe").withSchema("datayj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("v") - .withParameters(mapOf("nzxezriwgo", "datalywkhookj"))) - .withParameters(mapOf("ap", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataqksa"), "benwsdfp", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacit"), - "pmvzpireszya", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataahlfrcqk"), - "cjjlwkyeahhhut", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datamlbmfggeokfe"))) - .withAnnotations(Arrays.asList("datanrfcqu", "datam", "dataihpinow")) - .withFolder(new DatasetFolder().withName("jpxp")).withEntityName("datagxnwfmzvztau"); + CommonDataServiceForAppsEntityDataset model + = new CommonDataServiceForAppsEntityDataset().withDescription("xpqhmlq") + .withStructure("datatbl") + .withSchema("datavrj") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("znotd") + .withParameters(mapOf("bmitaftazgcxsvq", "datavpbqsdqkpsbqs", "ow", 
"datacqufylam"))) + .withParameters(mapOf("xiknsgofuns", + new ParameterSpecification().withType(ParameterType.OBJECT) + .withDefaultValue("datayutehlkarvtipquk"), + "xn", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datacekggvmfnnb"), + "cvgifwsunj", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datakkedeetxtp"), + "zsnfzyviiwsuanzh", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataaciwmmpdtqdonb"))) + .withAnnotations(Arrays.asList("datauifkzqqhb")) + .withFolder(new DatasetFolder().withName("oilmkfbeoiipjpng")) + .withEntityName("dataizme"); model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsEntityDataset.class); - Assertions.assertEquals("pamqxfcssanybz", model.description()); - Assertions.assertEquals("v", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ap").type()); - Assertions.assertEquals("jpxp", model.folder().name()); + Assertions.assertEquals("xpqhmlq", model.description()); + Assertions.assertEquals("znotd", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("xiknsgofuns").type()); + Assertions.assertEquals("oilmkfbeoiipjpng", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java index fdb358b321237..708d15efb89b6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class CommonDataServiceForAppsEntityDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CommonDataServiceForAppsEntityDatasetTypeProperties model - = BinaryData.fromString("{\"entityName\":\"dataufzg\"}") + = BinaryData.fromString("{\"entityName\":\"datawxvjele\"}") .toObject(CommonDataServiceForAppsEntityDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CommonDataServiceForAppsEntityDatasetTypeProperties model - = new CommonDataServiceForAppsEntityDatasetTypeProperties().withEntityName("dataufzg"); + = new CommonDataServiceForAppsEntityDatasetTypeProperties().withEntityName("datawxvjele"); model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsEntityDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java index 36b1a4a4f2c4d..05c3f6128c07f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java @@ -11,15 +11,19 @@ public final class CommonDataServiceForAppsSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CommonDataServiceForAppsSource model = BinaryData.fromString( - "{\"type\":\"CommonDataServiceForAppsSource\",\"query\":\"datayoyjptkyfrkzg\",\"additionalColumns\":\"datawyqkkd\",\"sourceRetryCount\":\"dataxdrgim\",\"sourceRetryWait\":\"dataoffyboonlovfundk\",\"maxConcurrentConnections\":\"datamyxmsbtq\",\"disableMetricsCollection\":\"datagn\",\"\":{\"dg\":\"databjxgj\",\"hzihlzljqcmmgsm\":\"datawrjuiuzlf\",\"ybdz\":\"dataujunqwkjfmt\"}}") + "{\"type\":\"bvieqon\",\"query\":\"dataerdqrfassiiilc\",\"additionalColumns\":\"datagahscayyxg\",\"sourceRetryCount\":\"dataukznxdivqopxuno\",\"sourceRetryWait\":\"datatkmknacnfzcytbhd\",\"maxConcurrentConnections\":\"dataagw\",\"disableMetricsCollection\":\"datamwggzf\",\"\":{\"gdfyos\":\"datax\",\"acqjgedxpb\":\"datawigvqgc\"}}") .toObject(CommonDataServiceForAppsSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CommonDataServiceForAppsSource model = new CommonDataServiceForAppsSource().withSourceRetryCount("dataxdrgim") - .withSourceRetryWait("dataoffyboonlovfundk").withMaxConcurrentConnections("datamyxmsbtq") - .withDisableMetricsCollection("datagn").withQuery("datayoyjptkyfrkzg").withAdditionalColumns("datawyqkkd"); + CommonDataServiceForAppsSource model + = new CommonDataServiceForAppsSource().withSourceRetryCount("dataukznxdivqopxuno") + .withSourceRetryWait("datatkmknacnfzcytbhd") + .withMaxConcurrentConnections("dataagw") + .withDisableMetricsCollection("datamwggzf") + .withQuery("dataerdqrfassiiilc") + .withAdditionalColumns("datagahscayyxg"); model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java index dc1016ed46404..d2f096076a090 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java @@ -13,14 +13,13 @@ public final class CompressionReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CompressionReadSettings model = BinaryData.fromString( - "{\"type\":\"CompressionReadSettings\",\"\":{\"rkyaovc\":\"datadvxsgdaajlhgsuq\",\"taingadkrkny\":\"datadsrxhpqlxnbd\",\"fgwbuxqzf\":\"datajngdfzqcjfqm\"}}") + "{\"type\":\"pndou\",\"\":{\"xouknzhmzassrsqz\":\"datacfgqinao\",\"jhklttliuwd\":\"dataknbtxtdmutdrrqq\",\"aqdswfnomciwhu\":\"datatwqjft\"}}") .toObject(CompressionReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CompressionReadSettings model - = new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")); + CompressionReadSettings model = new CompressionReadSettings().withAdditionalProperties(mapOf("type", "pndou")); model = 
BinaryData.fromObject(model).toObject(CompressionReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java index 7852fda1b8c9d..f996099fbf25e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java @@ -19,36 +19,33 @@ public final class ConcurObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ConcurObjectDataset model = BinaryData.fromString( - "{\"type\":\"ConcurObject\",\"typeProperties\":{\"tableName\":\"datadfvjsknrbxzepirt\"},\"description\":\"piqdqbvxqto\",\"structure\":\"datawbopvhcbtza\",\"schema\":\"datajxcontickfk\",\"linkedServiceName\":{\"referenceName\":\"thueocsgvuqzgbjw\",\"parameters\":{\"abes\":\"datadmpwewpmiolea\",\"ecmbaaj\":\"datayzwphbjks\",\"zkfekdesb\":\"datafwrdkql\",\"b\":\"datajqtl\"}},\"parameters\":{\"rr\":{\"type\":\"Array\",\"defaultValue\":\"datauibs\"},\"rcpzhbwcxybtdzyc\":{\"type\":\"String\",\"defaultValue\":\"dataeqrypyurvshhovtu\"},\"wczsrazcbybic\":{\"type\":\"Bool\",\"defaultValue\":\"dataoegjzgpljb\"},\"pua\":{\"type\":\"SecureString\",\"defaultValue\":\"datah\"}},\"annotations\":[\"datai\"],\"folder\":{\"name\":\"mu\"},\"\":{\"enndzgthdzit\":\"datawuycuo\",\"vswtwonadezm\":\"datazffpherwj\"}}") + "{\"type\":\"xotudamkiyao\",\"typeProperties\":{\"tableName\":\"datayvsbfsinvabdjul\"},\"description\":\"hulzugifgsp\",\"structure\":\"datadlnoc\",\"schema\":\"dataygimiz\",\"linkedServiceName\":{\"referenceName\":\"ukjbwmgk\",\"parameters\":{\"yjskdiylg\":\"datamspppoeszthjt\",\"sybqowgvmxwbo\":\"datazuqix\"}},\"parameters\":{\"h\":{\"type\":\"String\",\"defaultValue\":\"dataoexb\"},\"umzyyh\":{\"type\":\"Float\",\"defaultValue\":\"datakajwscmneevlumq\"},\"ivjqu\":{\"type\":\"Array\",\"defaultValue\":\"dataa\"}},\"annotations\":[\"databgbzgfhzdzahk\",\"dataxvcbicfecthotbk\"],\"folder\":{\"name\":\"ztpxj\"},\"\":{\"jdpayx\":\"datak\"}}") .toObject(ConcurObjectDataset.class); - Assertions.assertEquals("piqdqbvxqto", model.description()); - Assertions.assertEquals("thueocsgvuqzgbjw", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rr").type()); - Assertions.assertEquals("mu", model.folder().name()); + Assertions.assertEquals("hulzugifgsp", model.description()); + Assertions.assertEquals("ukjbwmgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("h").type()); + Assertions.assertEquals("ztpxj", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ConcurObjectDataset model = new ConcurObjectDataset().withDescription("piqdqbvxqto") - .withStructure("datawbopvhcbtza").withSchema("datajxcontickfk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("thueocsgvuqzgbjw") - .withParameters(mapOf("abes", "datadmpwewpmiolea", "ecmbaaj", "datayzwphbjks", "zkfekdesb", - "datafwrdkql", "b", "datajqtl"))) - .withParameters( - mapOf("rr", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datauibs"), 
- "rcpzhbwcxybtdzyc", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("dataeqrypyurvshhovtu"), - "wczsrazcbybic", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataoegjzgpljb"), "pua", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datah"))) - .withAnnotations(Arrays.asList("datai")).withFolder(new DatasetFolder().withName("mu")) - .withTableName("datadfvjsknrbxzepirt"); + ConcurObjectDataset model = new ConcurObjectDataset().withDescription("hulzugifgsp") + .withStructure("datadlnoc") + .withSchema("dataygimiz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ukjbwmgk") + .withParameters(mapOf("yjskdiylg", "datamspppoeszthjt", "sybqowgvmxwbo", "datazuqix"))) + .withParameters(mapOf("h", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataoexb"), "umzyyh", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakajwscmneevlumq"), + "ivjqu", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataa"))) + .withAnnotations(Arrays.asList("databgbzgfhzdzahk", "dataxvcbicfecthotbk")) + .withFolder(new DatasetFolder().withName("ztpxj")) + .withTableName("datayvsbfsinvabdjul"); model = BinaryData.fromObject(model).toObject(ConcurObjectDataset.class); - Assertions.assertEquals("piqdqbvxqto", model.description()); - Assertions.assertEquals("thueocsgvuqzgbjw", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rr").type()); - Assertions.assertEquals("mu", model.folder().name()); + Assertions.assertEquals("hulzugifgsp", model.description()); + Assertions.assertEquals("ukjbwmgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("h").type()); + Assertions.assertEquals("ztpxj", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java index 5994be2ab7bae..e299d6ae6e8e1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java @@ -11,15 +11,19 @@ public final class ConcurSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ConcurSource model = BinaryData.fromString( - "{\"type\":\"ConcurSource\",\"query\":\"datadhismwjkq\",\"queryTimeout\":\"datayuygybshch\",\"additionalColumns\":\"dataeaitzgewwqwibt\",\"sourceRetryCount\":\"datahcgbzrlf\",\"sourceRetryWait\":\"datawusqupk\",\"maxConcurrentConnections\":\"datappmwozwjinxyrtec\",\"disableMetricsCollection\":\"datazslttk\",\"\":{\"mahuw\":\"datakujceeczhsdpfoa\",\"ewrryt\":\"dataodddq\",\"s\":\"datasocqkdclbzqnao\",\"cbhezau\":\"datamp\"}}") + 
"{\"type\":\"hdcilinbu\",\"query\":\"datajzknkffzdyozn\",\"queryTimeout\":\"datawe\",\"additionalColumns\":\"datawxmcsxidazslwhuy\",\"sourceRetryCount\":\"datapfperheipl\",\"sourceRetryWait\":\"dataswhqrdvqva\",\"maxConcurrentConnections\":\"dataauuw\",\"disableMetricsCollection\":\"datagofumbp\",\"\":{\"omavbotaoaixi\":\"datadmfjgklm\",\"mqwut\":\"datacwyinfywtqvjno\",\"xugd\":\"datayaeyyiwraowpdm\"}}") .toObject(ConcurSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ConcurSource model = new ConcurSource().withSourceRetryCount("datahcgbzrlf").withSourceRetryWait("datawusqupk") - .withMaxConcurrentConnections("datappmwozwjinxyrtec").withDisableMetricsCollection("datazslttk") - .withQueryTimeout("datayuygybshch").withAdditionalColumns("dataeaitzgewwqwibt").withQuery("datadhismwjkq"); + ConcurSource model = new ConcurSource().withSourceRetryCount("datapfperheipl") + .withSourceRetryWait("dataswhqrdvqva") + .withMaxConcurrentConnections("dataauuw") + .withDisableMetricsCollection("datagofumbp") + .withQueryTimeout("datawe") + .withAdditionalColumns("datawxmcsxidazslwhuy") + .withQuery("datajzknkffzdyozn"); model = BinaryData.fromObject(model).toObject(ConcurSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java index 0efc656103cdb..449b2460ffb68 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java @@ -10,8 +10,8 @@ public final class ConnectionStatePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ConnectionStateProperties model = BinaryData.fromString( - "{\"actionsRequired\":\"ppdbwnupgahxkum\",\"description\":\"jcaacfdmmcpugm\",\"status\":\"qepvufhbzeh\"}") + ConnectionStateProperties model = BinaryData + .fromString("{\"actionsRequired\":\"klelssxb\",\"description\":\"c\",\"status\":\"zujksrlsmdes\"}") .toObject(ConnectionStateProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java index 05b88e4d2f970..f186cbf108f50 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java @@ -20,50 +20,43 @@ public final class ControlActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ControlActivity model = BinaryData.fromString( - 
"{\"type\":\"Container\",\"name\":\"xzylqhewhc\",\"description\":\"ex\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"wggmitdwolfmfaz\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"jttzfswohddliikk\":\"datad\",\"gemtnbkevuuky\":\"datasqpli\"}},{\"activity\":\"iksncr\",\"dependencyConditions\":[\"Failed\"],\"\":{\"f\":\"databzqtuhga\",\"ivhyujqxyfb\":\"datayftohdlpcixpxxny\"}},{\"activity\":\"azylaya\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Skipped\"],\"\":{\"nrkmcivhww\":\"dataduwwjohg\",\"wkmrjfs\":\"datayejhwbdfcfy\",\"qowqo\":\"databcfqaqov\",\"sbjhhadndow\":\"dataispasxwiic\"}},{\"activity\":\"x\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"prygjgyov\":\"dataieehpvqfifrr\",\"g\":\"datapgqiisma\",\"kkcxc\":\"datatcoykr\",\"utyszhzlvkmirn\":\"datajkronxmtrhwwdfnc\"}}],\"userProperties\":[{\"name\":\"zarmep\",\"value\":\"datamo\"},{\"name\":\"tfpkskxsyohfrl\",\"value\":\"dataynkgnychuzhng\"},{\"name\":\"tbhjgliioeodgn\",\"value\":\"dataoewfg\"},{\"name\":\"wmm\",\"value\":\"dataixtyavvexj\"}],\"\":{\"vnoqtwqciq\":\"datakonbgegwxjgkrpp\"}}") + "{\"type\":\"jxdrgxpuxpzslmf\",\"name\":\"ypzrycchqzkfges\",\"description\":\"gszjhekbmdh\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"hg\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"mebfgbznlihbk\":\"datauvwys\",\"ftvyzuyqzjfvb\":\"datapaeyw\",\"vsxmyzss\":\"datayyjvzlscyz\"}},{\"activity\":\"bs\",\"dependencyConditions\":[\"Completed\"],\"\":{\"wqiparctshe\":\"dataxazebmmjaiga\",\"fawhoosrsol\":\"datagtdvhokx\",\"nudifierxx\":\"datahvmfoejbgiqhjpe\",\"jllfgmdoaihl\":\"datarsdvuirqfk\"}},{\"activity\":\"rsqcivmirybwga\",\"dependencyConditions\":[\"Completed\"],\"\":{\"emazgtbynxshc\":\"datay\",\"geqojzv\":\"datawe\",\"vsvwnpcxdkmtvpa\":\"datazxxkojjphbobu\"}}],\"userProperties\":[{\"name\":\"bnyhmlpzd\",\"value\":\"datavotuc\"},{\"name\":\"zbpocu\",\"value\":\"dataqzf\"}],\"\":{\"qsgert\":\"dataemzdnvnooklgren\",\"bd\":\"dataicemgsnc\",\"slbnunpxswmcc\":\"datakphaed\"}}") .toObject(ControlActivity.class); - Assertions.assertEquals("xzylqhewhc", model.name()); - Assertions.assertEquals("ex", model.description()); + Assertions.assertEquals("ypzrycchqzkfges", model.name()); + Assertions.assertEquals("gszjhekbmdh", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("wggmitdwolfmfaz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zarmep", model.userProperties().get(0).name()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("hg", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("bnyhmlpzd", model.userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ControlActivity model - = new ControlActivity().withName("xzylqhewhc").withDescription("ex").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("wggmitdwolfmfaz") - .withDependencyConditions( - 
Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency() - .withActivity("iksncr").withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("azylaya") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("x") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("zarmep").withValue("datamo"), - new UserProperty().withName("tfpkskxsyohfrl").withValue("dataynkgnychuzhng"), - new UserProperty().withName("tbhjgliioeodgn").withValue("dataoewfg"), - new UserProperty().withName("wmm").withValue("dataixtyavvexj"))); + ControlActivity model = new ControlActivity().withName("ypzrycchqzkfges") + .withDescription("gszjhekbmdh") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("hg") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("bs") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("rsqcivmirybwga") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("bnyhmlpzd").withValue("datavotuc"), + new UserProperty().withName("zbpocu").withValue("dataqzf"))); model = BinaryData.fromObject(model).toObject(ControlActivity.class); - Assertions.assertEquals("xzylqhewhc", model.name()); - Assertions.assertEquals("ex", model.description()); + Assertions.assertEquals("ypzrycchqzkfges", model.name()); + Assertions.assertEquals("gszjhekbmdh", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("wggmitdwolfmfaz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zarmep", model.userProperties().get(0).name()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("hg", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("bnyhmlpzd", model.userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java index 8c009230ab0b2..517ae20048226 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java @@ -11,14 +11,14 @@ public final class CopyActivityLogSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CopyActivityLogSettings model = BinaryData - .fromString("{\"logLevel\":\"datasghdovcpbwfnap\",\"enableReliableLogging\":\"datagvhsixzcdau\"}") + .fromString("{\"logLevel\":\"datamflwfxdkpwdpmy\",\"enableReliableLogging\":\"datacugchtwxifudlrxb\"}") .toObject(CopyActivityLogSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopyActivityLogSettings model = new CopyActivityLogSettings().withLogLevel("datasghdovcpbwfnap") - .withEnableReliableLogging("datagvhsixzcdau"); + CopyActivityLogSettings model = new CopyActivityLogSettings().withLogLevel("datamflwfxdkpwdpmy") + .withEnableReliableLogging("datacugchtwxifudlrxb"); model = BinaryData.fromObject(model).toObject(CopyActivityLogSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java index 836f063d0ade9..59ee10d631468 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java @@ -32,117 +32,141 @@ public final class CopyActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CopyActivity model = BinaryData.fromString( - 
"{\"type\":\"Copy\",\"typeProperties\":{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datapq\",\"sourceRetryWait\":\"datakrmlb\",\"maxConcurrentConnections\":\"dataqogzaw\",\"disableMetricsCollection\":\"dataqdnxum\",\"\":{\"qwsyntcwrluqa\":\"datanvscyu\"}},\"sink\":{\"type\":\"CopySink\",\"writeBatchSize\":\"datalygfvbfejb\",\"writeBatchTimeout\":\"dataklgpifvpsmvksa\",\"sinkRetryCount\":\"datamsnplqfivxfqmdjz\",\"sinkRetryWait\":\"datavmkplrjkmpaxoey\",\"maxConcurrentConnections\":\"dataofaogvmqzagrqcqh\",\"disableMetricsCollection\":\"dataskmkdr\",\"\":{\"ldwcxjvexlutxcmc\":\"datapn\",\"yypvhdulds\":\"datacotqocn\"}},\"translator\":\"databzzbrufk\",\"enableStaging\":\"datalkpvaagrdf\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"glqdsphvosucryh\",\"parameters\":{\"dofshgmqxwop\":\"datathzfotfrfhrjka\",\"aittbmobrxhwpg\":\"datanitrmzvnrfkzn\",\"fgqibb\":\"datarnxrjmilogcn\",\"fwyrsfjjsoyu\":\"dataplrtxhzt\"}},\"path\":\"databuyd\",\"enableCompression\":\"datahknttk\",\"\":{\"osbijikjfjibuwh\":\"datalehenjstiwd\",\"xav\":\"datapojujpifxtg\",\"iwx\":\"datajx\"}},\"parallelCopies\":\"dataauh\",\"dataIntegrationUnits\":\"datachphovu\",\"enableSkipIncompatibleRow\":\"datasczwcxlncoh\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"datafvyriawfwwsgdkbd\",\"path\":\"datas\",\"\":{\"ybuqmk\":\"datawmfcortoxsthjyyi\",\"vyilh\":\"datawdok\",\"srhxoyrgvrtcctm\":\"datasotdiox\",\"kdep\":\"dataglbplqhbrar\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"ufyqcqfouhyeyxg\",\"parameters\":{\"pwwsiooz\":\"dataochpxps\",\"v\":\"dataugbdkxlwck\",\"sgkeexsozpkvy\":\"datazkzjjtapvqjebtd\"}},\"path\":\"datatytwtfqpmpyww\",\"logLevel\":\"dataukqmjcwdo\",\"enableReliableLogging\":\"datadqun\",\"\":{\"rbn\":\"datacocchdxjrrb\",\"p\":\"dataqpsquou\"}},\"logSettings\":{\"enableCopyActivityLog\":\"datatuhdoimojcm\",\"copyActivityLogSettings\":{\"logLevel\":\"datadxwvorzhzfocgf\",\"enableReliableLogging\":\"datatornvbhuyolwifbd\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"yv\",\"parameters\":{\"yu\":\"datawbbvkthrexzvejq\"}},\"path\":\"datalokeqe\"}},\"preserveRules\":[\"datap\",\"dataiehvgchsg\"],\"preserve\":[\"datawerbpobvjhunicg\",\"dataxceivvmdtk\"],\"validateDataConsistency\":\"dataqhznutrx\",\"skipErrorFile\":{\"fileMissing\":\"datatrnniarjezjhy\",\"dataInconsistency\":\"dataqf\"}},\"inputs\":[{\"referenceName\":\"esqykqfserls\",\"parameters\":{\"tagwmzgvnojgm\":\"datashh\",\"oi\":\"databkali\",\"owxsxbxd\":\"datakehpdssvlubdp\"}},{\"referenceName\":\"mixu\",\"parameters\":{\"qqa\":\"datakcqm\"}},{\"referenceName\":\"ftghmtbufkcnkgh\",\"parameters\":{\"eayodrvwnqb\":\"datairshl\",\"slfxejpocsgig\":\"dataxyofftxzovbhqel\",\"dyjwmglgstr\":\"dataabtx\",\"bcbefohnymfhmlji\":\"datafhehdzov\"}},{\"referenceName\":\"kgfvzvmtjcxi\",\"parameters\":{\"lceetuivmbugizwy\":\"datazxdb\",\"ls\":\"datafhfptbdxtvlpj\"}}],\"outputs\":[{\"referenceName\":\"uzytxeaqig\",\"parameters\":{\"tgcidbrjwi\":\"datatutet\",\"grcmcqppledx\":\"dataouepaqnfy\",\"welutrvd\":\"dataecttub\"}},{\"referenceName\":\"hwpxps\",\"parameters\":{\"fmndrdqqjkeg\":\"dataltslfccyavy\",\"erzkhiovh\":\"datamldkci\"}},{\"referenceName\":\"kwfolpjre\",\"parameters\":{\"iranxqnzssvn\":\"datav\",\"wdoq\":\"datadtqykz\",\"ryoxmfrxfxycji\":\"dataejltrnqxicyo\",\"kipklfwnhfk\":\"datalvchfumlfgmlzxxk\"}}],\"linkedServiceName\":{\"referenceName\":\"lrtffswqdkvljitb\",\"parameters\":{\"laro\":\"datalr\"}},\"policy\":{\"timeout\":\"datamucr\",\"retry\":\"databgsdxtwqqukgonl\"
,\"retryIntervalInSeconds\":1871793318,\"secureInput\":true,\"secureOutput\":true,\"\":{\"lupccfwqisouqy\":\"datatunrwxsqv\",\"etctjh\":\"dataxzjehdklvqtmzoci\"}},\"name\":\"gmoazsjsuevf\",\"description\":\"ntrccvxqbxgq\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"xekibv\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Failed\",\"Completed\"],\"\":{\"jthmibqgld\":\"datasvzwbktalobxld\",\"yevjealxlewlwb\":\"datattkalpqln\",\"dzowdqvqfl\":\"dataufqfdkkvijilfqv\",\"njdyoxform\":\"dataaqoaopzqpf\"}},{\"activity\":\"ee\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"xokdbvtpqttus\":\"dataeszxtesnhxp\",\"xzcnw\":\"dataxxbzmpvueijrnnwg\"}}],\"userProperties\":[{\"name\":\"zxlu\",\"value\":\"datamkwbwmgqzq\"},{\"name\":\"snycchpcjztziuu\",\"value\":\"datayvpcfvi\"},{\"name\":\"jxciu\",\"value\":\"dataetcxgd\"}],\"\":{\"lwavtzbphxxvf\":\"datale\",\"mo\":\"datajdrqgio\"}}") + "{\"type\":\"zfwdmae\",\"typeProperties\":{\"source\":{\"type\":\"fzgpvdlx\",\"sourceRetryCount\":\"dataotclcuxzllnwmgqc\",\"sourceRetryWait\":\"datagjequox\",\"maxConcurrentConnections\":\"datagfspwhfhdguuvg\",\"disableMetricsCollection\":\"datavz\",\"\":{\"m\":\"dataytqzx\",\"rmeufhkoe\":\"datanwpwrfetjg\"}},\"sink\":{\"type\":\"rjmicha\",\"writeBatchSize\":\"dataen\",\"writeBatchTimeout\":\"dataqjvdde\",\"sinkRetryCount\":\"datavrjhtpxydiuviup\",\"sinkRetryWait\":\"datatnsyrrybdyqiv\",\"maxConcurrentConnections\":\"datasuhozihd\",\"disableMetricsCollection\":\"datadjwth\",\"\":{\"irnjgsovzbd\":\"dataijgasnafdjinw\",\"bjjl\":\"datarcepanhygcahi\",\"hehzxzaz\":\"datasvjzbggsnanojt\"}},\"translator\":\"dataronsxjdgaim\",\"enableStaging\":\"datansowszbeflhx\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"rgokyngarwzutzjx\",\"parameters\":{\"miixngekcwe\":\"datasmnatnpolueylqys\",\"f\":\"databqtkdginmhlgp\",\"wrwvbqv\":\"dataqccey\"}},\"path\":\"dataqgqrsopqgiqf\",\"enableCompression\":\"datatl\",\"\":{\"hfaabibvslo\":\"datazcgugslpvyktf\",\"jzashhiztfmibwzu\":\"datadkpvvkqlkhdxn\",\"wtbfxxsfjn\":\"dataydajck\"}},\"parallelCopies\":\"datascjig\",\"dataIntegrationUnits\":\"datakdsvayyhtiy\",\"enableSkipIncompatibleRow\":\"datahmniz\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"databtehkytl\",\"path\":\"datamyznwrcfqwkqul\",\"\":{\"jxlssosndn\":\"datavqohwiwhrq\",\"cdd\":\"datapxnsjj\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"g\",\"parameters\":{\"xb\":\"datani\",\"jgczpdio\":\"datasjhpm\",\"cwmabehr\":\"datadtjylimzvjwjhmtc\"}},\"path\":\"dataskzw\",\"logLevel\":\"datah\",\"enableReliableLogging\":\"datahz\",\"\":{\"njwvc\":\"dataaesoxoavlwwpvj\"}},\"logSettings\":{\"enableCopyActivityLog\":\"dataqlceflgsndurhqoz\",\"copyActivityLogSettings\":{\"logLevel\":\"datawkdehjlo\",\"enableReliableLogging\":\"datacwo\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"uxedpqwz\",\"parameters\":{\"j\":\"datamgbxjgxrh\",\"ebdfmdjnfeealp\":\"dataubcvuc\",\"mwdrvkbcsvn\":\"datauclkbw\",\"jrz\":\"datavkhfzldzz\"}},\"path\":\"dataqengopd\"}},\"preserveRules\":[\"datandliodajxvszdyv\",\"datamiufbwreaw\",\"datanzhsmueedbhnkle\",\"datavzd\"],\"preserve\":[\"dataqajia\",\"datatcyrdtrdukdmsktu\"],\"validateDataConsistency\":\"datah\",\"skipErrorFile\":{\"fileMissing\":\"datavy\",\"dataInconsistency\":\"datayfbsgrzwdwdudxq\"}},\"inputs\":[{\"referenceName\":\"rpsplw\",\"parameters\":{\"krnkuuot\":\"datacseybvtgcoznnjqx\",\"ybmgm\":\"datay\"}},{\"referenceName\":\"kxkmtuyn\",\"parameters\":{\"hqtqqshbip\":\"datatfjpisv\"}}
,{\"referenceName\":\"huhujkzj\",\"parameters\":{\"oqldnhwdfxgec\":\"datagphipt\",\"z\":\"datackkd\",\"xdzmpjfbdm\":\"datahsnimom\",\"xxnmyxzh\":\"dataawhb\"}},{\"referenceName\":\"ocqoydqyzhfnyl\",\"parameters\":{\"wywayjinlsk\":\"datadsa\",\"qltygyeyxm\":\"dataprnkn\",\"moswcxlgzquq\":\"datawgnwxtmhwgen\"}}],\"outputs\":[{\"referenceName\":\"svqpifzavctywa\",\"parameters\":{\"vnpgsqlanuhmsrnp\":\"dataczprzrsqcu\",\"pdlsoajqxy\":\"dataoaghoeqiwpd\"}},{\"referenceName\":\"lh\",\"parameters\":{\"pikgqjdoglecj\":\"datapyybwlys\"}},{\"referenceName\":\"gyivsiirx\",\"parameters\":{\"iniidaxbesbwci\":\"datapqp\",\"uasjrs\":\"datayjch\",\"omihgksqwzuosyyx\":\"dataq\",\"yrowgi\":\"datardxzudfarzayr\"}}],\"linkedServiceName\":{\"referenceName\":\"vxbovpoeuufwsmde\",\"parameters\":{\"mfydphlslnomqbd\":\"databxzjedycjisxsp\"}},\"policy\":{\"timeout\":\"dataqfzlbpe\",\"retry\":\"datajp\",\"retryIntervalInSeconds\":1909933114,\"secureInput\":false,\"secureOutput\":false,\"\":{\"dchdsxvkmgppxz\":\"datatpgzybezmyjq\",\"bwddpjsokosugr\":\"datajysmtsktous\"}},\"name\":\"whqafzgzmon\",\"description\":\"nie\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"hfmognnwx\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"jyy\":\"dataam\"}},{\"activity\":\"fpnbyxygubvi\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\"],\"\":{\"iuf\":\"datasxhvzg\"}}],\"userProperties\":[{\"name\":\"gsdxjx\",\"value\":\"dataddxoatlprsrkennn\"}],\"\":{\"njtfplgxc\":\"datavlgsadpv\"}}") .toObject(CopyActivity.class); - Assertions.assertEquals("gmoazsjsuevf", model.name()); - Assertions.assertEquals("ntrccvxqbxgq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("xekibv", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zxlu", model.userProperties().get(0).name()); - Assertions.assertEquals("lrtffswqdkvljitb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1871793318, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("esqykqfserls", model.inputs().get(0).referenceName()); - Assertions.assertEquals("uzytxeaqig", model.outputs().get(0).referenceName()); - Assertions.assertEquals("glqdsphvosucryh", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ufyqcqfouhyeyxg", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("yv", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("whqafzgzmon", model.name()); + Assertions.assertEquals("nie", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("hfmognnwx", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("gsdxjx", model.userProperties().get(0).name()); + Assertions.assertEquals("vxbovpoeuufwsmde", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1909933114, 
model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("rpsplw", model.inputs().get(0).referenceName()); + Assertions.assertEquals("svqpifzavctywa", model.outputs().get(0).referenceName()); + Assertions.assertEquals("rgokyngarwzutzjx", model.stagingSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("g", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("uxedpqwz", + model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopyActivity model = new CopyActivity().withName("gmoazsjsuevf").withDescription("ntrccvxqbxgq") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xekibv") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ee") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("zxlu").withValue("datamkwbwmgqzq"), - new UserProperty().withName("snycchpcjztziuu").withValue("datayvpcfvi"), - new UserProperty().withName("jxciu").withValue("dataetcxgd"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lrtffswqdkvljitb") - .withParameters(mapOf("laro", "datalr"))) - .withPolicy(new ActivityPolicy().withTimeout("datamucr").withRetry("databgsdxtwqqukgonl") - .withRetryIntervalInSeconds(1871793318).withSecureInput(true).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withInputs(Arrays.asList( - new DatasetReference().withReferenceName("esqykqfserls").withParameters( - mapOf("tagwmzgvnojgm", "datashh", "oi", "databkali", "owxsxbxd", "datakehpdssvlubdp")), - new DatasetReference().withReferenceName("mixu").withParameters(mapOf("qqa", "datakcqm")), - new DatasetReference().withReferenceName("ftghmtbufkcnkgh") - .withParameters(mapOf("eayodrvwnqb", "datairshl", "slfxejpocsgig", "dataxyofftxzovbhqel", - "dyjwmglgstr", "dataabtx", "bcbefohnymfhmlji", "datafhehdzov")), - new DatasetReference().withReferenceName("kgfvzvmtjcxi") - .withParameters(mapOf("lceetuivmbugizwy", "datazxdb", "ls", "datafhfptbdxtvlpj")))) - .withOutputs( - Arrays - .asList( - new DatasetReference().withReferenceName("uzytxeaqig") - .withParameters(mapOf("tgcidbrjwi", "datatutet", "grcmcqppledx", "dataouepaqnfy", - "welutrvd", "dataecttub")), - new DatasetReference().withReferenceName("hwpxps") - .withParameters(mapOf("fmndrdqqjkeg", "dataltslfccyavy", "erzkhiovh", "datamldkci")), - new DatasetReference().withReferenceName("kwfolpjre") - .withParameters(mapOf("iranxqnzssvn", "datav", "wdoq", "datadtqykz", "ryoxmfrxfxycji", - "dataejltrnqxicyo", "kipklfwnhfk", "datalvchfumlfgmlzxxk")))) - .withSource(new CopySource().withSourceRetryCount("datapq").withSourceRetryWait("datakrmlb") - .withMaxConcurrentConnections("dataqogzaw").withDisableMetricsCollection("dataqdnxum") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withSink(new CopySink().withWriteBatchSize("datalygfvbfejb").withWriteBatchTimeout("dataklgpifvpsmvksa") - 
.withSinkRetryCount("datamsnplqfivxfqmdjz").withSinkRetryWait("datavmkplrjkmpaxoey") - .withMaxConcurrentConnections("dataofaogvmqzagrqcqh").withDisableMetricsCollection("dataskmkdr") - .withAdditionalProperties(mapOf("type", "CopySink"))) - .withTranslator("databzzbrufk").withEnableStaging("datalkpvaagrdf") - .withStagingSettings(new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("glqdsphvosucryh") - .withParameters(mapOf("dofshgmqxwop", "datathzfotfrfhrjka", "aittbmobrxhwpg", "datanitrmzvnrfkzn", - "fgqibb", "datarnxrjmilogcn", "fwyrsfjjsoyu", "dataplrtxhzt"))) - .withPath("databuyd").withEnableCompression("datahknttk").withAdditionalProperties(mapOf())) - .withParallelCopies("dataauh").withDataIntegrationUnits("datachphovu") - .withEnableSkipIncompatibleRow("datasczwcxlncoh") - .withRedirectIncompatibleRowSettings(new RedirectIncompatibleRowSettings() - .withLinkedServiceName("datafvyriawfwwsgdkbd").withPath("datas").withAdditionalProperties(mapOf())) - .withLogStorageSettings( - new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ufyqcqfouhyeyxg") - .withParameters(mapOf("pwwsiooz", "dataochpxps", "v", "dataugbdkxlwck", "sgkeexsozpkvy", - "datazkzjjtapvqjebtd"))) - .withPath("datatytwtfqpmpyww").withLogLevel("dataukqmjcwdo").withEnableReliableLogging("datadqun") + CopyActivity model + = new CopyActivity().withName("whqafzgzmon") + .withDescription("nie") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("hfmognnwx") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fpnbyxygubvi") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("gsdxjx").withValue("dataddxoatlprsrkennn"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vxbovpoeuufwsmde") + .withParameters(mapOf("mfydphlslnomqbd", "databxzjedycjisxsp"))) + .withPolicy(new ActivityPolicy() + .withTimeout("dataqfzlbpe") + .withRetry("datajp") + .withRetryIntervalInSeconds(1909933114) + .withSecureInput(false) + .withSecureOutput(false) .withAdditionalProperties(mapOf())) - .withLogSettings(new LogSettings().withEnableCopyActivityLog("datatuhdoimojcm") - .withCopyActivityLogSettings(new CopyActivityLogSettings().withLogLevel("datadxwvorzhzfocgf") - .withEnableReliableLogging("datatornvbhuyolwifbd")) - .withLogLocationSettings( - new LogLocationSettings().withLinkedServiceName(new LinkedServiceReference().withReferenceName("yv") - .withParameters(mapOf("yu", "datawbbvkthrexzvejq"))).withPath("datalokeqe"))) - .withPreserveRules(Arrays.asList("datap", "dataiehvgchsg")) - .withPreserve(Arrays.asList("datawerbpobvjhunicg", "dataxceivvmdtk")) - .withValidateDataConsistency("dataqhznutrx").withSkipErrorFile( - new SkipErrorFile().withFileMissing("datatrnniarjezjhy").withDataInconsistency("dataqf")); + .withInputs(Arrays.asList(new DatasetReference() + .withReferenceName("rpsplw") + .withParameters(mapOf("krnkuuot", "datacseybvtgcoznnjqx", "ybmgm", "datay")), + new DatasetReference().withReferenceName("kxkmtuyn") + .withParameters(mapOf("hqtqqshbip", "datatfjpisv")), + new 
DatasetReference().withReferenceName("huhujkzj") + .withParameters(mapOf("oqldnhwdfxgec", "datagphipt", "z", "datackkd", "xdzmpjfbdm", + "datahsnimom", "xxnmyxzh", "dataawhb")), + new DatasetReference().withReferenceName("ocqoydqyzhfnyl") + .withParameters(mapOf("wywayjinlsk", "datadsa", "qltygyeyxm", "dataprnkn", "moswcxlgzquq", + "datawgnwxtmhwgen")))) + .withOutputs(Arrays.asList(new DatasetReference() + .withReferenceName("svqpifzavctywa") + .withParameters(mapOf("vnpgsqlanuhmsrnp", "dataczprzrsqcu", "pdlsoajqxy", "dataoaghoeqiwpd")), + new DatasetReference().withReferenceName("lh") + .withParameters(mapOf("pikgqjdoglecj", "datapyybwlys")), + new DatasetReference().withReferenceName("gyivsiirx") + .withParameters(mapOf("iniidaxbesbwci", "datapqp", "uasjrs", "datayjch", "omihgksqwzuosyyx", + "dataq", "yrowgi", "datardxzudfarzayr")))) + .withSource(new CopySource().withSourceRetryCount("dataotclcuxzllnwmgqc") + .withSourceRetryWait("datagjequox") + .withMaxConcurrentConnections("datagfspwhfhdguuvg") + .withDisableMetricsCollection("datavz") + .withAdditionalProperties(mapOf("type", "fzgpvdlx"))) + .withSink(new CopySink().withWriteBatchSize("dataen") + .withWriteBatchTimeout("dataqjvdde") + .withSinkRetryCount("datavrjhtpxydiuviup") + .withSinkRetryWait("datatnsyrrybdyqiv") + .withMaxConcurrentConnections("datasuhozihd") + .withDisableMetricsCollection("datadjwth") + .withAdditionalProperties(mapOf("type", "rjmicha"))) + .withTranslator("dataronsxjdgaim") + .withEnableStaging("datansowszbeflhx") + .withStagingSettings(new StagingSettings() + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rgokyngarwzutzjx") + .withParameters(mapOf("miixngekcwe", "datasmnatnpolueylqys", "f", "databqtkdginmhlgp", + "wrwvbqv", "dataqccey"))) + .withPath("dataqgqrsopqgiqf") + .withEnableCompression("datatl") + .withAdditionalProperties(mapOf())) + .withParallelCopies("datascjig") + .withDataIntegrationUnits("datakdsvayyhtiy") + .withEnableSkipIncompatibleRow("datahmniz") + .withRedirectIncompatibleRowSettings( + new RedirectIncompatibleRowSettings().withLinkedServiceName("databtehkytl") + .withPath("datamyznwrcfqwkqul") + .withAdditionalProperties(mapOf())) + .withLogStorageSettings(new LogStorageSettings() + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("g") + .withParameters( + mapOf("xb", "datani", "jgczpdio", "datasjhpm", "cwmabehr", "datadtjylimzvjwjhmtc"))) + .withPath("dataskzw") + .withLogLevel("datah") + .withEnableReliableLogging("datahz") + .withAdditionalProperties(mapOf())) + .withLogSettings(new LogSettings().withEnableCopyActivityLog("dataqlceflgsndurhqoz") + .withCopyActivityLogSettings( + new CopyActivityLogSettings().withLogLevel("datawkdehjlo").withEnableReliableLogging("datacwo")) + .withLogLocationSettings(new LogLocationSettings() + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uxedpqwz") + .withParameters(mapOf("j", "datamgbxjgxrh", "ebdfmdjnfeealp", "dataubcvuc", "mwdrvkbcsvn", + "datauclkbw", "jrz", "datavkhfzldzz"))) + .withPath("dataqengopd"))) + .withPreserveRules( + Arrays.asList("datandliodajxvszdyv", "datamiufbwreaw", "datanzhsmueedbhnkle", "datavzd")) + .withPreserve(Arrays.asList("dataqajia", "datatcyrdtrdukdmsktu")) + .withValidateDataConsistency("datah") + .withSkipErrorFile( + new SkipErrorFile().withFileMissing("datavy").withDataInconsistency("datayfbsgrzwdwdudxq")); model = BinaryData.fromObject(model).toObject(CopyActivity.class); - Assertions.assertEquals("gmoazsjsuevf", model.name()); - 
Assertions.assertEquals("ntrccvxqbxgq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("xekibv", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zxlu", model.userProperties().get(0).name()); - Assertions.assertEquals("lrtffswqdkvljitb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1871793318, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("esqykqfserls", model.inputs().get(0).referenceName()); - Assertions.assertEquals("uzytxeaqig", model.outputs().get(0).referenceName()); - Assertions.assertEquals("glqdsphvosucryh", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ufyqcqfouhyeyxg", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("yv", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("whqafzgzmon", model.name()); + Assertions.assertEquals("nie", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("hfmognnwx", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("gsdxjx", model.userProperties().get(0).name()); + Assertions.assertEquals("vxbovpoeuufwsmde", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1909933114, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("rpsplw", model.inputs().get(0).referenceName()); + Assertions.assertEquals("svqpifzavctywa", model.outputs().get(0).referenceName()); + Assertions.assertEquals("rgokyngarwzutzjx", model.stagingSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("g", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("uxedpqwz", + model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java index 996f54baba331..ac3efc077a50f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java @@ -25,55 +25,67 @@ public final class CopyActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CopyActivityTypeProperties model = BinaryData.fromString( - 
"{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datajmtdnymbeeysk\",\"sourceRetryWait\":\"datalcaklesjgxdhgezy\",\"maxConcurrentConnections\":\"datahaokhbqmxgglkq\",\"disableMetricsCollection\":\"datapbynetyxuxopoc\",\"\":{\"sbouhmng\":\"datafggggl\",\"vswcpspaoxigpdi\":\"datacnkgius\"}},\"sink\":{\"type\":\"CopySink\",\"writeBatchSize\":\"datappgpqsm\",\"writeBatchTimeout\":\"datautnlynplxx\",\"sinkRetryCount\":\"dataogxk\",\"sinkRetryWait\":\"databcxbgfwwcfwlwnjg\",\"maxConcurrentConnections\":\"datamopcmetd\",\"disableMetricsCollection\":\"dataugimvief\",\"\":{\"n\":\"datasmiyjkhjuuepnjuq\",\"pxqs\":\"datajb\",\"isdwtug\":\"dataaxvq\"}},\"translator\":\"dataxup\",\"enableStaging\":\"datagcbwiw\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"joxxllhkzunnw\",\"parameters\":{\"evhyuuih\":\"dataxyawxkd\",\"qmcvu\":\"datap\",\"fiiif\":\"dataekubljnizwztlcr\"}},\"path\":\"datanfarmficqrd\",\"enableCompression\":\"datavtrulzlrm\",\"\":{\"xcgcdfel\":\"datasmpmhlcxb\",\"ygosuhroicjt\":\"dataapbdbicjzntiblx\"}},\"parallelCopies\":\"datadymoanpkcmdixiux\",\"dataIntegrationUnits\":\"datacalgspz\",\"enableSkipIncompatibleRow\":\"datafprzlvhohzkcsjd\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"dataposmnmkypeqm\",\"path\":\"dataeox\",\"\":{\"hw\":\"dataulavxeaymfl\",\"a\":\"datafunptsryp\",\"tbxcj\":\"datakbwbxvs\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"ad\",\"parameters\":{\"nthluze\":\"datajquliovrc\",\"reasuwe\":\"datavalezkyfykm\",\"gtyt\":\"dataq\"}},\"path\":\"datacc\",\"logLevel\":\"datafutfqffwvnjgjry\",\"enableReliableLogging\":\"datahizwdswikyewv\",\"\":{\"ip\":\"datawqzwsg\",\"gjrfkqf\":\"dataqemeft\",\"kxk\":\"datadrel\"}},\"logSettings\":{\"enableCopyActivityLog\":\"datauaregjoybnoisbm\",\"copyActivityLogSettings\":{\"logLevel\":\"dataenrcqickhvps\",\"enableReliableLogging\":\"datauiuvingmonq\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"ntyuq\",\"parameters\":{\"kydqy\":\"datayojzvaykfjgakays\"}},\"path\":\"datawmfwr\"}},\"preserveRules\":[\"datazg\",\"datavmuot\",\"datasea\"],\"preserve\":[\"datajampvwxlkhprlt\"],\"validateDataConsistency\":\"datap\",\"skipErrorFile\":{\"fileMissing\":\"datarbyqh\",\"dataInconsistency\":\"dataxbo\"}}") + 
"{\"source\":{\"type\":\"ctbxpuisfjamgn\",\"sourceRetryCount\":\"dataosusiyycoflj\",\"sourceRetryWait\":\"datadmwa\",\"maxConcurrentConnections\":\"datapbuqkdieuopwsa\",\"disableMetricsCollection\":\"datahmizcfk\",\"\":{\"ygzkdbmjzobc\":\"datamoonnriah\",\"kptteojxhwg\":\"datavbbuuipel\",\"vkjdhsl\":\"dataahxgrpwjgk\"}},\"sink\":{\"type\":\"mm\",\"writeBatchSize\":\"datatzxsvwqiwgjwrhu\",\"writeBatchTimeout\":\"dataaaaxigafa\",\"sinkRetryCount\":\"datatoo\",\"sinkRetryWait\":\"datazdoblpdtcyv\",\"maxConcurrentConnections\":\"datahboplavgfbvro\",\"disableMetricsCollection\":\"datauexqweyslwlppoh\",\"\":{\"gb\":\"datagalexyiygkadtwd\",\"vxcjdobsgv\":\"dataxt\"}},\"translator\":\"datajkwltnsnhuvmok\",\"enableStaging\":\"datasclpnbidnlodk\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"qnkptixa\",\"parameters\":{\"mhoplqtzgtpsbym\":\"datazgaevrygggccp\"}},\"path\":\"datactorqzbq\",\"enableCompression\":\"datagfqqrarolc\",\"\":{\"tdif\":\"databynlbwcnnfpfg\",\"ojtzarlii\":\"datayfjslehgeeyg\",\"uqcmunhfarb\":\"datartvqvidsjhxxx\"}},\"parallelCopies\":\"datayvypuio\",\"dataIntegrationUnits\":\"dataowwerwywlxhiuw\",\"enableSkipIncompatibleRow\":\"dataxgejytqnzrcbha\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"datactjvl\",\"path\":\"datanzgzfs\",\"\":{\"baiaqqfu\":\"dataytydrdc\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"muclxgc\",\"parameters\":{\"hzhervvlibro\":\"datasuyqcvykags\",\"loedjzrvfrfs\":\"dataq\"}},\"path\":\"databfgwujwowt\",\"logLevel\":\"datauepszzn\",\"enableReliableLogging\":\"datajnsp\",\"\":{\"nmavf\":\"dataoygutqtjwiv\"}},\"logSettings\":{\"enableCopyActivityLog\":\"datadww\",\"copyActivityLogSettings\":{\"logLevel\":\"datae\",\"enableReliableLogging\":\"datadcpiwcgcwms\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"lpq\",\"parameters\":{\"pu\":\"datahdwjfxopzclka\",\"vcqnjjfm\":\"datamga\",\"joonvtv\":\"datavcfbflyzcqomlybs\",\"dhum\":\"datafpumlob\"}},\"path\":\"dataqwckapoetdfzjwje\"}},\"preserveRules\":[\"dataihan\",\"datawipdqozvv\",\"dataq\",\"dataopvhwmt\"],\"preserve\":[\"datarjvqvuvipsnfeago\",\"datadbsasc\",\"datatdwijxkxlt\"],\"validateDataConsistency\":\"datadwiffagfeq\",\"skipErrorFile\":{\"fileMissing\":\"datapg\",\"dataInconsistency\":\"dataltthsuzxyl\"}}") .toObject(CopyActivityTypeProperties.class); - Assertions.assertEquals("joxxllhkzunnw", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ad", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ntyuq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("qnkptixa", model.stagingSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("muclxgc", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("lpq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CopyActivityTypeProperties model = new CopyActivityTypeProperties() - .withSource(new CopySource().withSourceRetryCount("datajmtdnymbeeysk") - .withSourceRetryWait("datalcaklesjgxdhgezy").withMaxConcurrentConnections("datahaokhbqmxgglkq") - .withDisableMetricsCollection("datapbynetyxuxopoc") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withSink(new CopySink().withWriteBatchSize("datappgpqsm").withWriteBatchTimeout("datautnlynplxx") - 
.withSinkRetryCount("dataogxk").withSinkRetryWait("databcxbgfwwcfwlwnjg") - .withMaxConcurrentConnections("datamopcmetd").withDisableMetricsCollection("dataugimvief") - .withAdditionalProperties(mapOf("type", "CopySink"))) - .withTranslator("dataxup").withEnableStaging("datagcbwiw") + .withSource(new CopySource().withSourceRetryCount("dataosusiyycoflj") + .withSourceRetryWait("datadmwa") + .withMaxConcurrentConnections("datapbuqkdieuopwsa") + .withDisableMetricsCollection("datahmizcfk") + .withAdditionalProperties(mapOf("type", "ctbxpuisfjamgn"))) + .withSink(new CopySink().withWriteBatchSize("datatzxsvwqiwgjwrhu") + .withWriteBatchTimeout("dataaaaxigafa") + .withSinkRetryCount("datatoo") + .withSinkRetryWait("datazdoblpdtcyv") + .withMaxConcurrentConnections("datahboplavgfbvro") + .withDisableMetricsCollection("datauexqweyslwlppoh") + .withAdditionalProperties(mapOf("type", "mm"))) + .withTranslator("datajkwltnsnhuvmok") + .withEnableStaging("datasclpnbidnlodk") .withStagingSettings(new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("joxxllhkzunnw") - .withParameters(mapOf("evhyuuih", "dataxyawxkd", "qmcvu", "datap", "fiiif", "dataekubljnizwztlcr"))) - .withPath("datanfarmficqrd").withEnableCompression("datavtrulzlrm").withAdditionalProperties(mapOf())) - .withParallelCopies("datadymoanpkcmdixiux").withDataIntegrationUnits("datacalgspz") - .withEnableSkipIncompatibleRow( - "datafprzlvhohzkcsjd") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qnkptixa") + .withParameters(mapOf("mhoplqtzgtpsbym", "datazgaevrygggccp"))) + .withPath("datactorqzbq") + .withEnableCompression("datagfqqrarolc") + .withAdditionalProperties(mapOf())) + .withParallelCopies("datayvypuio") + .withDataIntegrationUnits("dataowwerwywlxhiuw") + .withEnableSkipIncompatibleRow("dataxgejytqnzrcbha") .withRedirectIncompatibleRowSettings( - new RedirectIncompatibleRowSettings().withLinkedServiceName("dataposmnmkypeqm").withPath( - "dataeox").withAdditionalProperties( - mapOf())) + new RedirectIncompatibleRowSettings() + .withLinkedServiceName("datactjvl") + .withPath("datanzgzfs") + .withAdditionalProperties(mapOf())) .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ad") - .withParameters(mapOf("nthluze", "datajquliovrc", "reasuwe", "datavalezkyfykm", "gtyt", "dataq"))) - .withPath("datacc").withLogLevel("datafutfqffwvnjgjry").withEnableReliableLogging( - "datahizwdswikyewv") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("muclxgc") + .withParameters(mapOf("hzhervvlibro", "datasuyqcvykags", "loedjzrvfrfs", "dataq"))) + .withPath("databfgwujwowt") + .withLogLevel("datauepszzn") + .withEnableReliableLogging("datajnsp") .withAdditionalProperties(mapOf())) - .withLogSettings(new LogSettings().withEnableCopyActivityLog("datauaregjoybnoisbm") - .withCopyActivityLogSettings(new CopyActivityLogSettings().withLogLevel("dataenrcqickhvps") - .withEnableReliableLogging("datauiuvingmonq")) - .withLogLocationSettings(new LogLocationSettings().withLinkedServiceName(new LinkedServiceReference() - .withReferenceName("ntyuq").withParameters(mapOf("kydqy", "datayojzvaykfjgakays"))) - .withPath("datawmfwr"))) - .withPreserveRules(Arrays.asList("datazg", "datavmuot", "datasea")) - .withPreserve(Arrays.asList("datajampvwxlkhprlt")).withValidateDataConsistency("datap") - .withSkipErrorFile(new SkipErrorFile().withFileMissing("datarbyqh").withDataInconsistency("dataxbo")); + 
.withLogSettings(new LogSettings().withEnableCopyActivityLog("datadww") + .withCopyActivityLogSettings( + new CopyActivityLogSettings().withLogLevel("datae").withEnableReliableLogging("datadcpiwcgcwms")) + .withLogLocationSettings(new LogLocationSettings() + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lpq") + .withParameters(mapOf("pu", "datahdwjfxopzclka", "vcqnjjfm", "datamga", "joonvtv", + "datavcfbflyzcqomlybs", "dhum", "datafpumlob"))) + .withPath("dataqwckapoetdfzjwje"))) + .withPreserveRules(Arrays.asList("dataihan", "datawipdqozvv", "dataq", "dataopvhwmt")) + .withPreserve(Arrays.asList("datarjvqvuvipsnfeago", "datadbsasc", "datatdwijxkxlt")) + .withValidateDataConsistency("datadwiffagfeq") + .withSkipErrorFile(new SkipErrorFile().withFileMissing("datapg").withDataInconsistency("dataltthsuzxyl")); model = BinaryData.fromObject(model).toObject(CopyActivityTypeProperties.class); - Assertions.assertEquals("joxxllhkzunnw", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ad", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ntyuq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("qnkptixa", model.stagingSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("muclxgc", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("lpq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java index c71d91fa83fce..68a530a8d6f15 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java @@ -13,20 +13,22 @@ public final class CopyComputeScalePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - CopyComputeScaleProperties model = BinaryData.fromString( - "{\"dataIntegrationUnit\":557038666,\"timeToLive\":1218097718,\"\":{\"hx\":\"datab\",\"soebdltni\":\"datacszdwpaveuxgmi\",\"eilhggajfeudb\":\"dataiimerffhgvcymdd\",\"ih\":\"databmoljirchhwl\"}}") + CopyComputeScaleProperties model = BinaryData + .fromString( + "{\"dataIntegrationUnit\":743562037,\"timeToLive\":277540849,\"\":{\"tfstdwqdvzc\":\"dataaszji\"}}") .toObject(CopyComputeScaleProperties.class); - Assertions.assertEquals(557038666, model.dataIntegrationUnit()); - Assertions.assertEquals(1218097718, model.timeToLive()); + Assertions.assertEquals(743562037, model.dataIntegrationUnit()); + Assertions.assertEquals(277540849, model.timeToLive()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopyComputeScaleProperties model = new CopyComputeScaleProperties().withDataIntegrationUnit(557038666) - .withTimeToLive(1218097718).withAdditionalProperties(mapOf()); + CopyComputeScaleProperties model = new CopyComputeScaleProperties().withDataIntegrationUnit(743562037) + .withTimeToLive(277540849) + .withAdditionalProperties(mapOf()); model = 
BinaryData.fromObject(model).toObject(CopyComputeScaleProperties.class); - Assertions.assertEquals(557038666, model.dataIntegrationUnit()); - Assertions.assertEquals(1218097718, model.timeToLive()); + Assertions.assertEquals(743562037, model.dataIntegrationUnit()); + Assertions.assertEquals(277540849, model.timeToLive()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java index 74136207f142a..ee324628c5ab4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java @@ -13,15 +13,19 @@ public final class CopySinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CopySink model = BinaryData.fromString( - "{\"type\":\"CopySink\",\"writeBatchSize\":\"dataofo\",\"writeBatchTimeout\":\"databiuikpotjjfec\",\"sinkRetryCount\":\"datakqjpovjvvxpsnr\",\"sinkRetryWait\":\"datar\",\"maxConcurrentConnections\":\"datah\",\"disableMetricsCollection\":\"dataqfl\",\"\":{\"ylbtkxei\":\"datacxyiqppacjiqrlla\",\"aaxttexaugojv\":\"datazgn\",\"bo\":\"datajezr\"}}") + "{\"type\":\"ayo\",\"writeBatchSize\":\"dataetzcxlisvqfb\",\"writeBatchTimeout\":\"dataizxp\",\"sinkRetryCount\":\"datapsaploex\",\"sinkRetryWait\":\"datamvlocd\",\"maxConcurrentConnections\":\"datahkob\",\"disableMetricsCollection\":\"datahhipn\",\"\":{\"n\":\"datadyriw\"}}") .toObject(CopySink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopySink model = new CopySink().withWriteBatchSize("dataofo").withWriteBatchTimeout("databiuikpotjjfec") - .withSinkRetryCount("datakqjpovjvvxpsnr").withSinkRetryWait("datar").withMaxConcurrentConnections("datah") - .withDisableMetricsCollection("dataqfl").withAdditionalProperties(mapOf("type", "CopySink")); + CopySink model = new CopySink().withWriteBatchSize("dataetzcxlisvqfb") + .withWriteBatchTimeout("dataizxp") + .withSinkRetryCount("datapsaploex") + .withSinkRetryWait("datamvlocd") + .withMaxConcurrentConnections("datahkob") + .withDisableMetricsCollection("datahhipn") + .withAdditionalProperties(mapOf("type", "ayo")); model = BinaryData.fromObject(model).toObject(CopySink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java index 59d3029314588..861f03cb5856c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java @@ -13,15 +13,17 @@ public final class CopySourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CopySource model = BinaryData.fromString( - 
"{\"type\":\"CopySource\",\"sourceRetryCount\":\"dataqww\",\"sourceRetryWait\":\"dataqjyiwuveryavb\",\"maxConcurrentConnections\":\"datavcogupshoofasky\",\"disableMetricsCollection\":\"datadjoorbuuhbcck\",\"\":{\"kquhmblnismviaa\":\"datatelmdrmasvghphl\",\"povzespdip\":\"datadexsrglxljuyvk\"}}") + "{\"type\":\"wjwbht\",\"sourceRetryCount\":\"databch\",\"sourceRetryWait\":\"dataneuzueikadhu\",\"maxConcurrentConnections\":\"datax\",\"disableMetricsCollection\":\"datagljcywyrzx\",\"\":{\"yejyavxgmogcnw\":\"datahlxxkviyjru\",\"nlceggyqlvnhm\":\"datakqqxpnjqtzdahvet\",\"wsyfsgikgcbjclf\":\"datautkwwtymbcc\",\"zebifktnx\":\"datapfdsatrzqmtuxwtd\"}}") .toObject(CopySource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopySource model = new CopySource().withSourceRetryCount("dataqww").withSourceRetryWait("dataqjyiwuveryavb") - .withMaxConcurrentConnections("datavcogupshoofasky").withDisableMetricsCollection("datadjoorbuuhbcck") - .withAdditionalProperties(mapOf("type", "CopySource")); + CopySource model = new CopySource().withSourceRetryCount("databch") + .withSourceRetryWait("dataneuzueikadhu") + .withMaxConcurrentConnections("datax") + .withDisableMetricsCollection("datagljcywyrzx") + .withAdditionalProperties(mapOf("type", "wjwbht")); model = BinaryData.fromObject(model).toObject(CopySource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java index 9c9a4b5e3efa8..305d7fa296e4e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java @@ -12,13 +12,14 @@ public final class CopyTranslatorTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - CopyTranslator model = BinaryData.fromString("{\"type\":\"CopyTranslator\",\"\":{\"mszxyfaidz\":\"datalx\"}}") - .toObject(CopyTranslator.class); + CopyTranslator model + = BinaryData.fromString("{\"type\":\"kwvcogqjimxcth\",\"\":{\"ezufxuugvdbpjo\":\"datarkinutdhbmizbev\"}}") + .toObject(CopyTranslator.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CopyTranslator model = new CopyTranslator().withAdditionalProperties(mapOf("type", "CopyTranslator")); + CopyTranslator model = new CopyTranslator().withAdditionalProperties(mapOf("type", "kwvcogqjimxcth")); model = BinaryData.fromObject(model).toObject(CopyTranslator.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java index af701837281e1..c83febdfbffcb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java @@ -19,30 +19,34 @@ public final class 
CosmosDbMongoDbApiCollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbMongoDbApiCollectionDataset model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApiCollection\",\"typeProperties\":{\"collection\":\"datadwhslxebaj\"},\"description\":\"knmstbdoprwkamp\",\"structure\":\"datawpbldz\",\"schema\":\"dataudrcycm\",\"linkedServiceName\":{\"referenceName\":\"huzymhlhihqk\",\"parameters\":{\"aiildcpud\":\"datakmnbzko\",\"drobujnjgy\":\"datahquxsyjofpgv\",\"njgcp\":\"datauxmqxigidul\",\"ghxhkyqzjsdkpvn\":\"datakgrhnytslgsazuqz\"}},\"parameters\":{\"hflyuvbgtz\":{\"type\":\"Array\",\"defaultValue\":\"dataffxsfybntmveh\"}},\"annotations\":[\"dataweuydyb\",\"dataairvhpqsv\"],\"folder\":{\"name\":\"ogeatrcnqnvn\"},\"\":{\"iznzs\":\"datafcsjvjnk\"}}") + "{\"type\":\"wfsaa\",\"typeProperties\":{\"collection\":\"dataxcmmhipbvskcitly\"},\"description\":\"fgb\",\"structure\":\"datavm\",\"schema\":\"datad\",\"linkedServiceName\":{\"referenceName\":\"liuajklnacgdnx\",\"parameters\":{\"jaojpzngdrzige\":\"datanmzr\",\"dkhwqdmohhe\":\"datawsadsqyu\",\"yevyigdeipnfizej\":\"datayuunx\",\"acndjzwhaj\":\"datali\"}},\"parameters\":{\"hdyncradxs\":{\"type\":\"String\",\"defaultValue\":\"datao\"},\"pfapmqnmelyk\":{\"type\":\"Object\",\"defaultValue\":\"datae\"},\"n\":{\"type\":\"SecureString\",\"defaultValue\":\"dataihiclmsln\"}},\"annotations\":[\"datazlbbbajdexquaw\"],\"folder\":{\"name\":\"zbf\"},\"\":{\"vvxn\":\"datajizwhpnbmajvvyx\",\"ybfmlngfwhrmvl\":\"datakzixbk\",\"nrzblxna\":\"dataknujmw\",\"kovohwvpr\":\"datahsmfndcbsyhludzj\"}}") .toObject(CosmosDbMongoDbApiCollectionDataset.class); - Assertions.assertEquals("knmstbdoprwkamp", model.description()); - Assertions.assertEquals("huzymhlhihqk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hflyuvbgtz").type()); - Assertions.assertEquals("ogeatrcnqnvn", model.folder().name()); + Assertions.assertEquals("fgb", model.description()); + Assertions.assertEquals("liuajklnacgdnx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hdyncradxs").type()); + Assertions.assertEquals("zbf", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbMongoDbApiCollectionDataset model = new CosmosDbMongoDbApiCollectionDataset() - .withDescription("knmstbdoprwkamp").withStructure("datawpbldz").withSchema("dataudrcycm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("huzymhlhihqk") - .withParameters(mapOf("aiildcpud", "datakmnbzko", "drobujnjgy", "datahquxsyjofpgv", "njgcp", - "datauxmqxigidul", "ghxhkyqzjsdkpvn", "datakgrhnytslgsazuqz"))) - .withParameters(mapOf("hflyuvbgtz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataffxsfybntmveh"))) - .withAnnotations(Arrays.asList("dataweuydyb", "dataairvhpqsv")) - .withFolder(new DatasetFolder().withName("ogeatrcnqnvn")).withCollection("datadwhslxebaj"); + CosmosDbMongoDbApiCollectionDataset model = new CosmosDbMongoDbApiCollectionDataset().withDescription("fgb") + .withStructure("datavm") + .withSchema("datad") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("liuajklnacgdnx") + .withParameters(mapOf("jaojpzngdrzige", "datanmzr", "dkhwqdmohhe", "datawsadsqyu", "yevyigdeipnfizej", + "datayuunx", "acndjzwhaj", "datali"))) + .withParameters(mapOf("hdyncradxs", + new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datao"), "pfapmqnmelyk", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datae"), "n", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataihiclmsln"))) + .withAnnotations(Arrays.asList("datazlbbbajdexquaw")) + .withFolder(new DatasetFolder().withName("zbf")) + .withCollection("dataxcmmhipbvskcitly"); model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiCollectionDataset.class); - Assertions.assertEquals("knmstbdoprwkamp", model.description()); - Assertions.assertEquals("huzymhlhihqk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hflyuvbgtz").type()); - Assertions.assertEquals("ogeatrcnqnvn", model.folder().name()); + Assertions.assertEquals("fgb", model.description()); + Assertions.assertEquals("liuajklnacgdnx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hdyncradxs").type()); + Assertions.assertEquals("zbf", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java index d4f12bbacbeee..78a272e22cf85 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java @@ -10,14 +10,15 @@ public final class CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - CosmosDbMongoDbApiCollectionDatasetTypeProperties model = BinaryData.fromString("{\"collection\":\"databiba\"}") - .toObject(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); + CosmosDbMongoDbApiCollectionDatasetTypeProperties model + = BinaryData.fromString("{\"collection\":\"datafdvtdurmdt\"}") + .toObject(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CosmosDbMongoDbApiCollectionDatasetTypeProperties model - = new CosmosDbMongoDbApiCollectionDatasetTypeProperties().withCollection("databiba"); + = new CosmosDbMongoDbApiCollectionDatasetTypeProperties().withCollection("datafdvtdurmdt"); model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java index fc131b86e0563..572a180bb08e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java @@ -18,28 +18,30 @@ public final class CosmosDbMongoDbApiLinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbMongoDbApiLinkedService model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApi\",\"typeProperties\":{\"isServerVersionAbove32\":\"dataqhedbqrl\",\"connectionString\":\"datayhzyf\",\"database\":\"datau\"},\"connectVia\":{\"referenceName\":\"krzvd\",\"parameters\":{\"wx\":\"dataevbkkgqfjwgphh\",\"fqzwysmsqqmdajsq\":\"datahw\"}},\"description\":\"xftyifadsliifdr\",\"parameters\":{\"rfvq\":{\"type\":\"Float\",\"defaultValue\":\"dataspbibsnxmfooinp\"},\"cbrewtf\":{\"type\":\"Int\",\"defaultValue\":\"datawgkm\"}},\"annotations\":[\"datatsxnikcgitt\"],\"\":{\"invfcdsijsinybn\":\"datapihtepasjeb\",\"ayxujzoxgakqt\":\"datalx\",\"mah\":\"datarjucazwe\"}}") + "{\"type\":\"ysefilncyqnkpxe\",\"typeProperties\":{\"isServerVersionAbove32\":\"dataktcr\",\"connectionString\":\"datact\",\"database\":\"dataedzyzbvsjut\"},\"connectVia\":{\"referenceName\":\"pzdwerjckzxdlupg\",\"parameters\":{\"pglntnsiuxy\":\"datamndzbfoia\"}},\"description\":\"jwsdxyzgr\",\"parameters\":{\"kobxvexusjfjuphj\":{\"type\":\"String\",\"defaultValue\":\"dataxgomhenqnovt\"},\"atl\":{\"type\":\"Float\",\"defaultValue\":\"dataksvjtgpy\"}},\"annotations\":[\"datasxqmmxjdkvy\",\"datailrlfgowvvxjqru\",\"datak\"],\"\":{\"nsqeewfuw\":\"datayfe\",\"azi\":\"datamkca\",\"wmebmx\":\"datahwy\",\"txkurp\":\"datawcf\"}}") .toObject(CosmosDbMongoDbApiLinkedService.class); - Assertions.assertEquals("krzvd", model.connectVia().referenceName()); - Assertions.assertEquals("xftyifadsliifdr", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("rfvq").type()); + Assertions.assertEquals("pzdwerjckzxdlupg", model.connectVia().referenceName()); + Assertions.assertEquals("jwsdxyzgr", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("kobxvexusjfjuphj").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CosmosDbMongoDbApiLinkedService model = new CosmosDbMongoDbApiLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("krzvd") - .withParameters(mapOf("wx", "dataevbkkgqfjwgphh", "fqzwysmsqqmdajsq", "datahw"))) - .withDescription("xftyifadsliifdr") - .withParameters(mapOf("rfvq", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataspbibsnxmfooinp"), - "cbrewtf", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datawgkm"))) - .withAnnotations(Arrays.asList("datatsxnikcgitt")).withIsServerVersionAbove32("dataqhedbqrl") - .withConnectionString("datayhzyf").withDatabase("datau"); + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("pzdwerjckzxdlupg") + .withParameters(mapOf("pglntnsiuxy", "datamndzbfoia"))) + .withDescription("jwsdxyzgr") + .withParameters(mapOf("kobxvexusjfjuphj", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxgomhenqnovt"), "atl", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataksvjtgpy"))) + .withAnnotations(Arrays.asList("datasxqmmxjdkvy", "datailrlfgowvvxjqru", "datak")) + .withIsServerVersionAbove32("dataktcr") + .withConnectionString("datact") + .withDatabase("dataedzyzbvsjut"); model = 
BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiLinkedService.class); - Assertions.assertEquals("krzvd", model.connectVia().referenceName()); - Assertions.assertEquals("xftyifadsliifdr", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("rfvq").type()); + Assertions.assertEquals("pzdwerjckzxdlupg", model.connectVia().referenceName()); + Assertions.assertEquals("jwsdxyzgr", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("kobxvexusjfjuphj").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java index ca4d87111afdd..37839b707ac25 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class CosmosDbMongoDbApiLinkedServiceTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbMongoDbApiLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"isServerVersionAbove32\":\"dataxgce\",\"connectionString\":\"datax\",\"database\":\"dataanudvqannenxg\"}") + "{\"isServerVersionAbove32\":\"dataksixhornvydx\",\"connectionString\":\"datakdsqeayjdqtti\",\"database\":\"datawixsdxxflwfvah\"}") .toObject(CosmosDbMongoDbApiLinkedServiceTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbMongoDbApiLinkedServiceTypeProperties model = new CosmosDbMongoDbApiLinkedServiceTypeProperties() - .withIsServerVersionAbove32("dataxgce").withConnectionString("datax").withDatabase("dataanudvqannenxg"); + CosmosDbMongoDbApiLinkedServiceTypeProperties model + = new CosmosDbMongoDbApiLinkedServiceTypeProperties().withIsServerVersionAbove32("dataksixhornvydx") + .withConnectionString("datakdsqeayjdqtti") + .withDatabase("datawixsdxxflwfvah"); model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiLinkedServiceTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java index 7e1f092bc3b45..0fb3b894bb14e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java @@ -11,16 +11,19 @@ public final class CosmosDbMongoDbApiSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbMongoDbApiSink model = BinaryData.fromString( - 
"{\"type\":\"CosmosDbMongoDbApiSink\",\"writeBehavior\":\"dataxvjjwlwysrs\",\"writeBatchSize\":\"datahciazwebts\",\"writeBatchTimeout\":\"dataqkanuxjud\",\"sinkRetryCount\":\"datazodnxlcdgkc\",\"sinkRetryWait\":\"dataancjlkrskzw\",\"maxConcurrentConnections\":\"databafqzihmvw\",\"disableMetricsCollection\":\"datajwvqiahoqjz\",\"\":{\"hgwzbystwuuwe\":\"datawdlrtcfulmz\",\"qichzcajity\":\"datantjssjbpnatpym\"}}") + "{\"type\":\"tdmrqravpx\",\"writeBehavior\":\"dataavoqcyl\",\"writeBatchSize\":\"dataewpngcocbo\",\"writeBatchTimeout\":\"datasjobzvsugentr\",\"sinkRetryCount\":\"databwtivgdcrrbsw\",\"sinkRetryWait\":\"dataizmxvdtkmwyiko\",\"maxConcurrentConnections\":\"dataepkknyvn\",\"disableMetricsCollection\":\"dataglia\",\"\":{\"sg\":\"datajdhbqwcu\",\"kzwijqxwmjl\":\"dataefna\",\"fsqruyqaqemozj\":\"dataosqhnwbqc\",\"pclmkeswtkhfcnce\":\"datahixcivjokauj\"}}") .toObject(CosmosDbMongoDbApiSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbMongoDbApiSink model = new CosmosDbMongoDbApiSink().withWriteBatchSize("datahciazwebts") - .withWriteBatchTimeout("dataqkanuxjud").withSinkRetryCount("datazodnxlcdgkc") - .withSinkRetryWait("dataancjlkrskzw").withMaxConcurrentConnections("databafqzihmvw") - .withDisableMetricsCollection("datajwvqiahoqjz").withWriteBehavior("dataxvjjwlwysrs"); + CosmosDbMongoDbApiSink model = new CosmosDbMongoDbApiSink().withWriteBatchSize("dataewpngcocbo") + .withWriteBatchTimeout("datasjobzvsugentr") + .withSinkRetryCount("databwtivgdcrrbsw") + .withSinkRetryWait("dataizmxvdtkmwyiko") + .withMaxConcurrentConnections("dataepkknyvn") + .withDisableMetricsCollection("dataglia") + .withWriteBehavior("dataavoqcyl"); model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java index 3e447950a1c2f..19dd8073d4c63 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java @@ -14,19 +14,25 @@ public final class CosmosDbMongoDbApiSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbMongoDbApiSource model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApiSource\",\"filter\":\"dataeztmdyb\",\"cursorMethods\":{\"project\":\"dataj\",\"sort\":\"dataogtnfla\",\"skip\":\"datapghfvkqijmyqo\",\"limit\":\"datasfaoc\",\"\":{\"dpyohnmru\":\"datarr\",\"eywbhxhawkwcf\":\"datavlwhtfscoups\"}},\"batchSize\":\"dataqexd\",\"queryTimeout\":\"datacvkwwjjotfun\",\"additionalColumns\":\"datauejxvrwalekqedof\",\"sourceRetryCount\":\"databxmlai\",\"sourceRetryWait\":\"datavhlpfjibblm\",\"maxConcurrentConnections\":\"datavzdaycmene\",\"disableMetricsCollection\":\"datayzlslvgqle\",\"\":{\"t\":\"datawbbellcjd\",\"dpmy\":\"datacvddfmflwfxdkp\"}}") + 
"{\"type\":\"afks\",\"filter\":\"datappsnljduwkb\",\"cursorMethods\":{\"project\":\"datamrhnghvlv\",\"sort\":\"dataxbjqiabitevv\",\"skip\":\"dataiyp\",\"limit\":\"datajz\",\"\":{\"lhmneykxewemtaz\":\"datadfyvgcftaqydcr\",\"jzpxo\":\"datar\",\"erxmlfnugl\":\"datae\",\"snitlmcaehjhwk\":\"datayrkrtdkpoxzwg\"}},\"batchSize\":\"datajzq\",\"queryTimeout\":\"dataypanwejbngojna\",\"additionalColumns\":\"datawytkwtf\",\"sourceRetryCount\":\"datamfhruhw\",\"sourceRetryWait\":\"datanrdf\",\"maxConcurrentConnections\":\"databhrvonea\",\"disableMetricsCollection\":\"datajmjig\",\"\":{\"rxtoxlx\":\"datadfrtasa\",\"ycissh\":\"datajijttsyrxynnfsk\"}}") .toObject(CosmosDbMongoDbApiSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbMongoDbApiSource model = new CosmosDbMongoDbApiSource().withSourceRetryCount("databxmlai") - .withSourceRetryWait("datavhlpfjibblm").withMaxConcurrentConnections("datavzdaycmene") - .withDisableMetricsCollection("datayzlslvgqle").withFilter("dataeztmdyb") - .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("dataj").withSort("dataogtnfla") - .withSkip("datapghfvkqijmyqo").withLimit("datasfaoc").withAdditionalProperties(mapOf())) - .withBatchSize("dataqexd").withQueryTimeout("datacvkwwjjotfun") - .withAdditionalColumns("datauejxvrwalekqedof"); + CosmosDbMongoDbApiSource model = new CosmosDbMongoDbApiSource().withSourceRetryCount("datamfhruhw") + .withSourceRetryWait("datanrdf") + .withMaxConcurrentConnections("databhrvonea") + .withDisableMetricsCollection("datajmjig") + .withFilter("datappsnljduwkb") + .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("datamrhnghvlv") + .withSort("dataxbjqiabitevv") + .withSkip("dataiyp") + .withLimit("datajz") + .withAdditionalProperties(mapOf())) + .withBatchSize("datajzq") + .withQueryTimeout("dataypanwejbngojna") + .withAdditionalColumns("datawytkwtf"); model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java index 8c8b43a22e2b8..c1b57236f5549 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java @@ -19,34 +19,31 @@ public final class CosmosDbSqlApiCollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbSqlApiCollectionDataset model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiCollection\",\"typeProperties\":{\"collectionName\":\"datalvsmfjihv\"},\"description\":\"cqrttjf\",\"structure\":\"datammfjew\",\"schema\":\"dataq\",\"linkedServiceName\":{\"referenceName\":\"avdostw\",\"parameters\":{\"elvxgwzz\":\"datafm\",\"zvzrbvgwxhlx\":\"datawdtlcjgpvc\"}},\"parameters\":{\"vhhplkhwwdk\":{\"type\":\"Array\",\"defaultValue\":\"datadrwynbgovazoym\"},\"yxryearmhpwbuk\":{\"type\":\"Object\",\"defaultValue\":\"dataeqmgkcswz\"}},\"annotations\":[\"datamfasgtlvhqpoilos\",\"dataaemcezevftmh\",\"datal\"],\"folder\":{\"name\":\"jy\"},\"\":{\"miwtpcflcez\":\"datatm\",\"fpf\":\"datawwvwiftdjtv\"}}") + 
"{\"type\":\"grcavqc\",\"typeProperties\":{\"collectionName\":\"datargdqyxaj\"},\"description\":\"zoq\",\"structure\":\"datamqcwwsjnkiixepbn\",\"schema\":\"dataqww\",\"linkedServiceName\":{\"referenceName\":\"fgsqxile\",\"parameters\":{\"uqynttwk\":\"datasewrzne\",\"yd\":\"datahajksbsyogjmqjhg\"}},\"parameters\":{\"xtdlxwmvcdkucp\":{\"type\":\"Array\",\"defaultValue\":\"datavjbssfcriqxz\"}},\"annotations\":[\"dataafr\",\"datargorogeuvmkrspn\"],\"folder\":{\"name\":\"semlzofrsnq\"},\"\":{\"rpub\":\"datapetexutikelpmw\"}}") .toObject(CosmosDbSqlApiCollectionDataset.class); - Assertions.assertEquals("cqrttjf", model.description()); - Assertions.assertEquals("avdostw", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vhhplkhwwdk").type()); - Assertions.assertEquals("jy", model.folder().name()); + Assertions.assertEquals("zoq", model.description()); + Assertions.assertEquals("fgsqxile", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("xtdlxwmvcdkucp").type()); + Assertions.assertEquals("semlzofrsnq", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbSqlApiCollectionDataset model - = new CosmosDbSqlApiCollectionDataset().withDescription("cqrttjf").withStructure("datammfjew") - .withSchema("dataq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("avdostw") - .withParameters(mapOf("elvxgwzz", "datafm", "zvzrbvgwxhlx", "datawdtlcjgpvc"))) - .withParameters( - mapOf("vhhplkhwwdk", - new ParameterSpecification().withType(ParameterType.ARRAY) - .withDefaultValue("datadrwynbgovazoym"), - "yxryearmhpwbuk", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataeqmgkcswz"))) - .withAnnotations(Arrays.asList("datamfasgtlvhqpoilos", "dataaemcezevftmh", "datal")) - .withFolder(new DatasetFolder().withName("jy")).withCollectionName("datalvsmfjihv"); + CosmosDbSqlApiCollectionDataset model = new CosmosDbSqlApiCollectionDataset().withDescription("zoq") + .withStructure("datamqcwwsjnkiixepbn") + .withSchema("dataqww") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fgsqxile") + .withParameters(mapOf("uqynttwk", "datasewrzne", "yd", "datahajksbsyogjmqjhg"))) + .withParameters(mapOf("xtdlxwmvcdkucp", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavjbssfcriqxz"))) + .withAnnotations(Arrays.asList("dataafr", "datargorogeuvmkrspn")) + .withFolder(new DatasetFolder().withName("semlzofrsnq")) + .withCollectionName("datargdqyxaj"); model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiCollectionDataset.class); - Assertions.assertEquals("cqrttjf", model.description()); - Assertions.assertEquals("avdostw", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vhhplkhwwdk").type()); - Assertions.assertEquals("jy", model.folder().name()); + Assertions.assertEquals("zoq", model.description()); + Assertions.assertEquals("fgsqxile", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("xtdlxwmvcdkucp").type()); + Assertions.assertEquals("semlzofrsnq", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java index 0fd4edad92eed..29972998f7eae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class CosmosDbSqlApiCollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbSqlApiCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collectionName\":\"dataruptsyqcjnq\"}") + = BinaryData.fromString("{\"collectionName\":\"dataudhvos\"}") .toObject(CosmosDbSqlApiCollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CosmosDbSqlApiCollectionDatasetTypeProperties model - = new CosmosDbSqlApiCollectionDatasetTypeProperties().withCollectionName("dataruptsyqcjnq"); + = new CosmosDbSqlApiCollectionDatasetTypeProperties().withCollectionName("dataudhvos"); model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiCollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java index 840ae2de970b7..c9be48d462cb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java @@ -11,16 +11,19 @@ public final class CosmosDbSqlApiSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbSqlApiSink model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiSink\",\"writeBehavior\":\"datauciwbiwygwpwqu\",\"writeBatchSize\":\"dataqgslspihuxgvvio\",\"writeBatchTimeout\":\"dataoolkmfi\",\"sinkRetryCount\":\"datafbbrndaquxvufr\",\"sinkRetryWait\":\"dataaehssosowav\",\"maxConcurrentConnections\":\"datasieyeblkgupgnst\",\"disableMetricsCollection\":\"datajxfmh\",\"\":{\"qgtjffc\":\"datartvkhufk\",\"tzgk\":\"datare\"}}") + "{\"type\":\"wqtbztogihpylf\",\"writeBehavior\":\"dataprnqjxsexzxbiwn\",\"writeBatchSize\":\"datayes\",\"writeBatchTimeout\":\"datalsparbjsvqy\",\"sinkRetryCount\":\"datagemkz\",\"sinkRetryWait\":\"datalvnosblc\",\"maxConcurrentConnections\":\"datatwac\",\"disableMetricsCollection\":\"datahkpdcv\",\"\":{\"tcijuntm\":\"datak\"}}") .toObject(CosmosDbSqlApiSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbSqlApiSink model = new CosmosDbSqlApiSink().withWriteBatchSize("dataqgslspihuxgvvio") - .withWriteBatchTimeout("dataoolkmfi").withSinkRetryCount("datafbbrndaquxvufr") - .withSinkRetryWait("dataaehssosowav").withMaxConcurrentConnections("datasieyeblkgupgnst") - .withDisableMetricsCollection("datajxfmh").withWriteBehavior("datauciwbiwygwpwqu"); + CosmosDbSqlApiSink model = new CosmosDbSqlApiSink().withWriteBatchSize("datayes") + .withWriteBatchTimeout("datalsparbjsvqy") + 
.withSinkRetryCount("datagemkz") + .withSinkRetryWait("datalvnosblc") + .withMaxConcurrentConnections("datatwac") + .withDisableMetricsCollection("datahkpdcv") + .withWriteBehavior("dataprnqjxsexzxbiwn"); model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java index fd09a5706641b..1b9a5b2b13d41 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java @@ -11,17 +11,21 @@ public final class CosmosDbSqlApiSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CosmosDbSqlApiSource model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiSource\",\"query\":\"dataplqnilozf\",\"pageSize\":\"datavsf\",\"preferredRegions\":\"datacarfdmlie\",\"detectDatetime\":\"datawocufcshqfc\",\"additionalColumns\":\"datanxfof\",\"sourceRetryCount\":\"datadroqktegi\",\"sourceRetryWait\":\"datakzctqbvntlzvgjme\",\"maxConcurrentConnections\":\"dataoydyislepd\",\"disableMetricsCollection\":\"dataiklnt\",\"\":{\"ytbjbm\":\"datanump\"}}") + "{\"type\":\"mekmsn\",\"query\":\"datayrmwshrzm\",\"pageSize\":\"datacnn\",\"preferredRegions\":\"datajxvotidlwmewrgul\",\"detectDatetime\":\"datagpkunvygu\",\"additionalColumns\":\"datannvmguzqmx\",\"sourceRetryCount\":\"datajbefszfrxfywjy\",\"sourceRetryWait\":\"datacqmdeecdh\",\"maxConcurrentConnections\":\"datasiz\",\"disableMetricsCollection\":\"datappmykgr\",\"\":{\"qtfyuyg\":\"datamadhismwj\"}}") .toObject(CosmosDbSqlApiSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CosmosDbSqlApiSource model = new CosmosDbSqlApiSource().withSourceRetryCount("datadroqktegi") - .withSourceRetryWait("datakzctqbvntlzvgjme").withMaxConcurrentConnections("dataoydyislepd") - .withDisableMetricsCollection("dataiklnt").withQuery("dataplqnilozf").withPageSize("datavsf") - .withPreferredRegions("datacarfdmlie").withDetectDatetime("datawocufcshqfc") - .withAdditionalColumns("datanxfof"); + CosmosDbSqlApiSource model = new CosmosDbSqlApiSource().withSourceRetryCount("datajbefszfrxfywjy") + .withSourceRetryWait("datacqmdeecdh") + .withMaxConcurrentConnections("datasiz") + .withDisableMetricsCollection("datappmykgr") + .withQuery("datayrmwshrzm") + .withPageSize("datacnn") + .withPreferredRegions("datajxvotidlwmewrgul") + .withDetectDatetime("datagpkunvygu") + .withAdditionalColumns("datannvmguzqmx"); model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java index 2960a8ad173c5..12b5841831e2b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java @@ -11,16 +11,19 @@ public final class CouchbaseSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CouchbaseSource model = BinaryData.fromString( - "{\"type\":\"CouchbaseSource\",\"query\":\"dataaysxhfupvqjkqlaf\",\"queryTimeout\":\"dataywmcipuye\",\"additionalColumns\":\"datahd\",\"sourceRetryCount\":\"datagaifg\",\"sourceRetryWait\":\"datakgqwmp\",\"maxConcurrentConnections\":\"dataxpcxqcbnkxhcodh\",\"disableMetricsCollection\":\"databxllfwxdou\",\"\":{\"ofhk\":\"datapaqjahjxgedtmz\",\"rfassiii\":\"dataywtacgukierd\",\"ayyxgcgb\":\"datacmrgahs\",\"vqopxun\":\"dataieqonsbukznxd\"}}") + "{\"type\":\"h\",\"query\":\"dataoyznjddshazlom\",\"queryTimeout\":\"datajwwbrhjhcwcfftsz\",\"additionalColumns\":\"datavy\",\"sourceRetryCount\":\"datavh\",\"sourceRetryWait\":\"dataeoikxjpuwg\",\"maxConcurrentConnections\":\"datasaqfnbxuw\",\"disableMetricsCollection\":\"datambuslegtsqz\",\"\":{\"hvde\":\"dataoru\",\"ylgpznb\":\"datadcdjhfxbkgpnxu\"}}") .toObject(CouchbaseSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CouchbaseSource model - = new CouchbaseSource().withSourceRetryCount("datagaifg").withSourceRetryWait("datakgqwmp") - .withMaxConcurrentConnections("dataxpcxqcbnkxhcodh").withDisableMetricsCollection("databxllfwxdou") - .withQueryTimeout("dataywmcipuye").withAdditionalColumns("datahd").withQuery("dataaysxhfupvqjkqlaf"); + CouchbaseSource model = new CouchbaseSource().withSourceRetryCount("datavh") + .withSourceRetryWait("dataeoikxjpuwg") + .withMaxConcurrentConnections("datasaqfnbxuw") + .withDisableMetricsCollection("datambuslegtsqz") + .withQueryTimeout("datajwwbrhjhcwcfftsz") + .withAdditionalColumns("datavy") + .withQuery("dataoyznjddshazlom"); model = BinaryData.fromObject(model).toObject(CouchbaseSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java index 45552c62752f8..545be3ed85fdd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java @@ -19,31 +19,32 @@ public final class CouchbaseTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CouchbaseTableDataset model = BinaryData.fromString( - "{\"type\":\"CouchbaseTable\",\"typeProperties\":{\"tableName\":\"datafybxmmrvn\"},\"description\":\"qkrrsguogkcb\",\"structure\":\"datatpyabensjflwp\",\"schema\":\"datavvqtmvif\",\"linkedServiceName\":{\"referenceName\":\"cv\",\"parameters\":{\"t\":\"dataalb\"}},\"parameters\":{\"hmvjcnnlsb\":{\"type\":\"Int\",\"defaultValue\":\"datasnxajptcdfmzxaox\"},\"ovlznklel\":{\"type\":\"Float\",\"defaultValue\":\"dataqxhpaqoqbvejoyso\"}},\"annotations\":[\"datadlqqhn\",\"dataqrykkxakruupti\",\"datacg\",\"datapz\"],\"folder\":{\"name\":\"ccnpxiemacmzt\"},\"\":{\"ocnqbblr\":\"dataxsnnbrysgktf\",\"fwxud\":\"databofzghfu\",\"cqxu\":\"datanoh\"}}") + 
"{\"type\":\"qjobsynnen\",\"typeProperties\":{\"tableName\":\"dataqztjfkgb\"},\"description\":\"njqhdheosx\",\"structure\":\"datafudmpfhwyp\",\"schema\":\"datatjtntc\",\"linkedServiceName\":{\"referenceName\":\"gpdbbglaecc\",\"parameters\":{\"kvrmp\":\"datafspvjrds\",\"gmc\":\"dataftyptwjwiyyeo\"}},\"parameters\":{\"fzzi\":{\"type\":\"Int\",\"defaultValue\":\"dataphwfnugsl\"},\"mvueprpmofxnwcg\":{\"type\":\"Bool\",\"defaultValue\":\"datag\"}},\"annotations\":[\"dataxixtxxxajsehbknn\"],\"folder\":{\"name\":\"yjfawpcb\"},\"\":{\"nivco\":\"datazehcz\",\"who\":\"datasxvppkjealkdb\",\"o\":\"datavbmyzuqfdqdktrj\"}}") .toObject(CouchbaseTableDataset.class); - Assertions.assertEquals("qkrrsguogkcb", model.description()); - Assertions.assertEquals("cv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hmvjcnnlsb").type()); - Assertions.assertEquals("ccnpxiemacmzt", model.folder().name()); + Assertions.assertEquals("njqhdheosx", model.description()); + Assertions.assertEquals("gpdbbglaecc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("fzzi").type()); + Assertions.assertEquals("yjfawpcb", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CouchbaseTableDataset model = new CouchbaseTableDataset().withDescription("qkrrsguogkcb") - .withStructure("datatpyabensjflwp").withSchema("datavvqtmvif") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("cv").withParameters(mapOf("t", "dataalb"))) - .withParameters(mapOf("hmvjcnnlsb", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datasnxajptcdfmzxaox"), - "ovlznklel", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqxhpaqoqbvejoyso"))) - .withAnnotations(Arrays.asList("datadlqqhn", "dataqrykkxakruupti", "datacg", "datapz")) - .withFolder(new DatasetFolder().withName("ccnpxiemacmzt")).withTableName("datafybxmmrvn"); + CouchbaseTableDataset model = new CouchbaseTableDataset().withDescription("njqhdheosx") + .withStructure("datafudmpfhwyp") + .withSchema("datatjtntc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gpdbbglaecc") + .withParameters(mapOf("kvrmp", "datafspvjrds", "gmc", "dataftyptwjwiyyeo"))) + .withParameters(mapOf("fzzi", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataphwfnugsl"), + "mvueprpmofxnwcg", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datag"))) + .withAnnotations(Arrays.asList("dataxixtxxxajsehbknn")) + .withFolder(new DatasetFolder().withName("yjfawpcb")) + .withTableName("dataqztjfkgb"); model = BinaryData.fromObject(model).toObject(CouchbaseTableDataset.class); - Assertions.assertEquals("qkrrsguogkcb", model.description()); - Assertions.assertEquals("cv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hmvjcnnlsb").type()); - Assertions.assertEquals("ccnpxiemacmzt", model.folder().name()); + Assertions.assertEquals("njqhdheosx", model.description()); + Assertions.assertEquals("gpdbbglaecc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("fzzi").type()); + Assertions.assertEquals("yjfawpcb", model.folder().name()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java index e0d00792a584f..7bbe6ae7dd5fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java @@ -6,48 +6,35 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime; import org.junit.jupiter.api.Assertions; public final class CreateDataFlowDebugSessionRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CreateDataFlowDebugSessionRequest model = BinaryData.fromString( - "{\"computeType\":\"foudor\",\"coreCount\":199814192,\"timeToLive\":1885072328,\"integrationRuntime\":{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"otwypundmb\",\"\":{\"or\":\"datagcmjkavl\",\"jltfvnzcyjtotpv\":\"datamftpmdtz\",\"qwthmky\":\"datapvpbdbzqgqqiheds\",\"gqcwdhohsdtmc\":\"databcysih\"}},\"name\":\"sufco\"}}") + "{\"computeType\":\"iwbuqny\",\"coreCount\":1310458507,\"timeToLive\":416565759,\"integrationRuntime\":{\"properties\":{\"type\":\"SelfHosted\",\"description\":\"crpfbcunez\",\"\":{\"xjwet\":\"datazelfwyfwl\"}},\"name\":\"sihclafzvaylp\"}}") .toObject(CreateDataFlowDebugSessionRequest.class); - Assertions.assertEquals("foudor", model.computeType()); - Assertions.assertEquals(199814192, model.coreCount()); - Assertions.assertEquals(1885072328, model.timeToLive()); - Assertions.assertEquals("sufco", model.integrationRuntime().name()); - Assertions.assertEquals("otwypundmb", model.integrationRuntime().properties().description()); + Assertions.assertEquals("iwbuqny", model.computeType()); + Assertions.assertEquals(1310458507, model.coreCount()); + Assertions.assertEquals(416565759, model.timeToLive()); + Assertions.assertEquals("sihclafzvaylp", model.integrationRuntime().name()); + Assertions.assertEquals("crpfbcunez", model.integrationRuntime().properties().description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CreateDataFlowDebugSessionRequest model = new CreateDataFlowDebugSessionRequest().withComputeType("foudor") - .withCoreCount(199814192).withTimeToLive(1885072328).withIntegrationRuntime( - new IntegrationRuntimeDebugResource().withName("sufco").withProperties(new IntegrationRuntime() - .withDescription("otwypundmb").withAdditionalProperties(mapOf("type", "IntegrationRuntime")))); + CreateDataFlowDebugSessionRequest model = new CreateDataFlowDebugSessionRequest().withComputeType("iwbuqny") + .withCoreCount(1310458507) + .withTimeToLive(416565759) + .withIntegrationRuntime(new IntegrationRuntimeDebugResource().withName("sihclafzvaylp") + .withProperties(new SelfHostedIntegrationRuntime().withDescription("crpfbcunez"))); model = 
BinaryData.fromObject(model).toObject(CreateDataFlowDebugSessionRequest.class); - Assertions.assertEquals("foudor", model.computeType()); - Assertions.assertEquals(199814192, model.coreCount()); - Assertions.assertEquals(1885072328, model.timeToLive()); - Assertions.assertEquals("sufco", model.integrationRuntime().name()); - Assertions.assertEquals("otwypundmb", model.integrationRuntime().properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static <T> Map<String, T> mapOf(Object... inputs) { - Map<String, T> map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("iwbuqny", model.computeType()); + Assertions.assertEquals(1310458507, model.coreCount()); + Assertions.assertEquals(416565759, model.timeToLive()); + Assertions.assertEquals("sihclafzvaylp", model.integrationRuntime().name()); + Assertions.assertEquals("crpfbcunez", model.integrationRuntime().properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java index 40d08323ef0ef..7a484fdfaa5df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java @@ -12,18 +12,18 @@ public final class CreateDataFlowDebugSessionResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CreateDataFlowDebugSessionResponseInner model - = BinaryData.fromString("{\"status\":\"zvd\",\"sessionId\":\"zdix\"}") + = BinaryData.fromString("{\"status\":\"ruunzzjgehkf\",\"sessionId\":\"m\"}") .toObject(CreateDataFlowDebugSessionResponseInner.class); - Assertions.assertEquals("zvd", model.status()); - Assertions.assertEquals("zdix", model.sessionId()); + Assertions.assertEquals("ruunzzjgehkf", model.status()); + Assertions.assertEquals("m", model.sessionId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CreateDataFlowDebugSessionResponseInner model - = new CreateDataFlowDebugSessionResponseInner().withStatus("zvd").withSessionId("zdix"); + = new CreateDataFlowDebugSessionResponseInner().withStatus("ruunzzjgehkf").withSessionId("m"); model = BinaryData.fromObject(model).toObject(CreateDataFlowDebugSessionResponseInner.class); - Assertions.assertEquals("zvd", model.status()); - Assertions.assertEquals("zdix", model.sessionId()); + Assertions.assertEquals("ruunzzjgehkf", model.status()); + Assertions.assertEquals("m", model.sessionId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java index 5c71a8869e7ff..e67bf49849d94 100644 ---
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java @@ -12,22 +12,24 @@ public final class CreateLinkedIntegrationRuntimeRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CreateLinkedIntegrationRuntimeRequest model = BinaryData.fromString( - "{\"name\":\"ni\",\"subscriptionId\":\"x\",\"dataFactoryName\":\"kpycgklwndnhjd\",\"dataFactoryLocation\":\"whvylw\"}") + "{\"name\":\"ashsfwxos\",\"subscriptionId\":\"z\",\"dataFactoryName\":\"ugicjooxdjebw\",\"dataFactoryLocation\":\"cwwfvovbvme\"}") .toObject(CreateLinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("ni", model.name()); - Assertions.assertEquals("x", model.subscriptionId()); - Assertions.assertEquals("kpycgklwndnhjd", model.dataFactoryName()); - Assertions.assertEquals("whvylw", model.dataFactoryLocation()); + Assertions.assertEquals("ashsfwxos", model.name()); + Assertions.assertEquals("z", model.subscriptionId()); + Assertions.assertEquals("ugicjooxdjebw", model.dataFactoryName()); + Assertions.assertEquals("cwwfvovbvme", model.dataFactoryLocation()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CreateLinkedIntegrationRuntimeRequest model = new CreateLinkedIntegrationRuntimeRequest().withName("ni") - .withSubscriptionId("x").withDataFactoryName("kpycgklwndnhjd").withDataFactoryLocation("whvylw"); + CreateLinkedIntegrationRuntimeRequest model = new CreateLinkedIntegrationRuntimeRequest().withName("ashsfwxos") + .withSubscriptionId("z") + .withDataFactoryName("ugicjooxdjebw") + .withDataFactoryLocation("cwwfvovbvme"); model = BinaryData.fromObject(model).toObject(CreateLinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("ni", model.name()); - Assertions.assertEquals("x", model.subscriptionId()); - Assertions.assertEquals("kpycgklwndnhjd", model.dataFactoryName()); - Assertions.assertEquals("whvylw", model.dataFactoryLocation()); + Assertions.assertEquals("ashsfwxos", model.name()); + Assertions.assertEquals("z", model.subscriptionId()); + Assertions.assertEquals("ugicjooxdjebw", model.dataFactoryName()); + Assertions.assertEquals("cwwfvovbvme", model.dataFactoryLocation()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java index fe9d54777e037..f3cbda6b41c46 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java @@ -12,14 +12,14 @@ public final class CreateRunResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CreateRunResponseInner model - = BinaryData.fromString("{\"runId\":\"vvhmxtdrj\"}").toObject(CreateRunResponseInner.class); - Assertions.assertEquals("vvhmxtdrj", model.runId()); + = BinaryData.fromString("{\"runId\":\"smiccwrwfscj\"}").toObject(CreateRunResponseInner.class); + 
Assertions.assertEquals("smiccwrwfscj", model.runId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CreateRunResponseInner model = new CreateRunResponseInner().withRunId("vvhmxtdrj"); + CreateRunResponseInner model = new CreateRunResponseInner().withRunId("smiccwrwfscj"); model = BinaryData.fromObject(model).toObject(CreateRunResponseInner.class); - Assertions.assertEquals("vvhmxtdrj", model.runId()); + Assertions.assertEquals("smiccwrwfscj", model.runId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java index cf6966a33f279..8e51f19a303fd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java @@ -5,45 +5,55 @@ package com.azure.resourcemanager.datafactory.generated; import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.azure.resourcemanager.datafactory.models.Credential; import com.azure.resourcemanager.datafactory.models.CredentialListResponse; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import org.junit.jupiter.api.Assertions; public final class CredentialListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CredentialListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"qhnlbqnbld\"},\"description\":\"aclgschorimk\",\"annotations\":[\"datarmoucsofl\"],\"\":{\"mxuq\":\"dataviyfcaabeolhbhlv\",\"owlkjxnqpv\":\"databsxtkcudfbsfarfs\",\"tmhqykiz\":\"datagf\"}},\"name\":\"ksaoafcluqvox\",\"type\":\"cjimryvwgcwwpbmz\",\"etag\":\"esyds\",\"id\":\"efoh\"},{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"vopwndyqleallk\"},\"description\":\"tkhlowkxxpvbr\",\"annotations\":[\"datamzsyzfhotl\",\"dataikcyyc\"],\"\":{\"uic\":\"datasjlpjrtwszhv\",\"ubhvj\":\"datahvtrrmhwrbfdpyf\",\"memhooclutnpq\":\"datalrocuyzlwh\"}},\"name\":\"mczjkm\",\"type\":\"kyujxsglhsrrr\",\"etag\":\"jylmbkzudnigr\",\"id\":\"hotj\"},{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"pxuzzjg\"},\"description\":\"efqyhqotoihiqaky\",\"annotations\":[\"datafb\"],\"\":{\"qaxsipietgbebjf\":\"datapzdqtvhcspod\",\"pnfpubntnbat\":\"datalbmoichd\",\"uhplrvkmjcwmjv\":\"dataviqsowsaaelcattc\"}},\"name\":\"fggc\",\"type\":\"yylizrz\",\"etag\":\"psfxsf\",\"id\":\"tl\"},{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"vagbwidqlvhukove\"},\"description\":\"i\",\"annotations\":[\"datajfnmjmvlwyz\"],\"\":{\"jpu\":\"datalkujrllfojui\",\"vtzejetjklnti\":\"datayjucejikzoeo\",\"zolxrzvhqjwtr\":\"datayjuzkdb\",\"rrkolawjmjs\":\"datatgvgzp\"}},\"name\":\"rokcdxfzzzwyjaf\",\"type\":\"lhguyn\",\"etag\":\"hlgmltxdwhmoz\",\"id\":\"gzvlnsnn\"}],\"nextLink\":\"fpafolpymwamxq\"}") + 
"{\"value\":[{\"properties\":{\"type\":\"pvmjcdoewbid\",\"description\":\"t\",\"annotations\":[\"dataxvgpiude\",\"datagfsxzec\",\"dataaxwk\",\"datafykhvuhxepmru\"],\"\":{\"mvguihywart\":\"dataabaobnslujdjltym\",\"j\":\"datapphkixkykxds\",\"kkflrmymyincqlhr\":\"dataemmucfxh\"}},\"name\":\"wslmiiiovgqcg\",\"type\":\"ugqkctotiowlxteq\",\"etag\":\"tjgwdtguk\",\"id\":\"nblwphqlkcc\"},{\"properties\":{\"type\":\"gygqwah\",\"description\":\"ulwgniiprglvawuw\",\"annotations\":[\"datafypiv\",\"datasbbjpmcu\"],\"\":{\"mhbrbqgvg\":\"dataifoxxkubvphav\",\"efjokn\":\"datavpbbt\",\"biqmrjgei\":\"datasqyzqedikdfr\"}},\"name\":\"qlggwfi\",\"type\":\"cxmjpbyephmg\",\"etag\":\"ljvrcmyfqipgxhnp\",\"id\":\"yqwcabvnuil\"}],\"nextLink\":\"yaswlpaugmr\"}") .toObject(CredentialListResponse.class); - Assertions.assertEquals("efoh", model.value().get(0).id()); - Assertions.assertEquals("aclgschorimk", model.value().get(0).properties().description()); - Assertions.assertEquals("qhnlbqnbld", model.value().get(0).properties().resourceId()); - Assertions.assertEquals("fpafolpymwamxq", model.nextLink()); + Assertions.assertEquals("nblwphqlkcc", model.value().get(0).id()); + Assertions.assertEquals("t", model.value().get(0).properties().description()); + Assertions.assertEquals("yaswlpaugmr", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CredentialListResponse model = new CredentialListResponse() - .withValue(Arrays.asList(new ManagedIdentityCredentialResourceInner().withId("efoh") - .withProperties(new ManagedIdentityCredential().withDescription("aclgschorimk") - .withAnnotations(Arrays.asList("datarmoucsofl")).withResourceId("qhnlbqnbld")), - new ManagedIdentityCredentialResourceInner().withId("hotj") - .withProperties(new ManagedIdentityCredential().withDescription("tkhlowkxxpvbr") - .withAnnotations(Arrays.asList("datamzsyzfhotl", "dataikcyyc")) - .withResourceId("vopwndyqleallk")), - new ManagedIdentityCredentialResourceInner().withId("tl") - .withProperties(new ManagedIdentityCredential().withDescription("efqyhqotoihiqaky") - .withAnnotations(Arrays.asList("datafb")).withResourceId("pxuzzjg")), - new ManagedIdentityCredentialResourceInner().withId("gzvlnsnn") - .withProperties(new ManagedIdentityCredential().withDescription("i") - .withAnnotations(Arrays.asList("datajfnmjmvlwyz")).withResourceId("vagbwidqlvhukove")))) - .withNextLink("fpafolpymwamxq"); + CredentialListResponse model + = new CredentialListResponse() + .withValue(Arrays.asList( + new CredentialResourceInner().withId("nblwphqlkcc") + .withProperties(new Credential().withDescription("t") + .withAnnotations( + Arrays.asList("dataxvgpiude", "datagfsxzec", "dataaxwk", "datafykhvuhxepmru")) + .withAdditionalProperties(mapOf("type", "pvmjcdoewbid"))), + new CredentialResourceInner().withId("yqwcabvnuil") + .withProperties(new Credential().withDescription("ulwgniiprglvawuw") + .withAnnotations(Arrays.asList("datafypiv", "datasbbjpmcu")) + .withAdditionalProperties(mapOf("type", "gygqwah"))))) + .withNextLink("yaswlpaugmr"); model = BinaryData.fromObject(model).toObject(CredentialListResponse.class); - Assertions.assertEquals("efoh", model.value().get(0).id()); - Assertions.assertEquals("aclgschorimk", model.value().get(0).properties().description()); - Assertions.assertEquals("qhnlbqnbld", model.value().get(0).properties().resourceId()); - Assertions.assertEquals("fpafolpymwamxq", model.nextLink()); + Assertions.assertEquals("nblwphqlkcc", model.value().get(0).id()); + Assertions.assertEquals("t", 
model.value().get(0).properties().description()); + Assertions.assertEquals("yaswlpaugmr", model.nextLink()); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java index 7dd2792ecf3c9..27a047fbe5f09 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java @@ -6,60 +6,57 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; -import java.nio.ByteBuffer; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.azure.resourcemanager.datafactory.models.Credential; +import com.azure.resourcemanager.datafactory.models.CredentialResource; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class CredentialOperationsCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"bzekrwpwyiyq\"},\"description\":\"csekhu\",\"annotations\":[\"datae\",\"dataudmpsuqpraqjscni\",\"datapvx\"],\"\":{\"jhrvembitqo\":\"dataewtbyciedxsey\",\"khjxgukzz\":\"dataxieuntce\"}},\"name\":\"wbfbfrz\",\"type\":\"sipqbyvesxuzdae\",\"etag\":\"milpztwzjkbaudtp\",\"id\":\"uqkntnvgwgtgxgg\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - 
Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedIdentityCredentialResource response - = manager.credentialOperations().define("gerqtcxk").withExistingFactory("xono", "o") - .withProperties(new ManagedIdentityCredential().withDescription("crucz") - .withAnnotations( - Arrays.asList("datauxijmawsamdfw", "dataxnjbdglsllm", "dataarmlbqekvj", "dataaqjpsjrpkgvs")) - .withResourceId("z")) - .withIfMatch("arwxhpufvucnq").create(); + = "{\"properties\":{\"type\":\"burzofigi\",\"description\":\"dywekoxylwysj\",\"annotations\":[\"dataapgqghkr\",\"datacvcxyumtoeuna\"],\"\":{\"xrzzilzedqwd\":\"datatleyztnelvced\",\"dvtittybi\":\"dataylofedxj\"}},\"name\":\"hjerwq\",\"type\":\"dntxxpcxneeyn\",\"etag\":\"rrqsqzkrxcj\",\"id\":\"gwhkqimqw\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + CredentialResource response = manager.credentialOperations() + .createOrUpdateWithResponse("hpreattpcm", "ssl", "lctluczrs", + new CredentialResourceInner().withId("ppau") + .withProperties(new Credential().withDescription("qlskya") + .withAnnotations(Arrays.asList("dataz", "datar", "datanqrvwznhnpznoehn", "dataoeccjzyk")) + .withAdditionalProperties(mapOf("type", "mnjplazrsmwybo"))), + "wjochxon", com.azure.core.util.Context.NONE) + .getValue(); + + Assertions.assertEquals("gwhkqimqw", response.id()); + Assertions.assertEquals("dywekoxylwysj", response.properties().description()); + } - Assertions.assertEquals("uqkntnvgwgtgxgg", response.id()); - Assertions.assertEquals("csekhu", response.properties().description()); - Assertions.assertEquals("bzekrwpwyiyq", response.properties().resourceId()); + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... 
inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java index 587d2e6a8c063..907627f592911 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class CredentialOperationsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.credentialOperations().deleteWithResponse("ouvsmmiqfiigsl", "riawknncdfcyey", "l", - com.azure.core.util.Context.NONE); + manager.credentialOperations().deleteWithResponse("srcwpgqmzkel", "t", "ec", com.azure.core.util.Context.NONE); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java index 581878ba4e56e..af2c90685b332 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java @@ -6,54 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; -import java.nio.ByteBuffer; +import com.azure.resourcemanager.datafactory.models.CredentialResource; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class CredentialOperationsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"b\"},\"description\":\"ussldjsb\",\"annotations\":[\"datathfas\",\"dataglcktraeraql\",\"datafyhwdogchdqtlbn\"],\"\":{\"wpeksrhkmzs\":\"datayox\"}},\"name\":\"p\",\"type\":\"sbp\",\"etag\":\"kmgwmmwjugaqyrt\",\"id\":\"iyspbghnnx\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"jxtxaapsicoext\",\"description\":\"kbgzaxqpnnuwocib\",\"annotations\":[\"datanbwguhpvnzltpdhf\",\"datankeeyxkw\",\"datayhsuiwuoctof\",\"datarubtixrqzsfyxia\"],\"\":{\"qbnjrjrrosns\":\"datagcrpuagxkjx\",\"ml\":\"databdc\",\"ugyqgbqfa\":\"datauu\"}},\"name\":\"tn\",\"type\":\"dl\",\"etag\":\"ipgvdqtvnwkx\",\"id\":\"gxmmlmqrtg\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response 
-> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - ManagedIdentityCredentialResource response = manager.credentialOperations() - .getWithResponse("cdryjgxwdanidr", "qfpxka", "bbpzk", "pmjtfymvlvosbc", com.azure.core.util.Context.NONE) + CredentialResource response = manager.credentialOperations() + .getWithResponse("uwlxgtosifwi", "zwdozhuczsc", "ucdzcnwouopb", "riyyqxj", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("iyspbghnnx", response.id()); - Assertions.assertEquals("ussldjsb", response.properties().description()); - Assertions.assertEquals("b", response.properties().resourceId()); + Assertions.assertEquals("gxmmlmqrtg", response.id()); + Assertions.assertEquals("kbgzaxqpnnuwocib", response.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java index 692132c29abce..6723305d518d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java @@ -6,54 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; -import java.nio.ByteBuffer; +import com.azure.resourcemanager.datafactory.models.CredentialResource; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class CredentialOperationsListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"tuxwfwlfqbg\"},\"description\":\"vrwnweiwkbkhdxq\",\"annotations\":[\"datadzbdjottzuk\",\"datasxgaojwulatq\"],\"\":{\"yfdjzefkzcajav\":\"datavkyrslifcwj\"}},\"name\":\"okaqnuy\",\"type\":\"gymbefvuutlirzr\",\"etag\":\"lbdezvmfuhearh\",\"id\":\"hyugjx\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - 
Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"type\":\"nniyg\",\"description\":\"u\",\"annotations\":[\"datafpbgcvnqrstgp\",\"datas\"],\"\":{\"ttamfrnumwos\":\"datagxak\",\"uxmrsogmscfno\":\"datatibcd\",\"iiif\":\"datajqvqcboky\"}},\"name\":\"tbwzc\",\"type\":\"auctltbtmqjkz\",\"etag\":\"sblismnwywod\",\"id\":\"r\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - PagedIterable response - = manager.credentialOperations().listByFactory("mn", "tdboru", com.azure.core.util.Context.NONE); + PagedIterable response + = manager.credentialOperations().listByFactory("aiecqmt", "tvlarmfhaind", com.azure.core.util.Context.NONE); - Assertions.assertEquals("hyugjx", response.iterator().next().id()); - Assertions.assertEquals("vrwnweiwkbkhdxq", response.iterator().next().properties().description()); - Assertions.assertEquals("tuxwfwlfqbg", response.iterator().next().properties().resourceId()); + Assertions.assertEquals("r", response.iterator().next().id()); + Assertions.assertEquals("u", response.iterator().next().properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java index 06138e8b42c4a..53fddc04754b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java @@ -15,19 +15,20 @@ public final class CredentialReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CredentialReference model = BinaryData.fromString( - "{\"type\":\"CredentialReference\",\"referenceName\":\"lhdyzmyckzex\",\"\":{\"wymxgaabjk\":\"datakck\",\"ogzvk\":\"datatfohf\"}}") + "{\"type\":\"CredentialReference\",\"referenceName\":\"wwwmhkruwaedrym\",\"\":{\"in\":\"dataq\",\"njdiqfliejhpcl\":\"datazduewihapfjii\"}}") .toObject(CredentialReference.class); Assertions.assertEquals(CredentialReferenceType.CREDENTIAL_REFERENCE, model.type()); - Assertions.assertEquals("lhdyzmyckzex", model.referenceName()); + Assertions.assertEquals("wwwmhkruwaedrym", model.referenceName()); } 
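    // The mapOf(...) call in testSerialize below relies on the small varargs helper that each of
    // these generated test classes declares at the bottom of the file (see the "Use 'Map.of' if
    // available" note). Its generic signature is assumed here as a sketch; java.util.Map and
    // java.util.HashMap are the only imports it needs:
    //
    //     @SuppressWarnings("unchecked")
    //     private static <T> Map<String, T> mapOf(Object... inputs) {
    //         Map<String, T> map = new HashMap<>();
    //         for (int i = 0; i < inputs.length; i += 2) {
    //             String key = (String) inputs[i];
    //             T value = (T) inputs[i + 1];
    //             map.put(key, value);
    //         }
    //         return map;
    //     }
    //
    // Usage: mapOf() produces an empty map, while mapOf("type", "CredentialReference") pairs up
    // successive arguments as key/value entries for withAdditionalProperties(...).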
@org.junit.jupiter.api.Test public void testSerialize() throws Exception { CredentialReference model = new CredentialReference().withType(CredentialReferenceType.CREDENTIAL_REFERENCE) - .withReferenceName("lhdyzmyckzex").withAdditionalProperties(mapOf()); + .withReferenceName("wwwmhkruwaedrym") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(CredentialReference.class); Assertions.assertEquals(CredentialReferenceType.CREDENTIAL_REFERENCE, model.type()); - Assertions.assertEquals("lhdyzmyckzex", model.referenceName()); + Assertions.assertEquals("wwwmhkruwaedrym", model.referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java new file mode 100644 index 0000000000000..5b6a0dba714ac --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.generated; + +import com.azure.core.util.BinaryData; +import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; +import com.azure.resourcemanager.datafactory.models.Credential; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Assertions; + +public final class CredentialResourceInnerTests { + @org.junit.jupiter.api.Test + public void testDeserialize() throws Exception { + CredentialResourceInner model = BinaryData.fromString( + "{\"properties\":{\"type\":\"jlrxwtoaukhfk\",\"description\":\"isizm\",\"annotations\":[\"datadsxjwuivedw\"],\"\":{\"mguaml\":\"dataeewxeiqbpsmg\"}},\"name\":\"l\",\"type\":\"msplzgaufcshhvn\",\"etag\":\"gnxkympqan\",\"id\":\"jk\"}") + .toObject(CredentialResourceInner.class); + Assertions.assertEquals("jk", model.id()); + Assertions.assertEquals("isizm", model.properties().description()); + } + + @org.junit.jupiter.api.Test + public void testSerialize() throws Exception { + CredentialResourceInner model = new CredentialResourceInner().withId("jk") + .withProperties(new Credential().withDescription("isizm") + .withAnnotations(Arrays.asList("datadsxjwuivedw")) + .withAdditionalProperties(mapOf("type", "jlrxwtoaukhfk"))); + model = BinaryData.fromObject(model).toObject(CredentialResourceInner.class); + Assertions.assertEquals("jk", model.id()); + Assertions.assertEquals("isizm", model.properties().description()); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... 
inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java index 73eb602563f6e..3d8ba92b5a0fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java @@ -15,18 +15,18 @@ public final class CredentialTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Credential model = BinaryData.fromString( - "{\"type\":\"Credential\",\"description\":\"pwpgddei\",\"annotations\":[\"datazovgkkumuikj\",\"datajcazt\"],\"\":{\"wxwcomli\":\"datansq\",\"yfdvlvhbwrnfxtgd\":\"dataytwvczcswkacve\",\"kcoeqswank\":\"datapqthehnmnaoya\"}}") + "{\"type\":\"tw\",\"description\":\"aoypny\",\"annotations\":[\"datahxcylhkgm\",\"datasghpx\",\"datacphdrwjjkhvyo\"],\"\":{\"hrpq\":\"dataluzvxnq\"}}") .toObject(Credential.class); - Assertions.assertEquals("pwpgddei", model.description()); + Assertions.assertEquals("aoypny", model.description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Credential model = new Credential().withDescription("pwpgddei") - .withAnnotations(Arrays.asList("datazovgkkumuikj", "datajcazt")) - .withAdditionalProperties(mapOf("type", "Credential")); + Credential model = new Credential().withDescription("aoypny") + .withAnnotations(Arrays.asList("datahxcylhkgm", "datasghpx", "datacphdrwjjkhvyo")) + .withAdditionalProperties(mapOf("type", "tw")); model = BinaryData.fromObject(model).toObject(Credential.class); - Assertions.assertEquals("pwpgddei", model.description()); + Assertions.assertEquals("aoypny", model.description()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java index 3ae750f06a932..e78a6e51091dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java @@ -17,30 +17,37 @@ public final class CustomActivityReferenceObjectTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomActivityReferenceObject model = BinaryData.fromString( - 
"{\"linkedServices\":[{\"referenceName\":\"xbrjdpeypuqwd\",\"parameters\":{\"xdeo\":\"datamfvxfssho\",\"fnreempbumyuiquz\":\"datanzprdgmmgtqgzdf\",\"facflkbgohxbj\":\"dataotfoifjrik\"}},{\"referenceName\":\"zfpfxbqdrjunigxn\",\"parameters\":{\"dbvenvr\":\"dataghga\",\"qswrncwhlxvngj\":\"datati\",\"bd\":\"datapydjdpapndmv\"}}],\"datasets\":[{\"referenceName\":\"tapwkwk\",\"parameters\":{\"ide\":\"datae\"}},{\"referenceName\":\"dehskmfiudnp\",\"parameters\":{\"ql\":\"datafhtsgyyrgdguvk\",\"kznffqv\":\"datag\",\"rwgdpf\":\"dataxnytihhqancw\"}},{\"referenceName\":\"dy\",\"parameters\":{\"laiuoncrsw\":\"datarsnbdfamyolvgk\",\"etqcxoamxum\":\"datawz\",\"uhixomxvbruzx\":\"dataz\",\"douneozgnwmc\":\"datanzxipgfkc\"}}]}") + "{\"linkedServices\":[{\"referenceName\":\"c\",\"parameters\":{\"p\":\"datatumttmixe\",\"hvqnkwjhjut\":\"datarbazgou\",\"dflgqso\":\"dataxggn\",\"petsxetneher\":\"datauncmuvfukl\"}},{\"referenceName\":\"belms\",\"parameters\":{\"pjtduvsvgydtdt\":\"datahkzcdn\",\"izkwpo\":\"datamknzotm\"}},{\"referenceName\":\"askflrqw\",\"parameters\":{\"brzvnouth\":\"datakks\",\"fix\":\"datavvcbwudi\",\"rqivqzqcmrxh\":\"dataw\"}},{\"referenceName\":\"lozg\",\"parameters\":{\"yttxspaafs\":\"dataijcetcystrs\",\"u\":\"dataqoyoerlrqtqnx\"}}],\"datasets\":[{\"referenceName\":\"q\",\"parameters\":{\"kjzbxmgsxbk\":\"datamwbmqpbfjbsoljqk\",\"eobkmx\":\"datakambdoq\"}},{\"referenceName\":\"hmrbjh\",\"parameters\":{\"f\":\"dataxnwcejczi\",\"nkjyfy\":\"datadaq\"}},{\"referenceName\":\"mbtiugc\",\"parameters\":{\"uvu\":\"datahgryof\",\"uocmxtyjaxk\":\"datakrkibno\"}},{\"referenceName\":\"yovwtpm\",\"parameters\":{\"mzsvdrryzxhtvyth\":\"datas\"}}]}") .toObject(CustomActivityReferenceObject.class); - Assertions.assertEquals("xbrjdpeypuqwd", model.linkedServices().get(0).referenceName()); - Assertions.assertEquals("tapwkwk", model.datasets().get(0).referenceName()); + Assertions.assertEquals("c", model.linkedServices().get(0).referenceName()); + Assertions.assertEquals("q", model.datasets().get(0).referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CustomActivityReferenceObject model = new CustomActivityReferenceObject() .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("xbrjdpeypuqwd") - .withParameters(mapOf("xdeo", "datamfvxfssho", "fnreempbumyuiquz", "datanzprdgmmgtqgzdf", - "facflkbgohxbj", "dataotfoifjrik")), - new LinkedServiceReference().withReferenceName("zfpfxbqdrjunigxn").withParameters( - mapOf("dbvenvr", "dataghga", "qswrncwhlxvngj", "datati", "bd", "datapydjdpapndmv")))) + new LinkedServiceReference().withReferenceName("c") + .withParameters(mapOf("p", "datatumttmixe", "hvqnkwjhjut", "datarbazgou", "dflgqso", "dataxggn", + "petsxetneher", "datauncmuvfukl")), + new LinkedServiceReference().withReferenceName("belms") + .withParameters(mapOf("pjtduvsvgydtdt", "datahkzcdn", "izkwpo", "datamknzotm")), + new LinkedServiceReference().withReferenceName("askflrqw") + .withParameters(mapOf("brzvnouth", "datakks", "fix", "datavvcbwudi", "rqivqzqcmrxh", "dataw")), + new LinkedServiceReference().withReferenceName("lozg") + .withParameters(mapOf("yttxspaafs", "dataijcetcystrs", "u", "dataqoyoerlrqtqnx")))) .withDatasets(Arrays.asList( - new DatasetReference().withReferenceName("tapwkwk").withParameters(mapOf("ide", "datae")), - new DatasetReference().withReferenceName("dehskmfiudnp").withParameters( - mapOf("ql", "datafhtsgyyrgdguvk", "kznffqv", "datag", "rwgdpf", "dataxnytihhqancw")), - new 
DatasetReference().withReferenceName("dy").withParameters(mapOf("laiuoncrsw", "datarsnbdfamyolvgk", - "etqcxoamxum", "datawz", "uhixomxvbruzx", "dataz", "douneozgnwmc", "datanzxipgfkc")))); + new DatasetReference().withReferenceName("q") + .withParameters(mapOf("kjzbxmgsxbk", "datamwbmqpbfjbsoljqk", "eobkmx", "datakambdoq")), + new DatasetReference().withReferenceName("hmrbjh") + .withParameters(mapOf("f", "dataxnwcejczi", "nkjyfy", "datadaq")), + new DatasetReference().withReferenceName("mbtiugc") + .withParameters(mapOf("uvu", "datahgryof", "uocmxtyjaxk", "datakrkibno")), + new DatasetReference().withReferenceName("yovwtpm") + .withParameters(mapOf("mzsvdrryzxhtvyth", "datas")))); model = BinaryData.fromObject(model).toObject(CustomActivityReferenceObject.class); - Assertions.assertEquals("xbrjdpeypuqwd", model.linkedServices().get(0).referenceName()); - Assertions.assertEquals("tapwkwk", model.datasets().get(0).referenceName()); + Assertions.assertEquals("c", model.linkedServices().get(0).referenceName()); + Assertions.assertEquals("q", model.datasets().get(0).referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java index ece561b09699f..117cb0aacb85d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java @@ -24,82 +24,94 @@ public final class CustomActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomActivity model = BinaryData.fromString( - 
"{\"type\":\"Custom\",\"typeProperties\":{\"command\":\"databksafnecwyrt\",\"resourceLinkedService\":{\"referenceName\":\"ujyespcgps\",\"parameters\":{\"rc\":\"dataooxieyywwmiwia\"}},\"folderPath\":\"datayb\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"ia\",\"parameters\":{\"oyvygdefpy\":\"datazszcrwhr\"}},{\"referenceName\":\"twwaxx\",\"parameters\":{\"lbocecmnqcgbijyp\":\"datadsmravxtglpxmdw\",\"uzchegeogdkcrc\":\"datawbyrkxzebv\"}}],\"datasets\":[{\"referenceName\":\"zeumadlpxirew\",\"parameters\":{\"rmgefxkat\":\"datakicimyykmkelbqm\"}}]},\"extendedProperties\":{\"ptbjooq\":\"datakwldvksigxakg\",\"xkh\":\"databpnkvnuwjrxb\",\"imgzimtzzyjhy\":\"dataeqbxvtgloifmlbh\"},\"retentionTimeInDays\":\"datayxrwfuxx\",\"autoUserSpecification\":\"datavdhmumsmnubc\"},\"linkedServiceName\":{\"referenceName\":\"rps\",\"parameters\":{\"rintaaf\":\"dataxidqnvhrbfepf\",\"fuxhlgoexu\":\"datadysevqppxth\",\"ewhbxvriplgk\":\"datanbfoorgtxd\"}},\"policy\":{\"timeout\":\"datammqvzzoomwfo\",\"retry\":\"datahatpazljajzqgg\",\"retryIntervalInSeconds\":219227608,\"secureInput\":true,\"secureOutput\":false,\"\":{\"y\":\"datavbjk\",\"p\":\"dataodgisfejs\",\"ujyn\":\"datawi\"}},\"name\":\"jwktiyhiyk\",\"description\":\"vaodifupdafuf\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"qhgnrxxhzwtrxpwu\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Completed\"],\"\":{\"utdthloafhhiyk\":\"datahgbjukaswgvoa\",\"ptefdvjgbemrjb\":\"datatjsebcuynqdl\",\"pdprjethyhbnoye\":\"datavqu\",\"iqoiblaumog\":\"datauivdrzxobtekl\"}},{\"activity\":\"zxwmwrjmtpgkybdk\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"npxa\":\"dataqbeqz\"}},{\"activity\":\"scnnyg\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"wefzdnyga\":\"datargo\"}},{\"activity\":\"hv\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\",\"Completed\"],\"\":{\"mkkpbybhqwilbsdg\":\"datayqxjpzykkw\",\"kir\":\"datahe\",\"rth\":\"dataojzfsznephbc\",\"ijayvuymib\":\"databgavwbqjeto\"}}],\"userProperties\":[{\"name\":\"j\",\"value\":\"dataxpudeqw\"},{\"name\":\"ivjhm\",\"value\":\"datadvnoxjbhltxtpgq\"}],\"\":{\"afgbcwawblk\":\"datakkta\",\"sgklxgsqhczokun\":\"datacci\",\"cbqvje\":\"dataqqhbjmvbeznlukeq\",\"hmrybbhktnuzor\":\"datanwwqyyfctfsdhmru\"}}") + 
"{\"type\":\"jg\",\"typeProperties\":{\"command\":\"datapvealwdltstxro\",\"resourceLinkedService\":{\"referenceName\":\"znkeeeakzysakr\",\"parameters\":{\"kdwpcmy\":\"datatajdfqchj\",\"eedoruiycvourqd\":\"datadrrkfhlaygwxo\"}},\"folderPath\":\"datasitrsppucxi\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"vtbl\",\"parameters\":{\"ld\":\"datalaldggw\"}},{\"referenceName\":\"elnvcfume\",\"parameters\":{\"apxnoogmfujeci\":\"datahiradklzgiqm\",\"nljy\":\"dataicmezexwzpgy\",\"bcufhkrvxxzhqouo\":\"dataumpydk\"}},{\"referenceName\":\"sczcksjwdwzfdfkg\",\"parameters\":{\"qdotbnfbn\":\"datawvmbsmxh\",\"sgw\":\"dataybotuqzjfkuqvter\",\"q\":\"dataykcvwqyfixw\",\"z\":\"dataxmiw\"}},{\"referenceName\":\"hilypuxbnvquxut\",\"parameters\":{\"nzqnqwk\":\"datasttmvai\",\"tid\":\"dataevbgjhmyzsqov\"}}],\"datasets\":[{\"referenceName\":\"yyajlnotmirg\",\"parameters\":{\"fxmefymdmfrf\":\"datacln\",\"aqzdzkyqqbqbw\":\"dataghn\"}}]},\"extendedProperties\":{\"tdeyoxtlq\":\"datatwmmvbahftkcey\",\"tepzrcqnsjqrgtap\":\"datatx\"},\"retentionTimeInDays\":\"datapzphkmwbtr\",\"autoUserSpecification\":\"datalondbvlq\"},\"linkedServiceName\":{\"referenceName\":\"wzyvxdgten\",\"parameters\":{\"kyctunfukehx\":\"datal\",\"rqfizqamxxp\":\"dataktlrcauadhprjsuw\",\"ftwtepuo\":\"dataylm\",\"hpvohvcaq\":\"datatgwwtaolfdgjrg\"}},\"policy\":{\"timeout\":\"datapkzznarnjueqo\",\"retry\":\"datajgvqzoihtncadrmt\",\"retryIntervalInSeconds\":853348175,\"secureInput\":false,\"secureOutput\":false,\"\":{\"igxndfrxnvwqy\":\"dataazbfrqo\",\"osscpjtiungjbfm\":\"datakl\"}},\"name\":\"esamhxkjjhflrgx\",\"description\":\"ghd\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"zyafa\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"nrsrrijcwnt\":\"datazxjjdboxu\",\"u\":\"datatqtbcwtcqjsvlz\",\"lkocjuajclrtssbk\":\"datatzbvdz\",\"iprjahgqzb\":\"datadgwpyljn\"}},{\"activity\":\"icyufnum\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"qhmuryajp\":\"datar\",\"ihbvfallpobzv\":\"datauflvazpizossqm\",\"h\":\"datantsfyntkfziitbw\",\"s\":\"datawwhml\"}},{\"activity\":\"bfg\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"pdwknxephw\":\"dataenzuu\",\"ymeqiqnjcajmxu\":\"datadwlo\"}},{\"activity\":\"xkvpleooom\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\"],\"\":{\"uc\":\"datazvgog\"}}],\"userProperties\":[{\"name\":\"fobjlqnaxfvs\",\"value\":\"dataustr\"}],\"\":{\"bhgw\":\"dataxbjbknpzhfh\",\"qb\":\"datasl\",\"nepru\":\"datatcjbxochijwps\"}}") .toObject(CustomActivity.class); - Assertions.assertEquals("jwktiyhiyk", model.name()); - Assertions.assertEquals("vaodifupdafuf", model.description()); + Assertions.assertEquals("esamhxkjjhflrgx", model.name()); + Assertions.assertEquals("ghd", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("qhgnrxxhzwtrxpwu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("j", model.userProperties().get(0).name()); - Assertions.assertEquals("rps", model.linkedServiceName().referenceName()); - Assertions.assertEquals(219227608, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("zyafa", 
model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("fobjlqnaxfvs", model.userProperties().get(0).name()); + Assertions.assertEquals("wzyvxdgten", model.linkedServiceName().referenceName()); + Assertions.assertEquals(853348175, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("ujyespcgps", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("ia", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("zeumadlpxirew", model.referenceObjects().datasets().get(0).referenceName()); + Assertions.assertEquals("znkeeeakzysakr", model.resourceLinkedService().referenceName()); + Assertions.assertEquals("vtbl", model.referenceObjects().linkedServices().get(0).referenceName()); + Assertions.assertEquals("yyajlnotmirg", model.referenceObjects().datasets().get(0).referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CustomActivity model = new CustomActivity().withName("jwktiyhiyk").withDescription("vaodifupdafuf") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("qhgnrxxhzwtrxpwu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zxwmwrjmtpgkybdk") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("scnnyg") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hv") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("j").withValue("dataxpudeqw"), - new UserProperty().withName("ivjhm").withValue("datadvnoxjbhltxtpgq"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rps") - .withParameters(mapOf("rintaaf", "dataxidqnvhrbfepf", "fuxhlgoexu", "datadysevqppxth", "ewhbxvriplgk", - "datanbfoorgtxd"))) - .withPolicy(new ActivityPolicy() - .withTimeout("datammqvzzoomwfo").withRetry("datahatpazljajzqgg").withRetryIntervalInSeconds(219227608) - .withSecureInput(true).withSecureOutput(false).withAdditionalProperties(mapOf())) - .withCommand("databksafnecwyrt") - .withResourceLinkedService(new LinkedServiceReference().withReferenceName("ujyespcgps") - .withParameters(mapOf("rc", "dataooxieyywwmiwia"))) - .withFolderPath("datayb") - .withReferenceObjects(new CustomActivityReferenceObject() - .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("ia") - .withParameters(mapOf("oyvygdefpy", "datazszcrwhr")), - new LinkedServiceReference().withReferenceName("twwaxx").withParameters( - mapOf("lbocecmnqcgbijyp", "datadsmravxtglpxmdw", "uzchegeogdkcrc", "datawbyrkxzebv")))) - 
.withDatasets(Arrays.asList(new DatasetReference().withReferenceName("zeumadlpxirew") - .withParameters(mapOf("rmgefxkat", "datakicimyykmkelbqm"))))) - .withExtendedProperties(mapOf("ptbjooq", "datakwldvksigxakg", "xkh", "databpnkvnuwjrxb", "imgzimtzzyjhy", - "dataeqbxvtgloifmlbh")) - .withRetentionTimeInDays("datayxrwfuxx").withAutoUserSpecification("datavdhmumsmnubc"); + CustomActivity model + = new CustomActivity().withName("esamhxkjjhflrgx") + .withDescription("ghd") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("zyafa") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("icyufnum") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("bfg") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xkvpleooom") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("fobjlqnaxfvs").withValue("dataustr"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wzyvxdgten") + .withParameters(mapOf("kyctunfukehx", "datal", "rqfizqamxxp", "dataktlrcauadhprjsuw", "ftwtepuo", + "dataylm", "hpvohvcaq", "datatgwwtaolfdgjrg"))) + .withPolicy(new ActivityPolicy().withTimeout("datapkzznarnjueqo") + .withRetry("datajgvqzoihtncadrmt") + .withRetryIntervalInSeconds(853348175) + .withSecureInput(false) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withCommand("datapvealwdltstxro") + .withResourceLinkedService(new LinkedServiceReference() + .withReferenceName("znkeeeakzysakr") + .withParameters(mapOf("kdwpcmy", "datatajdfqchj", "eedoruiycvourqd", "datadrrkfhlaygwxo"))) + .withFolderPath("datasitrsppucxi") + .withReferenceObjects(new CustomActivityReferenceObject() + .withLinkedServices(Arrays.asList( + new LinkedServiceReference().withReferenceName("vtbl") + .withParameters(mapOf("ld", "datalaldggw")), + new LinkedServiceReference().withReferenceName("elnvcfume") + .withParameters(mapOf("apxnoogmfujeci", "datahiradklzgiqm", "nljy", "dataicmezexwzpgy", + "bcufhkrvxxzhqouo", "dataumpydk")), + new LinkedServiceReference().withReferenceName("sczcksjwdwzfdfkg") + .withParameters(mapOf("qdotbnfbn", "datawvmbsmxh", "sgw", "dataybotuqzjfkuqvter", "q", + "dataykcvwqyfixw", "z", "dataxmiw")), + new LinkedServiceReference().withReferenceName("hilypuxbnvquxut") + .withParameters(mapOf("nzqnqwk", "datasttmvai", "tid", "dataevbgjhmyzsqov")))) + .withDatasets(Arrays.asList(new DatasetReference().withReferenceName("yyajlnotmirg") + .withParameters(mapOf("fxmefymdmfrf", "datacln", "aqzdzkyqqbqbw", "dataghn"))))) + .withExtendedProperties(mapOf("tdeyoxtlq", "datatwmmvbahftkcey", "tepzrcqnsjqrgtap", "datatx")) + .withRetentionTimeInDays("datapzphkmwbtr") + .withAutoUserSpecification("datalondbvlq"); model = BinaryData.fromObject(model).toObject(CustomActivity.class); - Assertions.assertEquals("jwktiyhiyk", model.name()); - Assertions.assertEquals("vaodifupdafuf", model.description()); + Assertions.assertEquals("esamhxkjjhflrgx", 
model.name()); + Assertions.assertEquals("ghd", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("qhgnrxxhzwtrxpwu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("j", model.userProperties().get(0).name()); - Assertions.assertEquals("rps", model.linkedServiceName().referenceName()); - Assertions.assertEquals(219227608, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("zyafa", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("fobjlqnaxfvs", model.userProperties().get(0).name()); + Assertions.assertEquals("wzyvxdgten", model.linkedServiceName().referenceName()); + Assertions.assertEquals(853348175, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("ujyespcgps", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("ia", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("zeumadlpxirew", model.referenceObjects().datasets().get(0).referenceName()); + Assertions.assertEquals("znkeeeakzysakr", model.resourceLinkedService().referenceName()); + Assertions.assertEquals("vtbl", model.referenceObjects().linkedServices().get(0).referenceName()); + Assertions.assertEquals("yyajlnotmirg", model.referenceObjects().datasets().get(0).referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java index 045e3fa964d92..1a9f5f01898a2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java @@ -18,41 +18,39 @@ public final class CustomActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomActivityTypeProperties model = BinaryData.fromString( - 
"{\"command\":\"dataa\",\"resourceLinkedService\":{\"referenceName\":\"zjqpdfs\",\"parameters\":{\"cfxsgjdiqemcghor\":\"datavitis\",\"vhtmzwgircfnz\":\"datajawfczbbvr\",\"tvlqwpmmmhupvx\":\"dataybrflq\"}},\"folderPath\":\"datapsugebgboqnci\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"ufo\",\"parameters\":{\"jdaxezfdsoglji\":\"dataqgefxypxmkexjona\"}},{\"referenceName\":\"wduwn\",\"parameters\":{\"fachkzzn\":\"datafwogqwdxtp\"}}],\"datasets\":[{\"referenceName\":\"msfnigjoxhz\",\"parameters\":{\"b\":\"datamcsjyfbutqlotojf\",\"wjgjlo\":\"datay\",\"pbih\":\"datahhhkxlqu\"}},{\"referenceName\":\"injymnqweptejr\",\"parameters\":{\"rkcqpy\":\"datauktcnxtpamwjb\",\"tvovhuifbly\":\"datajj\"}},{\"referenceName\":\"qycknqmbvssjb\",\"parameters\":{\"n\":\"datausnnc\",\"uhsjzduumpl\":\"datai\",\"wjcy\":\"datawupfndafrz\"}},{\"referenceName\":\"zaneave\",\"parameters\":{\"w\":\"datarul\"}}]},\"extendedProperties\":{\"qpawwjvdohzewu\":\"datazesqdvmxuf\"},\"retentionTimeInDays\":\"dataaeshftls\",\"autoUserSpecification\":\"datapvflmxjd\"}") + "{\"command\":\"datahikwahbzdgwkim\",\"resourceLinkedService\":{\"referenceName\":\"atrvjkxcrx\",\"parameters\":{\"erjerh\":\"datankujxdniape\",\"wx\":\"datagiud\"}},\"folderPath\":\"datatppjdyikdykxhx\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"uc\",\"parameters\":{\"kmmcvftijl\":\"datarpdgm\",\"aednczvnwyfzavsa\":\"datahlcrjynef\"}}],\"datasets\":[{\"referenceName\":\"hshyxhfejtywl\",\"parameters\":{\"qeeodfplwfsmpbw\":\"datas\",\"qmcaofxg\":\"dataphj\",\"xkxjrttzhnam\":\"datayvjefnlxqmtedzxu\"}},{\"referenceName\":\"jqynwqcov\",\"parameters\":{\"hb\":\"datarsurq\",\"yqasdvepld\":\"datadxkojorcm\",\"ubqcqnch\":\"datafxmpyvlfujsbcfog\"}},{\"referenceName\":\"zyjug\",\"parameters\":{\"vcpisvprumttr\":\"datablbri\"}}]},\"extendedProperties\":{\"anlm\":\"dataugxtxxwbj\",\"xplrtueg\":\"datamvegxgy\",\"vjuowkt\":\"datahqulnjeybgpjy\",\"dkydqcgedip\":\"databpv\"},\"retentionTimeInDays\":\"datazmvttttjmdtf\",\"autoUserSpecification\":\"dataxaeekomiesg\"}") .toObject(CustomActivityTypeProperties.class); - Assertions.assertEquals("zjqpdfs", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("ufo", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("msfnigjoxhz", model.referenceObjects().datasets().get(0).referenceName()); + Assertions.assertEquals("atrvjkxcrx", model.resourceLinkedService().referenceName()); + Assertions.assertEquals("uc", model.referenceObjects().linkedServices().get(0).referenceName()); + Assertions.assertEquals("hshyxhfejtywl", model.referenceObjects().datasets().get(0).referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CustomActivityTypeProperties model - = new CustomActivityTypeProperties().withCommand("dataa") - .withResourceLinkedService(new LinkedServiceReference().withReferenceName("zjqpdfs") - .withParameters(mapOf("cfxsgjdiqemcghor", "datavitis", "vhtmzwgircfnz", "datajawfczbbvr", - "tvlqwpmmmhupvx", "dataybrflq"))) - .withFolderPath("datapsugebgboqnci") - .withReferenceObjects(new CustomActivityReferenceObject() - .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("ufo") - .withParameters(mapOf("jdaxezfdsoglji", "dataqgefxypxmkexjona")), - new LinkedServiceReference().withReferenceName("wduwn") - .withParameters(mapOf("fachkzzn", "datafwogqwdxtp")))) - .withDatasets(Arrays.asList( - new DatasetReference().withReferenceName("msfnigjoxhz").withParameters( - 
mapOf("b", "datamcsjyfbutqlotojf", "wjgjlo", "datay", "pbih", "datahhhkxlqu")), - new DatasetReference().withReferenceName("injymnqweptejr") - .withParameters(mapOf("rkcqpy", "datauktcnxtpamwjb", "tvovhuifbly", "datajj")), - new DatasetReference().withReferenceName("qycknqmbvssjb") - .withParameters(mapOf("n", "datausnnc", "uhsjzduumpl", "datai", "wjcy", "datawupfndafrz")), - new DatasetReference().withReferenceName("zaneave").withParameters(mapOf("w", "datarul"))))) - .withExtendedProperties(mapOf("qpawwjvdohzewu", "datazesqdvmxuf")) - .withRetentionTimeInDays("dataaeshftls").withAutoUserSpecification("datapvflmxjd"); + CustomActivityTypeProperties model = new CustomActivityTypeProperties().withCommand("datahikwahbzdgwkim") + .withResourceLinkedService(new LinkedServiceReference().withReferenceName("atrvjkxcrx") + .withParameters(mapOf("erjerh", "datankujxdniape", "wx", "datagiud"))) + .withFolderPath("datatppjdyikdykxhx") + .withReferenceObjects(new CustomActivityReferenceObject() + .withLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("uc") + .withParameters(mapOf("kmmcvftijl", "datarpdgm", "aednczvnwyfzavsa", "datahlcrjynef")))) + .withDatasets(Arrays.asList( + new DatasetReference().withReferenceName("hshyxhfejtywl") + .withParameters(mapOf("qeeodfplwfsmpbw", "datas", "qmcaofxg", "dataphj", "xkxjrttzhnam", + "datayvjefnlxqmtedzxu")), + new DatasetReference().withReferenceName("jqynwqcov") + .withParameters(mapOf("hb", "datarsurq", "yqasdvepld", "datadxkojorcm", "ubqcqnch", + "datafxmpyvlfujsbcfog")), + new DatasetReference().withReferenceName("zyjug") + .withParameters(mapOf("vcpisvprumttr", "datablbri"))))) + .withExtendedProperties(mapOf("anlm", "dataugxtxxwbj", "xplrtueg", "datamvegxgy", "vjuowkt", + "datahqulnjeybgpjy", "dkydqcgedip", "databpv")) + .withRetentionTimeInDays("datazmvttttjmdtf") + .withAutoUserSpecification("dataxaeekomiesg"); model = BinaryData.fromObject(model).toObject(CustomActivityTypeProperties.class); - Assertions.assertEquals("zjqpdfs", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("ufo", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("msfnigjoxhz", model.referenceObjects().datasets().get(0).referenceName()); + Assertions.assertEquals("atrvjkxcrx", model.resourceLinkedService().referenceName()); + Assertions.assertEquals("uc", model.referenceObjects().linkedServices().get(0).referenceName()); + Assertions.assertEquals("hshyxhfejtywl", model.referenceObjects().datasets().get(0).referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java index 6d3f25936d48d..b3143a57fbb37 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java @@ -18,27 +18,28 @@ public final class CustomDataSourceLinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomDataSourceLinkedService model = BinaryData.fromString( - 
"{\"type\":\"CustomDataSource\",\"typeProperties\":\"datag\",\"connectVia\":{\"referenceName\":\"vbtkafcnfitpu\",\"parameters\":{\"xb\":\"datakdwyjdvy\",\"nxbdisjeovgc\":\"datawjgyngoudclridql\"}},\"description\":\"chgjonrhdib\",\"parameters\":{\"meouiuvkcnqo\":{\"type\":\"Object\",\"defaultValue\":\"datay\"}},\"annotations\":[\"datab\",\"datawsfllzykzp\",\"datajdslpbyejsgbpjjn\",\"datalbkaknivw\"],\"\":{\"qilsbabqtjch\":\"datanddctkjcqhxdirt\"}}") + "{\"type\":\"drgtuaoou\",\"typeProperties\":\"datatjpsjwlpcxljz\",\"connectVia\":{\"referenceName\":\"afaxv\",\"parameters\":{\"tx\":\"datapbnrolgelsgnen\",\"rqahlbyjahbzbtlm\":\"datauwd\",\"mjyzice\":\"datacbwmvp\",\"bnduqgjibrxxiao\":\"datarlazcgwn\"}},\"description\":\"wu\",\"parameters\":{\"tgwadudok\":{\"type\":\"String\",\"defaultValue\":\"dataspugnv\"}},\"annotations\":[\"databozezxz\"],\"\":{\"qllt\":\"datargukqobo\",\"zqlyputawdmdikuf\":\"datalqufkrnrbnjkcol\",\"ujzofyldxk\":\"dataqvv\"}}") .toObject(CustomDataSourceLinkedService.class); - Assertions.assertEquals("vbtkafcnfitpu", model.connectVia().referenceName()); - Assertions.assertEquals("chgjonrhdib", model.description()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("meouiuvkcnqo").type()); + Assertions.assertEquals("afaxv", model.connectVia().referenceName()); + Assertions.assertEquals("wu", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tgwadudok").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { CustomDataSourceLinkedService model = new CustomDataSourceLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("vbtkafcnfitpu") - .withParameters(mapOf("xb", "datakdwyjdvy", "nxbdisjeovgc", "datawjgyngoudclridql"))) - .withDescription("chgjonrhdib") - .withParameters(mapOf("meouiuvkcnqo", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datay"))) - .withAnnotations(Arrays.asList("datab", "datawsfllzykzp", "datajdslpbyejsgbpjjn", "datalbkaknivw")) - .withTypeProperties("datag"); + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("afaxv") + .withParameters(mapOf("tx", "datapbnrolgelsgnen", "rqahlbyjahbzbtlm", "datauwd", "mjyzice", + "datacbwmvp", "bnduqgjibrxxiao", "datarlazcgwn"))) + .withDescription("wu") + .withParameters(mapOf("tgwadudok", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataspugnv"))) + .withAnnotations(Arrays.asList("databozezxz")) + .withTypeProperties("datatjpsjwlpcxljz"); model = BinaryData.fromObject(model).toObject(CustomDataSourceLinkedService.class); - Assertions.assertEquals("vbtkafcnfitpu", model.connectVia().referenceName()); - Assertions.assertEquals("chgjonrhdib", model.description()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("meouiuvkcnqo").type()); + Assertions.assertEquals("afaxv", model.connectVia().referenceName()); + Assertions.assertEquals("wu", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tgwadudok").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java index 3a10470303281..2f5836a21f08e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java @@ -19,33 +19,34 @@ public final class CustomDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomDataset model = BinaryData.fromString( - "{\"type\":\"CustomDataset\",\"typeProperties\":\"datafgufjnbx\",\"description\":\"mwdukinhl\",\"structure\":\"datagde\",\"schema\":\"datakzou\",\"linkedServiceName\":{\"referenceName\":\"vewwpzrdwcgldo\",\"parameters\":{\"dxfhhht\":\"dataa\",\"qtdn\":\"datast\",\"dshvvf\":\"datackkpl\"}},\"parameters\":{\"zrqnjxm\":{\"type\":\"Int\",\"defaultValue\":\"datayijjimhi\"},\"hqld\":{\"type\":\"String\",\"defaultValue\":\"dataduydwnwgru\"},\"i\":{\"type\":\"Array\",\"defaultValue\":\"datamnswxiexqwqnghx\"},\"qtny\":{\"type\":\"Array\",\"defaultValue\":\"dataujrxgunnqgyp\"}},\"annotations\":[\"datae\",\"dataqmvyumgmmuebsnzn\",\"datagsqufmjxcyo\"],\"folder\":{\"name\":\"cazisvbrqgcyjpg\"},\"\":{\"tbgblxbuibrvjzta\":\"datapkwonrzpghlr\"}}") + "{\"type\":\"owrer\",\"typeProperties\":\"datapulnrfcqufmcihp\",\"description\":\"xptch\",\"structure\":\"datayqqidqimlgbbfjm\",\"schema\":\"datajvxlhmpmh\",\"linkedServiceName\":{\"referenceName\":\"ftyaphq\",\"parameters\":{\"lsn\":\"datay\",\"mcqixuanccqvjf\":\"dataowm\"}},\"parameters\":{\"vmufzgug\":{\"type\":\"Bool\",\"defaultValue\":\"datamquxpjhcfaaradci\"},\"ssnqe\":{\"type\":\"Object\",\"defaultValue\":\"datawala\"},\"xxrwqfmdqecvtamq\":{\"type\":\"String\",\"defaultValue\":\"datatbptgcsma\"}},\"annotations\":[\"datanobfewwpsibx\",\"datavuqoqjrkblndyc\"],\"folder\":{\"name\":\"ycvtqnzjcy\"},\"\":{\"jdleajvmvvlooubs\":\"datahembtbwnalbadpi\",\"upfazusj\":\"dataxipjeopskgocjom\"}}") .toObject(CustomDataset.class); - Assertions.assertEquals("mwdukinhl", model.description()); - Assertions.assertEquals("vewwpzrdwcgldo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("zrqnjxm").type()); - Assertions.assertEquals("cazisvbrqgcyjpg", model.folder().name()); + Assertions.assertEquals("xptch", model.description()); + Assertions.assertEquals("ftyaphq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vmufzgug").type()); + Assertions.assertEquals("ycvtqnzjcy", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CustomDataset model - = new CustomDataset().withDescription("mwdukinhl").withStructure("datagde").withSchema("datakzou") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vewwpzrdwcgldo") - .withParameters(mapOf("dxfhhht", "dataa", "qtdn", "datast", "dshvvf", "datackkpl"))) - .withParameters(mapOf("zrqnjxm", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayijjimhi"), "hqld", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataduydwnwgru"), "i", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datamnswxiexqwqnghx"), - "qtny", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataujrxgunnqgyp"))) - .withAnnotations(Arrays.asList("datae", "dataqmvyumgmmuebsnzn", "datagsqufmjxcyo")) - .withFolder(new DatasetFolder().withName("cazisvbrqgcyjpg")).withTypeProperties("datafgufjnbx"); + 
CustomDataset model = new CustomDataset().withDescription("xptch") + .withStructure("datayqqidqimlgbbfjm") + .withSchema("datajvxlhmpmh") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ftyaphq") + .withParameters(mapOf("lsn", "datay", "mcqixuanccqvjf", "dataowm"))) + .withParameters(mapOf("vmufzgug", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datamquxpjhcfaaradci"), + "ssnqe", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawala"), + "xxrwqfmdqecvtamq", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatbptgcsma"))) + .withAnnotations(Arrays.asList("datanobfewwpsibx", "datavuqoqjrkblndyc")) + .withFolder(new DatasetFolder().withName("ycvtqnzjcy")) + .withTypeProperties("datapulnrfcqufmcihp"); model = BinaryData.fromObject(model).toObject(CustomDataset.class); - Assertions.assertEquals("mwdukinhl", model.description()); - Assertions.assertEquals("vewwpzrdwcgldo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("zrqnjxm").type()); - Assertions.assertEquals("cazisvbrqgcyjpg", model.folder().name()); + Assertions.assertEquals("xptch", model.description()); + Assertions.assertEquals("ftyaphq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vmufzgug").type()); + Assertions.assertEquals("ycvtqnzjcy", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java index f07c296e2df5f..18b9a08f80fbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java @@ -17,38 +17,40 @@ public final class CustomEventsTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomEventsTrigger model = BinaryData.fromString( - "{\"type\":\"CustomEventsTrigger\",\"typeProperties\":{\"subjectBeginsWith\":\"lmytaeallsx\",\"subjectEndsWith\":\"antssbzmo\",\"events\":[\"dataj\"],\"scope\":\"nhmxkgxrf\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"wpzuxoynxlkloqp\",\"name\":\"aqcrefk\"},\"parameters\":{\"wecrvkiaognm\":\"datanzowpv\",\"prlqwjwpejtszj\":\"datanrz\",\"vwvycvnow\":\"datavj\"}},{\"pipelineReference\":{\"referenceName\":\"lij\",\"name\":\"lhxwwhusro\"},\"parameters\":{\"wkwmq\":\"datamozafwqmo\",\"aqassukv\":\"dataoqldacxo\"}}],\"description\":\"kzxznc\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datanszmjzsjfc\"],\"\":{\"diirvx\":\"datakp\",\"miychdufla\":\"datahxyslhxokfoma\",\"yqp\":\"datasgutgzcbv\"}}") + 
"{\"type\":\"nyegh\",\"typeProperties\":{\"subjectBeginsWith\":\"rtlhpfuc\",\"subjectEndsWith\":\"waklflwqdj\",\"events\":[\"dataog\",\"databyks\",\"dataqxxy\"],\"scope\":\"rrixkobmrrnkdmn\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"lxnbbkbjnnzq\",\"name\":\"a\"},\"parameters\":{\"cllteuk\":\"dataxvl\",\"otgqgevielyho\":\"dataguc\"}},{\"pipelineReference\":{\"referenceName\":\"nzwhypjpypalptjp\",\"name\":\"dchey\"},\"parameters\":{\"bzmcprtanageh\":\"datasqxwqsszdwwk\",\"sjlilpicc\":\"datarhwkaatjssebyd\",\"afxoyddushvyj\":\"dataegtwstqgcjvklnrz\",\"lmxzdwpdwbnouk\":\"datahy\"}}],\"description\":\"m\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datak\",\"datadcs\"],\"\":{\"ldeq\":\"dataedfdzleaz\",\"q\":\"datafzyhikhnwseftlj\",\"ybe\":\"datapfk\"}}") .toObject(CustomEventsTrigger.class); - Assertions.assertEquals("kzxznc", model.description()); - Assertions.assertEquals("wpzuxoynxlkloqp", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("aqcrefk", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("lmytaeallsx", model.subjectBeginsWith()); - Assertions.assertEquals("antssbzmo", model.subjectEndsWith()); - Assertions.assertEquals("nhmxkgxrf", model.scope()); + Assertions.assertEquals("m", model.description()); + Assertions.assertEquals("lxnbbkbjnnzq", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("a", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("rtlhpfuc", model.subjectBeginsWith()); + Assertions.assertEquals("waklflwqdj", model.subjectEndsWith()); + Assertions.assertEquals("rrixkobmrrnkdmn", model.scope()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CustomEventsTrigger model - = new CustomEventsTrigger().withDescription("kzxznc").withAnnotations(Arrays.asList("datanszmjzsjfc")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("wpzuxoynxlkloqp").withName("aqcrefk")) - .withParameters( - mapOf("wecrvkiaognm", "datanzowpv", "prlqwjwpejtszj", "datanrz", "vwvycvnow", "datavj")), - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("lij").withName("lhxwwhusro")) - .withParameters(mapOf("wkwmq", "datamozafwqmo", "aqassukv", "dataoqldacxo")))) - .withSubjectBeginsWith("lmytaeallsx").withSubjectEndsWith("antssbzmo") - .withEvents(Arrays.asList("dataj")).withScope("nhmxkgxrf"); + CustomEventsTrigger model = new CustomEventsTrigger().withDescription("m") + .withAnnotations(Arrays.asList("datak", "datadcs")) + .withPipelines(Arrays.asList( + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("lxnbbkbjnnzq").withName("a")) + .withParameters(mapOf("cllteuk", "dataxvl", "otgqgevielyho", "dataguc")), + new TriggerPipelineReference() + .withPipelineReference( + new PipelineReference().withReferenceName("nzwhypjpypalptjp").withName("dchey")) + .withParameters(mapOf("bzmcprtanageh", "datasqxwqsszdwwk", "sjlilpicc", "datarhwkaatjssebyd", + "afxoyddushvyj", "dataegtwstqgcjvklnrz", "lmxzdwpdwbnouk", "datahy")))) + .withSubjectBeginsWith("rtlhpfuc") + .withSubjectEndsWith("waklflwqdj") + .withEvents(Arrays.asList("dataog", "databyks", "dataqxxy")) + .withScope("rrixkobmrrnkdmn"); model = BinaryData.fromObject(model).toObject(CustomEventsTrigger.class); - Assertions.assertEquals("kzxznc", model.description()); - 
Assertions.assertEquals("wpzuxoynxlkloqp", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("aqcrefk", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("lmytaeallsx", model.subjectBeginsWith()); - Assertions.assertEquals("antssbzmo", model.subjectEndsWith()); - Assertions.assertEquals("nhmxkgxrf", model.scope()); + Assertions.assertEquals("m", model.description()); + Assertions.assertEquals("lxnbbkbjnnzq", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("a", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("rtlhpfuc", model.subjectBeginsWith()); + Assertions.assertEquals("waklflwqdj", model.subjectEndsWith()); + Assertions.assertEquals("rrixkobmrrnkdmn", model.scope()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java index 57dad7f09d673..9d4df476b837f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java @@ -13,21 +13,22 @@ public final class CustomEventsTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { CustomEventsTriggerTypeProperties model = BinaryData.fromString( - "{\"subjectBeginsWith\":\"hkm\",\"subjectEndsWith\":\"raoytkkqoaazvmn\",\"events\":[\"datadzfypdsrfpihvij\",\"datajtkpocqboyjjfx\"],\"scope\":\"njduyotqb\"}") + "{\"subjectBeginsWith\":\"xith\",\"subjectEndsWith\":\"jxtobeqgzcadoq\",\"events\":[\"datafllqmu\",\"dataeolcgqjtv\",\"dataalkmwvgdfutds\",\"datajtuq\"],\"scope\":\"ojpauiccja\"}") .toObject(CustomEventsTriggerTypeProperties.class); - Assertions.assertEquals("hkm", model.subjectBeginsWith()); - Assertions.assertEquals("raoytkkqoaazvmn", model.subjectEndsWith()); - Assertions.assertEquals("njduyotqb", model.scope()); + Assertions.assertEquals("xith", model.subjectBeginsWith()); + Assertions.assertEquals("jxtobeqgzcadoq", model.subjectEndsWith()); + Assertions.assertEquals("ojpauiccja", model.scope()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - CustomEventsTriggerTypeProperties model = new CustomEventsTriggerTypeProperties().withSubjectBeginsWith("hkm") - .withSubjectEndsWith("raoytkkqoaazvmn") - .withEvents(Arrays.asList("datadzfypdsrfpihvij", "datajtkpocqboyjjfx")).withScope("njduyotqb"); + CustomEventsTriggerTypeProperties model = new CustomEventsTriggerTypeProperties().withSubjectBeginsWith("xith") + .withSubjectEndsWith("jxtobeqgzcadoq") + .withEvents(Arrays.asList("datafllqmu", "dataeolcgqjtv", "dataalkmwvgdfutds", "datajtuq")) + .withScope("ojpauiccja"); model = BinaryData.fromObject(model).toObject(CustomEventsTriggerTypeProperties.class); - Assertions.assertEquals("hkm", model.subjectBeginsWith()); - Assertions.assertEquals("raoytkkqoaazvmn", model.subjectEndsWith()); - Assertions.assertEquals("njduyotqb", model.scope()); + Assertions.assertEquals("xith", model.subjectBeginsWith()); + Assertions.assertEquals("jxtobeqgzcadoq", model.subjectEndsWith()); + 
Assertions.assertEquals("ojpauiccja", model.scope()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java index a748844197dbc..76ccd0d3fa700 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java @@ -10,7 +10,7 @@ public final class CustomSetupBaseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - CustomSetupBase model = BinaryData.fromString("{\"type\":\"CustomSetupBase\"}").toObject(CustomSetupBase.class); + CustomSetupBase model = BinaryData.fromString("{\"type\":\"zplhaljom\"}").toObject(CustomSetupBase.class); } @org.junit.jupiter.api.Test diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java index 0811b85b24e69..3e60e5c4d3551 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java @@ -11,14 +11,14 @@ public final class DWCopyCommandDefaultValueTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DWCopyCommandDefaultValue model - = BinaryData.fromString("{\"columnName\":\"dataubem\",\"defaultValue\":\"datauygmrenrbn\"}") + = BinaryData.fromString("{\"columnName\":\"datalbungrkjbdaxttoe\",\"defaultValue\":\"dataohipijfywmmqz\"}") .toObject(DWCopyCommandDefaultValue.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DWCopyCommandDefaultValue model - = new DWCopyCommandDefaultValue().withColumnName("dataubem").withDefaultValue("datauygmrenrbn"); + DWCopyCommandDefaultValue model = new DWCopyCommandDefaultValue().withColumnName("datalbungrkjbdaxttoe") + .withDefaultValue("dataohipijfywmmqz"); model = BinaryData.fromObject(model).toObject(DWCopyCommandDefaultValue.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java index dc9b1ef9d73af..cd57956bdb8bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java @@ -16,19 +16,22 @@ public final class DWCopyCommandSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DWCopyCommandSettings model = BinaryData.fromString( - 
"{\"defaultValues\":[{\"columnName\":\"datagows\",\"defaultValue\":\"dataguap\"}],\"additionalOptions\":{\"byfacexp\":\"hh\",\"pkqiqs\":\"pqykicesqpvmoxil\"}}") + "{\"defaultValues\":[{\"columnName\":\"datahozkmi\",\"defaultValue\":\"dataxdnugbisfnbtqd\"},{\"columnName\":\"dataw\",\"defaultValue\":\"datadroi\"},{\"columnName\":\"databulvk\",\"defaultValue\":\"datayhnfqnekpxd\"},{\"columnName\":\"datae\",\"defaultValue\":\"dataf\"}],\"additionalOptions\":{\"xjdolobtzr\":\"ahnsmktkhlq\",\"lpbzo\":\"xnlaurviyntc\",\"fkte\":\"tfbjk\"}}") .toObject(DWCopyCommandSettings.class); - Assertions.assertEquals("hh", model.additionalOptions().get("byfacexp")); + Assertions.assertEquals("ahnsmktkhlq", model.additionalOptions().get("xjdolobtzr")); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DWCopyCommandSettings model = new DWCopyCommandSettings() - .withDefaultValues( - Arrays.asList(new DWCopyCommandDefaultValue().withColumnName("datagows").withDefaultValue("dataguap"))) - .withAdditionalOptions(mapOf("byfacexp", "hh", "pkqiqs", "pqykicesqpvmoxil")); + .withDefaultValues(Arrays.asList( + new DWCopyCommandDefaultValue().withColumnName("datahozkmi").withDefaultValue("dataxdnugbisfnbtqd"), + new DWCopyCommandDefaultValue().withColumnName("dataw").withDefaultValue("datadroi"), + new DWCopyCommandDefaultValue().withColumnName("databulvk").withDefaultValue("datayhnfqnekpxd"), + new DWCopyCommandDefaultValue().withColumnName("datae").withDefaultValue("dataf"))) + .withAdditionalOptions(mapOf("xjdolobtzr", "ahnsmktkhlq", "lpbzo", "xnlaurviyntc", "fkte", "tfbjk")); model = BinaryData.fromObject(model).toObject(DWCopyCommandSettings.class); - Assertions.assertEquals("hh", model.additionalOptions().get("byfacexp")); + Assertions.assertEquals("ahnsmktkhlq", model.additionalOptions().get("xjdolobtzr")); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java index 71eb5a175501a..5df6675d638ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java @@ -13,22 +13,24 @@ public final class DataFlowDebugCommandPayloadTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugCommandPayload model = BinaryData.fromString( - "{\"streamName\":\"uartvti\",\"rowLimits\":1848545940,\"columns\":[\"chnmna\",\"mnxhkxjqirwrweo\"],\"expression\":\"ffifhx\"}") + "{\"streamName\":\"pvwgfstmhqykizmd\",\"rowLimits\":930902009,\"columns\":[\"fcluqvo\",\"mycjimryvwgcwwp\"],\"expression\":\"zgwe\"}") .toObject(DataFlowDebugCommandPayload.class); - Assertions.assertEquals("uartvti", model.streamName()); - Assertions.assertEquals(1848545940, model.rowLimits()); - Assertions.assertEquals("chnmna", model.columns().get(0)); - Assertions.assertEquals("ffifhx", model.expression()); + Assertions.assertEquals("pvwgfstmhqykizmd", model.streamName()); + Assertions.assertEquals(930902009, model.rowLimits()); + Assertions.assertEquals("fcluqvo", model.columns().get(0)); + Assertions.assertEquals("zgwe", model.expression()); } 
@org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowDebugCommandPayload model = new DataFlowDebugCommandPayload().withStreamName("uartvti") - .withRowLimits(1848545940).withColumns(Arrays.asList("chnmna", "mnxhkxjqirwrweo")).withExpression("ffifhx"); + DataFlowDebugCommandPayload model = new DataFlowDebugCommandPayload().withStreamName("pvwgfstmhqykizmd") + .withRowLimits(930902009) + .withColumns(Arrays.asList("fcluqvo", "mycjimryvwgcwwp")) + .withExpression("zgwe"); model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandPayload.class); - Assertions.assertEquals("uartvti", model.streamName()); - Assertions.assertEquals(1848545940, model.rowLimits()); - Assertions.assertEquals("chnmna", model.columns().get(0)); - Assertions.assertEquals("ffifhx", model.expression()); + Assertions.assertEquals("pvwgfstmhqykizmd", model.streamName()); + Assertions.assertEquals(930902009, model.rowLimits()); + Assertions.assertEquals("fcluqvo", model.columns().get(0)); + Assertions.assertEquals("zgwe", model.expression()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java index b0b7802b0f2e7..e5c653728e277 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java @@ -15,28 +15,30 @@ public final class DataFlowDebugCommandRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugCommandRequest model = BinaryData.fromString( - "{\"sessionId\":\"fcbahhp\",\"command\":\"executeExpressionQuery\",\"commandPayload\":{\"streamName\":\"o\",\"rowLimits\":2134608057,\"columns\":[\"filkmkkholv\"],\"expression\":\"dviauogp\"}}") + "{\"sessionId\":\"ldpuviy\",\"command\":\"executeExpressionQuery\",\"commandPayload\":{\"streamName\":\"beolh\",\"rowLimits\":1768939163,\"columns\":[\"mxuq\",\"bsxtkcudfbsfarfs\"],\"expression\":\"wlkjxn\"}}") .toObject(DataFlowDebugCommandRequest.class); - Assertions.assertEquals("fcbahhp", model.sessionId()); + Assertions.assertEquals("ldpuviy", model.sessionId()); Assertions.assertEquals(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY, model.command()); - Assertions.assertEquals("o", model.commandPayload().streamName()); - Assertions.assertEquals(2134608057, model.commandPayload().rowLimits()); - Assertions.assertEquals("filkmkkholv", model.commandPayload().columns().get(0)); - Assertions.assertEquals("dviauogp", model.commandPayload().expression()); + Assertions.assertEquals("beolh", model.commandPayload().streamName()); + Assertions.assertEquals(1768939163, model.commandPayload().rowLimits()); + Assertions.assertEquals("mxuq", model.commandPayload().columns().get(0)); + Assertions.assertEquals("wlkjxn", model.commandPayload().expression()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowDebugCommandRequest model = new DataFlowDebugCommandRequest().withSessionId("fcbahhp") + DataFlowDebugCommandRequest model = new DataFlowDebugCommandRequest().withSessionId("ldpuviy") .withCommand(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY) - 
.withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("o").withRowLimits(2134608057) - .withColumns(Arrays.asList("filkmkkholv")).withExpression("dviauogp")); + .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("beolh") + .withRowLimits(1768939163) + .withColumns(Arrays.asList("mxuq", "bsxtkcudfbsfarfs")) + .withExpression("wlkjxn")); model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandRequest.class); - Assertions.assertEquals("fcbahhp", model.sessionId()); + Assertions.assertEquals("ldpuviy", model.sessionId()); Assertions.assertEquals(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY, model.command()); - Assertions.assertEquals("o", model.commandPayload().streamName()); - Assertions.assertEquals(2134608057, model.commandPayload().rowLimits()); - Assertions.assertEquals("filkmkkholv", model.commandPayload().columns().get(0)); - Assertions.assertEquals("dviauogp", model.commandPayload().expression()); + Assertions.assertEquals("beolh", model.commandPayload().streamName()); + Assertions.assertEquals(1768939163, model.commandPayload().rowLimits()); + Assertions.assertEquals("mxuq", model.commandPayload().columns().get(0)); + Assertions.assertEquals("wlkjxn", model.commandPayload().expression()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java index 47870222c8a49..23e9dbe7755c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java @@ -12,18 +12,18 @@ public final class DataFlowDebugCommandResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugCommandResponseInner model - = BinaryData.fromString("{\"status\":\"snewmozqvbub\",\"data\":\"amhsycxhxzgazt\"}") + = BinaryData.fromString("{\"status\":\"dsxwefoh\",\"data\":\"bvopwndyqle\"}") .toObject(DataFlowDebugCommandResponseInner.class); - Assertions.assertEquals("snewmozqvbub", model.status()); - Assertions.assertEquals("amhsycxhxzgazt", model.data()); + Assertions.assertEquals("dsxwefoh", model.status()); + Assertions.assertEquals("bvopwndyqle", model.data()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowDebugCommandResponseInner model - = new DataFlowDebugCommandResponseInner().withStatus("snewmozqvbub").withData("amhsycxhxzgazt"); + = new DataFlowDebugCommandResponseInner().withStatus("dsxwefoh").withData("bvopwndyqle"); model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandResponseInner.class); - Assertions.assertEquals("snewmozqvbub", model.status()); - Assertions.assertEquals("amhsycxhxzgazt", model.data()); + Assertions.assertEquals("dsxwefoh", model.status()); + Assertions.assertEquals("bvopwndyqle", model.data()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java index bf8a2d3c1b6a6..0792ac9c0e96e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java @@ -16,26 +16,23 @@ public final class DataFlowDebugPackageDebugSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugPackageDebugSettings model = BinaryData.fromString( - "{\"sourceSettings\":[{\"sourceName\":\"cnqmxqpsw\",\"rowLimit\":2027239261,\"\":{\"gdhbe\":\"datahl\"}},{\"sourceName\":\"qkzszuwiwtglxxh\",\"rowLimit\":1438070602,\"\":{\"pqcbfrmbodthsq\":\"datapicrmnzhrgmqgjsx\",\"fr\":\"datagvriibakclac\"}},{\"sourceName\":\"ousxauzlwvsgmw\",\"rowLimit\":414245170,\"\":{\"mmkjsvthnwpztek\":\"dataizvu\",\"gplucfotangcfhny\":\"datavmribiat\",\"vtxnjmxmcuqud\":\"datazcugswvxwlmzqw\"}}],\"parameters\":{\"dkvgfabuiyjibuzp\":\"dataclxyn\"},\"datasetParameters\":\"dataugneikn\"}") + "{\"sourceSettings\":[{\"sourceName\":\"zrxklobd\",\"rowLimit\":61809023,\"\":{\"op\":\"datamkmlmvevfx\",\"hrdd\":\"datahbzxli\",\"nzqcy\":\"datatfgxqbawpcb\"}}],\"parameters\":{\"ofyuicd\":\"datap\",\"ybww\":\"datazb\",\"muvapc\":\"databdvibidmhmwffpl\"},\"datasetParameters\":\"datacrrvweyo\"}") .toObject(DataFlowDebugPackageDebugSettings.class); - Assertions.assertEquals("cnqmxqpsw", model.sourceSettings().get(0).sourceName()); - Assertions.assertEquals(2027239261, model.sourceSettings().get(0).rowLimit()); + Assertions.assertEquals("zrxklobd", model.sourceSettings().get(0).sourceName()); + Assertions.assertEquals(61809023, model.sourceSettings().get(0).rowLimit()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowDebugPackageDebugSettings model = new DataFlowDebugPackageDebugSettings() - .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("cnqmxqpsw").withRowLimit(2027239261) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("qkzszuwiwtglxxh").withRowLimit(1438070602) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("ousxauzlwvsgmw").withRowLimit(414245170) - .withAdditionalProperties(mapOf()))) - .withParameters(mapOf("dkvgfabuiyjibuzp", "dataclxyn")).withDatasetParameters("dataugneikn"); + .withSourceSettings(Arrays.asList(new DataFlowSourceSetting().withSourceName("zrxklobd") + .withRowLimit(61809023) + .withAdditionalProperties(mapOf()))) + .withParameters(mapOf("ofyuicd", "datap", "ybww", "datazb", "muvapc", "databdvibidmhmwffpl")) + .withDatasetParameters("datacrrvweyo"); model = BinaryData.fromObject(model).toObject(DataFlowDebugPackageDebugSettings.class); - Assertions.assertEquals("cnqmxqpsw", model.sourceSettings().get(0).sourceName()); - Assertions.assertEquals(2027239261, model.sourceSettings().get(0).rowLimit()); + Assertions.assertEquals("zrxklobd", model.sourceSettings().get(0).sourceName()); + Assertions.assertEquals(61809023, model.sourceSettings().get(0).rowLimit()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java index b8185933a1895..38eacf1534ca3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java @@ -30,122 +30,133 @@ public final class DataFlowDebugPackageTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugPackage model = BinaryData.fromString( - "{\"sessionId\":\"ryxynqnzrd\",\"dataFlow\":{\"properties\":{\"type\":\"DataFlow\",\"description\":\"vwxzn\",\"annotations\":[\"dataoeiy\",\"datab\",\"databp\",\"datahv\"],\"folder\":{\"name\":\"kvntjlrigjkskyri\"}},\"name\":\"vzidsxwaab\"},\"dataFlows\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"rygznmmaxriz\",\"annotations\":[\"databgopxlhslnel\",\"dataieixynllxe\"],\"folder\":{\"name\":\"rojphslhcawjutif\"}},\"name\":\"fmvigorqjbttzh\"},{\"properties\":{\"type\":\"DataFlow\",\"description\":\"glka\",\"annotations\":[\"datan\",\"datajuj\",\"dataickpz\",\"datacpopmxel\"],\"folder\":{\"name\":\"ltyjedexxmlfmk\"}},\"name\":\"cazuaw\"}],\"datasets\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"puamwabzxr\",\"structure\":\"datacush\",\"schema\":\"datahaivm\",\"linkedServiceName\":{\"referenceName\":\"yasflvgsgzwy\",\"parameters\":{\"knsmjblmljhlnymz\":\"dataoi\"}},\"parameters\":{\"gtayxonsupeujlz\":{\"type\":\"Bool\",\"defaultValue\":\"datayuzcbmqqvxmvw\"},\"nzoibgsxgnx\":{\"type\":\"SecureString\",\"defaultValue\":\"datacvsql\"},\"bxiqxeiiqbimht\":{\"type\":\"Int\",\"defaultValue\":\"dataonmpqoxwdof\"},\"qpofvwbc\":{\"type\":\"Float\",\"defaultValue\":\"datainheh\"}},\"annotations\":[\"datambnkb\",\"datavqvxk\"],\"folder\":{\"name\":\"qihebw\"},\"\":{\"gi\":\"databzuwfmdurag\",\"igkxkbsazga\":\"datavcjfelisdjubggb\",\"apvu\":\"datagacyrcmjdmspo\",\"zjedmstkvnlv\":\"datarylniofrzg\"}},\"name\":\"c\"}],\"linkedServices\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"nktwfansnvpdibmi\",\"parameters\":{\"yls\":\"datatbzbkiwbuqnyophz\"}},\"description\":\"rpfbcunezz\",\"parameters\":{\"psihcla\":{\"type\":\"Bool\",\"defaultValue\":\"datafwyfwlwxjwet\"},\"rsqqwztcm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataaylp\"},\"jexfdeqvhp\":{\"type\":\"Array\",\"defaultValue\":\"datachcxwaxfewzj\"}},\"annotations\":[\"datakkshkbffm\",\"datamxzjrgywwpgjx\",\"datanptfujgi\",\"datagaao\"],\"\":{\"swvr\":\"datataqutdewem\",\"kimrt\":\"dataunzzjgehk\",\"jqepqwhi\":\"dataxokffqyin\"}},\"name\":\"onsts\"}],\"staging\":{\"linkedService\":{\"referenceName\":\"xgvelfclduccbird\",\"parameters\":{\"stmninwjizcilng\":\"datawcobie\"}},\"folderPath\":\"datashejjtbxqm\"},\"debugSettings\":{\"sourceSettings\":[{\"sourceName\":\"xqzv\",\"rowLimit\":411885173,\"\":{\"qbsms\":\"dataycucrwnamikzeb\",\"kzruswh\":\"dataziqgfuh\",\"ycjsx\":\"datahczznvf\",\"xqhndvnoamlds\":\"datawwixzvumw\"}},{\"sourceName\":\"aohdjh\",\"rowLimit\":1043529198,\"\":{\"agltsxoa\":\"datakxcoxpelnje\",\"npbs\":\"dataftgz\"}}],\"parameters\":{\"ipgawtxx\":\"datafloccsrmozih\"},\"datasetParameters\":\"datay\"},\"\":{\"pcycilrmcaykg\":\"datacjxgrytf\",\"pndfcpfnznt\":\"datanoxuztrksx\",\"xuzvoamktcqi\":\"datajtwkjaos\",\"rtltla\":\"datasmgbzahgxqdl\"}}") + 
"{\"sessionId\":\"kdk\",\"dataFlow\":{\"properties\":{\"type\":\"jnnawtqa\",\"description\":\"xuckpggqoweyir\",\"annotations\":[\"dataisngwflqqmpizru\"],\"folder\":{\"name\":\"qxpxiwfcngjsaa\"}},\"name\":\"ixtmkzjvkviirhgf\"},\"dataFlows\":[{\"properties\":{\"type\":\"dpgra\",\"description\":\"vzbglbyvi\",\"annotations\":[\"datatbrxkjz\",\"datargxffmshkw\",\"databkgozxwopdbydpi\",\"dataqaclnapxbiy\"],\"folder\":{\"name\":\"gjkn\"}},\"name\":\"mfcttux\"},{\"properties\":{\"type\":\"yilflqoiquvrehmr\",\"description\":\"hvsujztc\",\"annotations\":[\"dataqjtwhauu\",\"datafprnjl\",\"datatlxs\"],\"folder\":{\"name\":\"ddoui\"}},\"name\":\"mowaziynknlqwzdv\"},{\"properties\":{\"type\":\"w\",\"description\":\"qszdtmaajquhuxyl\",\"annotations\":[\"datam\",\"dataygjbmzyospspsh\"],\"folder\":{\"name\":\"kyjpmspbps\"}},\"name\":\"fppyogtieyujtvcz\"}],\"datasets\":[{\"properties\":{\"type\":\"xrx\",\"description\":\"njdxvglnkvxl\",\"structure\":\"dataaglqivbgkcvkh\",\"schema\":\"datavuqd\",\"linkedServiceName\":{\"referenceName\":\"lvoniy\",\"parameters\":{\"hjknidibg\":\"dataubcpzgpxti\",\"ik\":\"datajxgpnrhgovfg\",\"wjrmzvuporqzd\":\"datamhha\",\"vxcnqmxqps\":\"datauydzvk\"}},\"parameters\":{\"dhbemzqkzszu\":{\"type\":\"Int\",\"defaultValue\":\"datakhlg\"},\"ljfp\":{\"type\":\"Int\",\"defaultValue\":\"datatglxx\"}},\"annotations\":[\"datacrmnzhrgmqgjs\",\"datavpqcb\"],\"folder\":{\"name\":\"bodthsqqgvri\"},\"\":{\"ousxauzlwvsgmw\":\"datakclacjfrn\"}},\"name\":\"qf\"}],\"linkedServices\":[{\"properties\":{\"type\":\"uxmmkjsvthnwp\",\"connectVia\":{\"referenceName\":\"ekov\",\"parameters\":{\"c\":\"databiattgplucfotan\",\"ugswvx\":\"datahnykz\"}},\"description\":\"mzqwmvtxnjmxmcu\",\"parameters\":{\"npdkv\":{\"type\":\"SecureString\",\"defaultValue\":\"datavclx\"},\"uzphdugnei\":{\"type\":\"Array\",\"defaultValue\":\"databuiyji\"}},\"annotations\":[\"datagox\",\"datajiuqhibtozi\"],\"\":{\"tvqylkmqpzoyhlfb\":\"datajedmurrxxgewp\",\"xoe\":\"datagwgcl\",\"jqlafcbahhpzp\":\"dataqinjipnwjf\",\"kkholvdndvia\":\"datafoiyjwpfilk\"}},\"name\":\"gphuartvtiu\"},{\"properties\":{\"type\":\"efchn\",\"connectVia\":{\"referenceName\":\"ahmnxhkxjqirw\",\"parameters\":{\"i\":\"dataooxf\",\"rsnewmozqvbubqma\":\"datahx\",\"taboidvmf\":\"datahsycxhxzgaz\"}},\"description\":\"ppu\",\"parameters\":{\"juahokqto\":{\"type\":\"Int\",\"defaultValue\":\"datapdfgkmtdherngbt\"}},\"annotations\":[\"datauxofshfphwpnulai\",\"datawzejywhslw\",\"dataojpllndnpdwrpqaf\"],\"\":{\"yetefyp\":\"datagsnnf\"}},\"name\":\"octfjgtixrjvzuyt\"}],\"staging\":{\"linkedService\":{\"referenceName\":\"lmuowo\",\"parameters\":{\"p\":\"datauir\"}},\"folderPath\":\"datanszonwpngaj\"},\"debugSettings\":{\"sourceSettings\":[{\"sourceName\":\"jawrtmjfjmyc\",\"rowLimit\":837645611,\"\":{\"khenlus\":\"datacoxovn\",\"jxtxrdc\":\"datanrd\"}},{\"sourceName\":\"jvidttge\",\"rowLimit\":1531674368,\"\":{\"s\":\"datayjtcvuwk\",\"uughtuqfecjxeyg\":\"datazies\"}}],\"parameters\":{\"cbuewmrswnjlxuz\":\"dataxu\",\"aqehg\":\"datahwpusxj\",\"tu\":\"datadohzjq\"},\"datasetParameters\":\"dataigebxncnwfepb\"},\"\":{\"g\":\"datafmxjg\"}}") .toObject(DataFlowDebugPackage.class); - Assertions.assertEquals("ryxynqnzrd", model.sessionId()); - Assertions.assertEquals("vzidsxwaab", model.dataFlow().name()); - Assertions.assertEquals("vwxzn", model.dataFlow().properties().description()); - Assertions.assertEquals("kvntjlrigjkskyri", model.dataFlow().properties().folder().name()); - Assertions.assertEquals("fmvigorqjbttzh", model.dataFlows().get(0).name()); - 
Assertions.assertEquals("rygznmmaxriz", model.dataFlows().get(0).properties().description()); - Assertions.assertEquals("rojphslhcawjutif", model.dataFlows().get(0).properties().folder().name()); - Assertions.assertEquals("c", model.datasets().get(0).name()); - Assertions.assertEquals("puamwabzxr", model.datasets().get(0).properties().description()); - Assertions.assertEquals("yasflvgsgzwy", - model.datasets().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, - model.datasets().get(0).properties().parameters().get("gtayxonsupeujlz").type()); - Assertions.assertEquals("qihebw", model.datasets().get(0).properties().folder().name()); - Assertions.assertEquals("onsts", model.linkedServices().get(0).name()); - Assertions.assertEquals("nktwfansnvpdibmi", - model.linkedServices().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("rpfbcunezz", model.linkedServices().get(0).properties().description()); - Assertions.assertEquals(ParameterType.BOOL, - model.linkedServices().get(0).properties().parameters().get("psihcla").type()); - Assertions.assertEquals("xgvelfclduccbird", model.staging().linkedService().referenceName()); - Assertions.assertEquals("xqzv", model.debugSettings().sourceSettings().get(0).sourceName()); - Assertions.assertEquals(411885173, model.debugSettings().sourceSettings().get(0).rowLimit()); + Assertions.assertEquals("kdk", model.sessionId()); + Assertions.assertEquals("ixtmkzjvkviirhgf", model.dataFlow().name()); + Assertions.assertEquals("xuckpggqoweyir", model.dataFlow().properties().description()); + Assertions.assertEquals("qxpxiwfcngjsaa", model.dataFlow().properties().folder().name()); + Assertions.assertEquals("mfcttux", model.dataFlows().get(0).name()); + Assertions.assertEquals("vzbglbyvi", model.dataFlows().get(0).properties().description()); + Assertions.assertEquals("gjkn", model.dataFlows().get(0).properties().folder().name()); + Assertions.assertEquals("qf", model.datasets().get(0).name()); + Assertions.assertEquals("njdxvglnkvxl", model.datasets().get(0).properties().description()); + Assertions.assertEquals("lvoniy", model.datasets().get(0).properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, + model.datasets().get(0).properties().parameters().get("dhbemzqkzszu").type()); + Assertions.assertEquals("bodthsqqgvri", model.datasets().get(0).properties().folder().name()); + Assertions.assertEquals("gphuartvtiu", model.linkedServices().get(0).name()); + Assertions.assertEquals("ekov", model.linkedServices().get(0).properties().connectVia().referenceName()); + Assertions.assertEquals("mzqwmvtxnjmxmcu", model.linkedServices().get(0).properties().description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.linkedServices().get(0).properties().parameters().get("npdkv").type()); + Assertions.assertEquals("lmuowo", model.staging().linkedService().referenceName()); + Assertions.assertEquals("jawrtmjfjmyc", model.debugSettings().sourceSettings().get(0).sourceName()); + Assertions.assertEquals(837645611, model.debugSettings().sourceSettings().get(0).rowLimit()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowDebugPackage model - = new DataFlowDebugPackage().withSessionId("ryxynqnzrd") - .withDataFlow( - new DataFlowDebugResource().withName("vzidsxwaab") - .withProperties(new DataFlow().withDescription("vwxzn") - .withAnnotations(Arrays.asList("dataoeiy", "datab", "databp", "datahv")).withFolder( - new 
DataFlowFolder().withName("kvntjlrigjkskyri")))) + = new DataFlowDebugPackage().withSessionId("kdk") + .withDataFlow(new DataFlowDebugResource().withName("ixtmkzjvkviirhgf") + .withProperties(new DataFlow().withDescription("xuckpggqoweyir") + .withAnnotations(Arrays.asList("dataisngwflqqmpizru")) + .withFolder(new DataFlowFolder().withName("qxpxiwfcngjsaa")))) .withDataFlows(Arrays.asList( - new DataFlowDebugResource().withName("fmvigorqjbttzh") - .withProperties(new DataFlow().withDescription("rygznmmaxriz") - .withAnnotations(Arrays.asList("databgopxlhslnel", "dataieixynllxe")) - .withFolder(new DataFlowFolder().withName("rojphslhcawjutif"))), - new DataFlowDebugResource().withName("cazuaw") - .withProperties(new DataFlow().withDescription("glka") - .withAnnotations(Arrays.asList("datan", "datajuj", "dataickpz", "datacpopmxel")) - .withFolder(new DataFlowFolder().withName("ltyjedexxmlfmk"))))) - .withDatasets(Arrays.asList(new DatasetDebugResource().withName("c").withProperties(new Dataset() - .withDescription("puamwabzxr").withStructure("datacush").withSchema("datahaivm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yasflvgsgzwy") - .withParameters(mapOf("knsmjblmljhlnymz", "dataoi"))) - .withParameters(mapOf("gtayxonsupeujlz", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datayuzcbmqqvxmvw"), - "nzoibgsxgnx", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datacvsql"), - "bxiqxeiiqbimht", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataonmpqoxwdof"), - "qpofvwbc", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datainheh"))) - .withAnnotations(Arrays.asList("datambnkb", "datavqvxk")) - .withFolder(new DatasetFolder().withName("qihebw")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) + new DataFlowDebugResource().withName("mfcttux") + .withProperties(new DataFlow().withDescription("vzbglbyvi") + .withAnnotations(Arrays.asList("datatbrxkjz", "datargxffmshkw", "databkgozxwopdbydpi", + "dataqaclnapxbiy")) + .withFolder(new DataFlowFolder().withName("gjkn"))), + new DataFlowDebugResource().withName("mowaziynknlqwzdv") + .withProperties(new DataFlow().withDescription("hvsujztc") + .withAnnotations(Arrays.asList("dataqjtwhauu", "datafprnjl", "datatlxs")) + .withFolder(new DataFlowFolder().withName("ddoui"))), + new DataFlowDebugResource() + .withName("fppyogtieyujtvcz") + .withProperties(new DataFlow() + .withDescription("qszdtmaajquhuxyl") + .withAnnotations(Arrays.asList("datam", "dataygjbmzyospspsh")) + .withFolder(new DataFlowFolder().withName("kyjpmspbps"))))) + .withDatasets(Arrays.asList(new DatasetDebugResource().withName("qf") + .withProperties(new Dataset().withDescription("njdxvglnkvxl") + .withStructure("dataaglqivbgkcvkh") + .withSchema("datavuqd") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lvoniy") + .withParameters(mapOf("hjknidibg", "dataubcpzgpxti", "ik", "datajxgpnrhgovfg", + "wjrmzvuporqzd", "datamhha", "vxcnqmxqps", "datauydzvk"))) + .withParameters(mapOf("dhbemzqkzszu", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datakhlg"), + "ljfp", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatglxx"))) + .withAnnotations(Arrays.asList("datacrmnzhrgmqgjs", "datavpqcb")) + .withFolder(new DatasetFolder().withName("bodthsqqgvri")) + .withAdditionalProperties(mapOf("type", "xrx"))))) .withLinkedServices( - Arrays - 
.asList( - new LinkedServiceDebugResource().withName("onsts").withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("nktwfansnvpdibmi") - .withParameters(mapOf("yls", "datatbzbkiwbuqnyophz"))) - .withDescription("rpfbcunezz") - .withParameters( - mapOf("psihcla", - new ParameterSpecification() - .withType(ParameterType.BOOL).withDefaultValue("datafwyfwlwxjwet"), - "rsqqwztcm", - new ParameterSpecification() - .withType(ParameterType.SECURE_STRING).withDefaultValue("dataaylp"), - "jexfdeqvhp", - new ParameterSpecification().withType(ParameterType.ARRAY) - .withDefaultValue("datachcxwaxfewzj"))) + Arrays.asList( + new LinkedServiceDebugResource().withName("gphuartvtiu") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ekov") + .withParameters(mapOf("c", "databiattgplucfotan", "ugswvx", "datahnykz"))) + .withDescription("mzqwmvtxnjmxmcu") + .withParameters(mapOf("npdkv", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datavclx"), + "uzphdugnei", + new ParameterSpecification().withType(ParameterType.ARRAY) + .withDefaultValue("databuiyji"))) + .withAnnotations(Arrays.asList("datagox", "datajiuqhibtozi")) + .withAdditionalProperties(mapOf("type", "uxmmkjsvthnwp"))), + new LinkedServiceDebugResource().withName("octfjgtixrjvzuyt") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ahmnxhkxjqirw") + .withParameters(mapOf("i", "dataooxf", "rsnewmozqvbubqma", "datahx", "taboidvmf", + "datahsycxhxzgaz"))) + .withDescription("ppu") + .withParameters(mapOf("juahokqto", + new ParameterSpecification().withType(ParameterType.INT) + .withDefaultValue("datapdfgkmtdherngbt"))) .withAnnotations( - Arrays.asList("datakkshkbffm", "datamxzjrgywwpgjx", "datanptfujgi", "datagaao")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withStaging(new DataFlowStagingInfo().withLinkedService(new LinkedServiceReference() - .withReferenceName("xgvelfclduccbird").withParameters(mapOf("stmninwjizcilng", "datawcobie"))) - .withFolderPath("datashejjtbxqm")) + Arrays.asList("datauxofshfphwpnulai", "datawzejywhslw", "dataojpllndnpdwrpqaf")) + .withAdditionalProperties(mapOf("type", "efchn"))))) + .withStaging(new DataFlowStagingInfo() + .withLinkedService( + new LinkedServiceReference().withReferenceName("lmuowo").withParameters(mapOf("p", "datauir"))) + .withFolderPath("datanszonwpngaj")) .withDebugSettings(new DataFlowDebugPackageDebugSettings() .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("xqzv").withRowLimit(411885173) + new DataFlowSourceSetting().withSourceName("jawrtmjfjmyc") + .withRowLimit(837645611) .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("aohdjh").withRowLimit(1043529198) + new DataFlowSourceSetting().withSourceName("jvidttge") + .withRowLimit(1531674368) .withAdditionalProperties(mapOf()))) - .withParameters(mapOf("ipgawtxx", "datafloccsrmozih")).withDatasetParameters("datay")) + .withParameters(mapOf("cbuewmrswnjlxuz", "dataxu", "aqehg", "datahwpusxj", "tu", "datadohzjq")) + .withDatasetParameters("dataigebxncnwfepb")) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DataFlowDebugPackage.class); - Assertions.assertEquals("ryxynqnzrd", model.sessionId()); - Assertions.assertEquals("vzidsxwaab", model.dataFlow().name()); - Assertions.assertEquals("vwxzn", 
model.dataFlow().properties().description()); - Assertions.assertEquals("kvntjlrigjkskyri", model.dataFlow().properties().folder().name()); - Assertions.assertEquals("fmvigorqjbttzh", model.dataFlows().get(0).name()); - Assertions.assertEquals("rygznmmaxriz", model.dataFlows().get(0).properties().description()); - Assertions.assertEquals("rojphslhcawjutif", model.dataFlows().get(0).properties().folder().name()); - Assertions.assertEquals("c", model.datasets().get(0).name()); - Assertions.assertEquals("puamwabzxr", model.datasets().get(0).properties().description()); - Assertions.assertEquals("yasflvgsgzwy", - model.datasets().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, - model.datasets().get(0).properties().parameters().get("gtayxonsupeujlz").type()); - Assertions.assertEquals("qihebw", model.datasets().get(0).properties().folder().name()); - Assertions.assertEquals("onsts", model.linkedServices().get(0).name()); - Assertions.assertEquals("nktwfansnvpdibmi", - model.linkedServices().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("rpfbcunezz", model.linkedServices().get(0).properties().description()); - Assertions.assertEquals(ParameterType.BOOL, - model.linkedServices().get(0).properties().parameters().get("psihcla").type()); - Assertions.assertEquals("xgvelfclduccbird", model.staging().linkedService().referenceName()); - Assertions.assertEquals("xqzv", model.debugSettings().sourceSettings().get(0).sourceName()); - Assertions.assertEquals(411885173, model.debugSettings().sourceSettings().get(0).rowLimit()); + Assertions.assertEquals("kdk", model.sessionId()); + Assertions.assertEquals("ixtmkzjvkviirhgf", model.dataFlow().name()); + Assertions.assertEquals("xuckpggqoweyir", model.dataFlow().properties().description()); + Assertions.assertEquals("qxpxiwfcngjsaa", model.dataFlow().properties().folder().name()); + Assertions.assertEquals("mfcttux", model.dataFlows().get(0).name()); + Assertions.assertEquals("vzbglbyvi", model.dataFlows().get(0).properties().description()); + Assertions.assertEquals("gjkn", model.dataFlows().get(0).properties().folder().name()); + Assertions.assertEquals("qf", model.datasets().get(0).name()); + Assertions.assertEquals("njdxvglnkvxl", model.datasets().get(0).properties().description()); + Assertions.assertEquals("lvoniy", model.datasets().get(0).properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, + model.datasets().get(0).properties().parameters().get("dhbemzqkzszu").type()); + Assertions.assertEquals("bodthsqqgvri", model.datasets().get(0).properties().folder().name()); + Assertions.assertEquals("gphuartvtiu", model.linkedServices().get(0).name()); + Assertions.assertEquals("ekov", model.linkedServices().get(0).properties().connectVia().referenceName()); + Assertions.assertEquals("mzqwmvtxnjmxmcu", model.linkedServices().get(0).properties().description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.linkedServices().get(0).properties().parameters().get("npdkv").type()); + Assertions.assertEquals("lmuowo", model.staging().linkedService().referenceName()); + Assertions.assertEquals("jawrtmjfjmyc", model.debugSettings().sourceSettings().get(0).sourceName()); + Assertions.assertEquals(837645611, model.debugSettings().sourceSettings().get(0).rowLimit()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java index 820891068d8aa..e6383fbf92bf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java @@ -15,22 +15,22 @@ public final class DataFlowDebugResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"ltzkatbhjmznnb\",\"annotations\":[\"dataeq\",\"datalarvlagunbtg\"],\"folder\":{\"name\":\"wlnbm\"}},\"name\":\"reeudzqavb\"}") + "{\"properties\":{\"type\":\"bgdlfgtdysna\",\"description\":\"flq\",\"annotations\":[\"dataq\",\"dataamz\",\"datarwd\"],\"folder\":{\"name\":\"eqyj\"}},\"name\":\"zi\"}") .toObject(DataFlowDebugResource.class); - Assertions.assertEquals("reeudzqavb", model.name()); - Assertions.assertEquals("ltzkatbhjmznnb", model.properties().description()); - Assertions.assertEquals("wlnbm", model.properties().folder().name()); + Assertions.assertEquals("zi", model.name()); + Assertions.assertEquals("flq", model.properties().description()); + Assertions.assertEquals("eqyj", model.properties().folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowDebugResource model = new DataFlowDebugResource().withName("reeudzqavb") - .withProperties(new DataFlow().withDescription("ltzkatbhjmznnb") - .withAnnotations(Arrays.asList("dataeq", "datalarvlagunbtg")) - .withFolder(new DataFlowFolder().withName("wlnbm"))); + DataFlowDebugResource model = new DataFlowDebugResource().withName("zi") + .withProperties(new DataFlow().withDescription("flq") + .withAnnotations(Arrays.asList("dataq", "dataamz", "datarwd")) + .withFolder(new DataFlowFolder().withName("eqyj"))); model = BinaryData.fromObject(model).toObject(DataFlowDebugResource.class); - Assertions.assertEquals("reeudzqavb", model.name()); - Assertions.assertEquals("ltzkatbhjmznnb", model.properties().description()); - Assertions.assertEquals("wlnbm", model.properties().folder().name()); + Assertions.assertEquals("zi", model.name()); + Assertions.assertEquals("flq", model.properties().description()); + Assertions.assertEquals("eqyj", model.properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java index abd2ddb9f65fc..a1bf606aad79e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java @@ -14,36 +14,41 @@ public final class DataFlowDebugSessionInfoInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowDebugSessionInfoInner 
model = BinaryData.fromString( - "{\"dataFlowName\":\"bwefqsfapaqtfer\",\"computeType\":\"wexjkmfxapjwogq\",\"coreCount\":211935178,\"nodeCount\":775989398,\"integrationRuntimeName\":\"dcdab\",\"sessionId\":\"wpwyawbz\",\"startTime\":\"qbucljgkyexaoguy\",\"timeToLiveInMinutes\":166467616,\"lastActivityTime\":\"dsdaultxijjumf\",\"\":{\"nqnm\":\"dataz\",\"qdqx\":\"datajng\",\"zsvtuikzhajqgl\":\"databjwgnyfus\",\"l\":\"datafh\"}}") + "{\"dataFlowName\":\"lyrtltlaprlt\",\"computeType\":\"atbhjmznn\",\"coreCount\":998957841,\"nodeCount\":421693668,\"integrationRuntimeName\":\"ala\",\"sessionId\":\"lagun\",\"startTime\":\"gfebwlnb\",\"timeToLiveInMinutes\":1797453594,\"lastActivityTime\":\"e\",\"\":{\"yyzglgouwtlmjjy\":\"datazqavbpdqmjx\",\"eytu\":\"dataojqtobaxk\",\"snkq\":\"datalbfjkwr\",\"hdenxaulk\":\"datahsyrqunj\"}}") .toObject(DataFlowDebugSessionInfoInner.class); - Assertions.assertEquals("bwefqsfapaqtfer", model.dataFlowName()); - Assertions.assertEquals("wexjkmfxapjwogq", model.computeType()); - Assertions.assertEquals(211935178, model.coreCount()); - Assertions.assertEquals(775989398, model.nodeCount()); - Assertions.assertEquals("dcdab", model.integrationRuntimeName()); - Assertions.assertEquals("wpwyawbz", model.sessionId()); - Assertions.assertEquals("qbucljgkyexaoguy", model.startTime()); - Assertions.assertEquals(166467616, model.timeToLiveInMinutes()); - Assertions.assertEquals("dsdaultxijjumf", model.lastActivityTime()); + Assertions.assertEquals("lyrtltlaprlt", model.dataFlowName()); + Assertions.assertEquals("atbhjmznn", model.computeType()); + Assertions.assertEquals(998957841, model.coreCount()); + Assertions.assertEquals(421693668, model.nodeCount()); + Assertions.assertEquals("ala", model.integrationRuntimeName()); + Assertions.assertEquals("lagun", model.sessionId()); + Assertions.assertEquals("gfebwlnb", model.startTime()); + Assertions.assertEquals(1797453594, model.timeToLiveInMinutes()); + Assertions.assertEquals("e", model.lastActivityTime()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowDebugSessionInfoInner model - = new DataFlowDebugSessionInfoInner().withDataFlowName("bwefqsfapaqtfer").withComputeType("wexjkmfxapjwogq") - .withCoreCount(211935178).withNodeCount(775989398).withIntegrationRuntimeName("dcdab") - .withSessionId("wpwyawbz").withStartTime("qbucljgkyexaoguy").withTimeToLiveInMinutes(166467616) - .withLastActivityTime("dsdaultxijjumf").withAdditionalProperties(mapOf()); + DataFlowDebugSessionInfoInner model = new DataFlowDebugSessionInfoInner().withDataFlowName("lyrtltlaprlt") + .withComputeType("atbhjmznn") + .withCoreCount(998957841) + .withNodeCount(421693668) + .withIntegrationRuntimeName("ala") + .withSessionId("lagun") + .withStartTime("gfebwlnb") + .withTimeToLiveInMinutes(1797453594) + .withLastActivityTime("e") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DataFlowDebugSessionInfoInner.class); - Assertions.assertEquals("bwefqsfapaqtfer", model.dataFlowName()); - Assertions.assertEquals("wexjkmfxapjwogq", model.computeType()); - Assertions.assertEquals(211935178, model.coreCount()); - Assertions.assertEquals(775989398, model.nodeCount()); - Assertions.assertEquals("dcdab", model.integrationRuntimeName()); - Assertions.assertEquals("wpwyawbz", model.sessionId()); - Assertions.assertEquals("qbucljgkyexaoguy", model.startTime()); - Assertions.assertEquals(166467616, model.timeToLiveInMinutes()); - Assertions.assertEquals("dsdaultxijjumf", model.lastActivityTime()); + 
Assertions.assertEquals("lyrtltlaprlt", model.dataFlowName()); + Assertions.assertEquals("atbhjmznn", model.computeType()); + Assertions.assertEquals(998957841, model.coreCount()); + Assertions.assertEquals(421693668, model.nodeCount()); + Assertions.assertEquals("ala", model.integrationRuntimeName()); + Assertions.assertEquals("lagun", model.sessionId()); + Assertions.assertEquals("gfebwlnb", model.startTime()); + Assertions.assertEquals(1797453594, model.timeToLiveInMinutes()); + Assertions.assertEquals("e", model.lastActivityTime()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java index a3e793ac9e249..2d8fc4db2f8dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java @@ -6,11 +6,9 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.AddDataFlowToDebugSessionResponse; import com.azure.resourcemanager.datafactory.models.DataFlow; @@ -29,7 +27,6 @@ import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -37,138 +34,102 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowDebugSessionsAddDataFlowWithResponseMockTests { @Test public void testAddDataFlowWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"jobVersion\":\"cs\"}"; - String responseStr = "{\"jobVersion\":\"pywpednousxr\"}"; + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - 
Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AddDataFlowToDebugSessionResponse response - = manager.dataFlowDebugSessions() - .addDataFlowWithResponse("zobpxfgp", "dzdzswvfwiunj", - new DataFlowDebugPackage().withSessionId("xgztfzgd") - .withDataFlow(new DataFlowDebugResource().withName("wjcwwbunfymbwin") - .withProperties(new DataFlow().withDescription("vbiryxsa") - .withAnnotations(Arrays.asList("datag", "datansesxwkhkcd", "dataofakmopqfzvvtif", - "dataqsuemewfutovb")) - .withFolder(new DataFlowFolder().withName("fucxtmhmzcnpsd")))) - .withDataFlows(Arrays.asList( - new DataFlowDebugResource().withName("ayjeh") - .withProperties(new DataFlow().withDescription("t") - .withAnnotations(Arrays.asList("dataokttqgokhaj", "dataylkflf", - "dataofjskndwywbptvym", "datampdcddbe")) - .withFolder(new DataFlowFolder().withName("prlxxbmy"))), - new DataFlowDebugResource().withName("wzbkgtgvrrzmkte") - .withProperties(new DataFlow().withDescription("owvqpncifdxtibvq") - .withAnnotations(Arrays.asList("datapfdlcstucsw", "datamnsdw")) - .withFolder(new DataFlowFolder().withName("atfgoerjmhtxip"))), - new DataFlowDebugResource().withName("mlaile") - .withProperties(new DataFlow().withDescription("euxxtslhjcwlfz") - .withAnnotations(Arrays.asList("datawexcktg", "datacccgoik", "datajjskzuhdiyavfey", - "databyduyastybomiyj")) - .withFolder(new DataFlowFolder().withName("seemh"))), - new DataFlowDebugResource().withName("dfsteouzoglvt") - .withProperties(new DataFlow().withDescription("usiv") - .withAnnotations(Arrays.asList("dataunnjwmdtbx", "datatomcba", "dataamtdfpkfw")) - .withFolder(new DataFlowFolder().withName("elxd"))))) - .withDatasets(Arrays.asList( - new DatasetDebugResource().withName("oy") - .withProperties(new Dataset().withDescription("ejvlf").withStructure("datarqkgibpehqb") - .withSchema("datazcmqqehxigsi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fb") - .withParameters(mapOf("lbzxyejoxd", "dataoaypixryf", "tbpiccriqhiwyk", - "datarjulttqgun", "mgparbirgw", "datazncfh", "irnfnlyvdryx", "datagewd"))) - .withParameters( - mapOf("tazuac", new ParameterSpecification().withType(ParameterType.ARRAY))) - .withAnnotations(Arrays.asList("dataslstekbbq", "datatvpsxycvoex")) - .withFolder(new DatasetFolder().withName("xrvxwlfmbbr")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetDebugResource().withName("hrxgvubsxajr").withProperties(new Dataset() - .withDescription("a").withStructure("datannlasf").withSchema("datajyvu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yexlpmbtm") - .withParameters(mapOf("amsgfvuffdvukjy", "datavbpnye"))) + AddDataFlowToDebugSessionResponse response = manager.dataFlowDebugSessions() + .addDataFlowWithResponse("jqzoabmuymaf", "ytbmfnkmcq", + new DataFlowDebugPackage().withSessionId("f") + .withDataFlow(new 
DataFlowDebugResource().withName("giwzzgaxi") + .withProperties(new DataFlow().withDescription("ktnadjz") + .withAnnotations( + Arrays.asList("datajlfyum", "datalntyilsiqdunkzad", "datadiqbtfam", "databkhqdf")) + .withFolder(new DataFlowFolder().withName("hcpijri")))) + .withDataFlows( + Arrays.asList(new DataFlowDebugResource().withName("ov") + .withProperties(new DataFlow().withDescription("il") + .withAnnotations(Arrays.asList("datacptthwmuyqgbma")) + .withFolder(new DataFlowFolder().withName("trvcbwt"))))) + .withDatasets(Arrays.asList(new DatasetDebugResource() + .withName("mcuuztrrmsjr") + .withProperties(new Dataset() + .withDescription("pnbfvplztovpikik") + .withStructure("datavuk") + .withSchema("dataeejxqoiozap") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xuytalrzrpkmv") + .withParameters(mapOf("kfvmmszwqwthuviq", "datawhw", "czn", "datagbsdvbrz", + "yvllkskvnr", "datasrttfral"))) + .withParameters(mapOf("rxrkpy", new ParameterSpecification().withType(ParameterType.BOOL), + "kvwtphdkdzjdrijo", new ParameterSpecification().withType(ParameterType.INT), "nwnx", + new ParameterSpecification().withType(ParameterType.FLOAT))) + .withAnnotations(Arrays.asList("datacoemx", "datalhqgqjrs", "datanfyzl")) + .withFolder(new DatasetFolder().withName("kvpvdwylg")) + .withAdditionalProperties(mapOf("type", "fbq"))))) + .withLinkedServices(Arrays.asList( + new LinkedServiceDebugResource().withName("vbxxtgyadifves") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("xnzerrfaumpgjuls") + .withParameters(mapOf("lpisfkd", "datajmgfmurrg", "pxlskjtdcgwuyx", "datakgrj", + "ysqwoi", "datacihtonesnurshfzk"))) + .withDescription("bqxbu") .withParameters( - mapOf("nrgmjp", new ParameterSpecification().withType(ParameterType.INT), - "efwxcuzu", new ParameterSpecification().withType(ParameterType.FLOAT), - "bysyprskj", new ParameterSpecification().withType(ParameterType.OBJECT))) - .withAnnotations(Arrays.asList("datavbvkvomnoslbkrh", "datanvozjudg", "datadsflitmm")) - .withFolder(new DatasetFolder().withName("z")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) - .withLinkedServices( - Arrays.asList( - new LinkedServiceDebugResource().withName("cxwkwkxln") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("jvzpvisqf") - .withParameters(mapOf("kenrlcjms", "datalmxvmrdpfan", "dahhiid", - "datapgmebx", "jnoibclfqdtfj", "dataogakrpmjodbdcy"))) - .withDescription("v") - .withParameters(mapOf("yvstv", - new ParameterSpecification().withType(ParameterType.ARRAY), "slqyhabgocq", - new ParameterSpecification().withType(ParameterType.ARRAY), "li", - new ParameterSpecification().withType(ParameterType.FLOAT))) - .withAnnotations(Arrays.asList("datanhcklllwgrkvl", - "dataqkpxvemjjfvanefw")) - .withAdditionalProperties(mapOf("type", "LinkedService"))), - new LinkedServiceDebugResource().withName("pbitcsbzuyfrann") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ba") - .withParameters(mapOf("mwbshqpjueo", "datanjxyproqebsuij", "krejuuii", - "datahtltooikzouv", "arxyh", "datarbtfarb", "xbis", "dataukc"))) - .withDescription("itjovjrirg") - .withParameters(mapOf("acb", - new ParameterSpecification().withType(ParameterType.BOOL), "ioxtqxrbrdpz", - new ParameterSpecification().withType(ParameterType.FLOAT), "yczlylud", - new ParameterSpecification().withType(ParameterType.FLOAT), "iaxigeo", - new 
ParameterSpecification().withType(ParameterType.OBJECT))) - .withAnnotations(Arrays.asList("datahkhnzsrgiwvze", "datag", "datajtuzqreprn")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withStaging( - new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("dymuehvvvrtsnc") - .withParameters(mapOf("pxfhixaagvkwe", "dataunhoogagtjcmly", "bgva", "datalcito", - "fnffjxdccwuzqwv", "datazfiwao", "bawzafzdzhh", "datakewlyrweups"))) - .withFolderPath("dataxcelvawwj")) - .withDebugSettings(new DataFlowDebugPackageDebugSettings() - .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("jv").withRowLimit(1429028510) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("pmlckzdwietfxpdz").withRowLimit(834316963) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("lygjtibhzjhqfuq").withRowLimit(490505781) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("yeczlxunhntsqsp").withRowLimit(1295527653) - .withAdditionalProperties(mapOf()))) - .withParameters(mapOf("veknwldqj", "dataversu", "cwngg", "datagzcwrhhgnmjxxov", "cwiz", - "datavjbgynpapzbbcfu")) - .withDatasetParameters("datam")) - .withAdditionalProperties(mapOf()), - com.azure.core.util.Context.NONE) - .getValue(); + mapOf("ebhfhvpopctw", new ParameterSpecification().withType(ParameterType.FLOAT), + "jl", new ParameterSpecification().withType(ParameterType.SECURE_STRING), + "yaxnsfwsfdeqp", new ParameterSpecification().withType(ParameterType.STRING), + "hymyim", new ParameterSpecification().withType(ParameterType.FLOAT))) + .withAnnotations( + Arrays.asList("dataolerxdeuckwnwbh", "datatevnpqtjejqkz", "datawwyrrttsp")) + .withAdditionalProperties(mapOf("type", "oosq"))), + new LinkedServiceDebugResource().withName("pthnkjades") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("c") + .withParameters(mapOf("jxthsqlxsx", "datakareusxbcxhytkiq", "xegexrzzpcuas", + "datagzcjie", "uxtkiqcrhgtir", "dataaobapfxjedpvw"))) + .withDescription("bpdsxgxnnj") + .withParameters(mapOf("t", new ParameterSpecification().withType(ParameterType.BOOL))) + .withAnnotations(Arrays.asList("datalykelxpyzpqh", "datacwdiiecptcd")) + .withAdditionalProperties(mapOf("type", "jwdfowjva"))), + new LinkedServiceDebugResource().withName("fsmkfxahfvfpmztb") + .withProperties( + new LinkedService() + .withConnectVia( + new IntegrationRuntimeReference().withReferenceName("pbdmqhg") + .withParameters(mapOf("tjqbhenpfqhupbty", "dataxexvdlivlgu", "lmfrvgckjou", + "datawobydi"))) + .withDescription("hrefopdc") + .withParameters(mapOf("ilfgdngczk", + new ParameterSpecification().withType(ParameterType.ARRAY), "gearrrbll", + new ParameterSpecification().withType(ParameterType.SECURE_STRING), "okkgutpqn", + new ParameterSpecification().withType(ParameterType.FLOAT))) + .withAnnotations(Arrays.asList("datajdofyhookx")) + .withAdditionalProperties(mapOf("type", "xs"))))) + .withStaging(new DataFlowStagingInfo() + .withLinkedService(new LinkedServiceReference().withReferenceName("scwiwzowshqrjjpx") + .withParameters(mapOf("dniwtvoolonlc", "datadlxzx", "fuvtum", "datacmrybwuexszpgk", + "lqklcfvvsebfjoyb", "dataghliny", "xlwhiqfjptytpchd", "dataowttraol"))) + .withFolderPath("datakrk")) + .withDebugSettings(new DataFlowDebugPackageDebugSettings() + .withSourceSettings(Arrays.asList(new DataFlowSourceSetting().withSourceName("ienothtinqshd") + 
.withRowLimit(1482317078) + .withAdditionalProperties(mapOf()))) + .withParameters(mapOf("ozcmffygaxbsh", "dataqhkgfxzwjoz")) + .withDatasetParameters("datamhs")) + .withAdditionalProperties(mapOf()), + com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("pywpednousxr", response.jobVersion()); + Assertions.assertEquals("cs", response.jobVersion()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java index 377e227572d03..44539418f03d2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java @@ -6,74 +6,42 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest; import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionResponse; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.nio.ByteBuffer; +import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowDebugSessionsCreateMockTests { @Test public void testCreate() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - - String responseStr = "{\"status\":\"gaecujlaecwnn\",\"sessionId\":\"f\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", 
AzureEnvironment.AZURE)); - - CreateDataFlowDebugSessionResponse response - = manager.dataFlowDebugSessions().create("zudysndiexb", "kwjmqnbaerggqaoh", - new CreateDataFlowDebugSessionRequest().withComputeType("qvb").withCoreCount(1071652867) - .withTimeToLive(834593701) - .withIntegrationRuntime(new IntegrationRuntimeDebugResource().withName("xgidjiijpdbwknbm") - .withProperties(new IntegrationRuntime().withDescription("n") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime")))), + String responseStr = "{\"status\":\"yputnywwtfnx\",\"sessionId\":\"punowvavd\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + CreateDataFlowDebugSessionResponse response = manager.dataFlowDebugSessions() + .create("prwgbvdtgaha", "szynsxxnljfsfxtt", + new CreateDataFlowDebugSessionRequest().withComputeType("edgsceme") + .withCoreCount(2066654081) + .withTimeToLive(948042645) + .withIntegrationRuntime(new IntegrationRuntimeDebugResource().withName("dmtylq") + .withProperties(new SelfHostedIntegrationRuntime().withDescription("eh"))), com.azure.core.util.Context.NONE); - Assertions.assertEquals("gaecujlaecwnn", response.status()); - Assertions.assertEquals("f", response.sessionId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("yputnywwtfnx", response.status()); + Assertions.assertEquals("punowvavd", response.sessionId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java index 860eda143f239..efa28c5ac03a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java @@ -6,48 +6,31 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class 
DataFlowDebugSessionsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.dataFlowDebugSessions().deleteWithResponse("jlqdpqkcbflzzdd", "rew", - new DeleteDataFlowDebugSessionRequest().withSessionId("rsufvtmseuqguz"), com.azure.core.util.Context.NONE); + manager.dataFlowDebugSessions() + .deleteWithResponse("nobfcz", "qglzhsfkimzr", + new DeleteDataFlowDebugSessionRequest().withSessionId("okaoqeayokz"), com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java index 977e6db353c83..d7999640dc26b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java @@ -6,60 +6,44 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload; import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandRequest; import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandResponse; import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; 
import java.util.Arrays; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowDebugSessionsExecuteCommandMockTests { @Test public void testExecuteCommand() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - - String responseStr = "{\"status\":\"lnjhoemlwea\",\"data\":\"xmshaugenpi\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DataFlowDebugCommandResponse response = manager.dataFlowDebugSessions().executeCommand("rphdakwwiezeut", "r", - new DataFlowDebugCommandRequest().withSessionId("wmo") - .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY).withCommandPayload( - new DataFlowDebugCommandPayload().withStreamName("wzatvne").withRowLimits(1961533079) - .withColumns(Arrays.asList("qladlpqlwtxshvo", "hhzlmwvc", "hkvafcjektkg", "xrifyr")) - .withExpression("rgiaeqc")), - com.azure.core.util.Context.NONE); - - Assertions.assertEquals("lnjhoemlwea", response.status()); - Assertions.assertEquals("xmshaugenpi", response.data()); + String responseStr = "{\"status\":\"fvx\",\"data\":\"cribmslcgntxdkq\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + DataFlowDebugCommandResponse response = manager.dataFlowDebugSessions() + .executeCommand("rzqkwmmci", "qejbuhspqysdi", + new DataFlowDebugCommandRequest().withSessionId("cbuvhdpi") + .withCommand(DataFlowDebugCommandType.EXECUTE_PREVIEW_QUERY) + .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("vcrrenrbums") + .withRowLimits(1574150737) + .withColumns(Arrays.asList("kyaldb", "skfstdt", "hhrvsgutugjsjmv", "asisnfwpnikr")) + .withExpression("lykmgjf")), + com.azure.core.util.Context.NONE); + + Assertions.assertEquals("fvx", response.status()); + Assertions.assertEquals("cribmslcgntxdkq", response.data()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java index 9d6028a1d6707..e65f801de48bf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java @@ -6,60 +6,42 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DataFlowDebugSessionInfo; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowDebugSessionsQueryByFactoryMockTests { @Test public void testQueryByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"dataFlowName\":\"dijyfiabhepxqtk\",\"computeType\":\"lizno\",\"coreCount\":349109970,\"nodeCount\":1834406842,\"integrationRuntimeName\":\"qythsltodlvwbgbm\",\"sessionId\":\"trsxhiuhgvgno\",\"startTime\":\"sqf\",\"timeToLiveInMinutes\":1683333500,\"lastActivityTime\":\"ppwvieymkguvr\",\"\":{\"nelqcvmvpp\":\"dataproytd\",\"is\":\"datatsolxnhlrpsign\"}}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"dataFlowName\":\"nub\",\"computeType\":\"ofihcgyrngbrfyt\",\"coreCount\":1863481234,\"nodeCount\":1781382045,\"integrationRuntimeName\":\"bsr\",\"sessionId\":\"woeuqqgxzhdl\",\"startTime\":\"amearg\",\"timeToLiveInMinutes\":1203253385,\"lastActivityTime\":\"n\",\"\":{\"ht\":\"datacvvyfrf\"}}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + 
.authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.dataFlowDebugSessions().queryByFactory("ijlvkrnso", "oh", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("dijyfiabhepxqtk", response.iterator().next().dataFlowName()); - Assertions.assertEquals("lizno", response.iterator().next().computeType()); - Assertions.assertEquals(349109970, response.iterator().next().coreCount()); - Assertions.assertEquals(1834406842, response.iterator().next().nodeCount()); - Assertions.assertEquals("qythsltodlvwbgbm", response.iterator().next().integrationRuntimeName()); - Assertions.assertEquals("trsxhiuhgvgno", response.iterator().next().sessionId()); - Assertions.assertEquals("sqf", response.iterator().next().startTime()); - Assertions.assertEquals(1683333500, response.iterator().next().timeToLiveInMinutes()); - Assertions.assertEquals("ppwvieymkguvr", response.iterator().next().lastActivityTime()); + = manager.dataFlowDebugSessions().queryByFactory("xv", "ybidesrzp", com.azure.core.util.Context.NONE); + + Assertions.assertEquals("nub", response.iterator().next().dataFlowName()); + Assertions.assertEquals("ofihcgyrngbrfyt", response.iterator().next().computeType()); + Assertions.assertEquals(1863481234, response.iterator().next().coreCount()); + Assertions.assertEquals(1781382045, response.iterator().next().nodeCount()); + Assertions.assertEquals("bsr", response.iterator().next().integrationRuntimeName()); + Assertions.assertEquals("woeuqqgxzhdl", response.iterator().next().sessionId()); + Assertions.assertEquals("amearg", response.iterator().next().startTime()); + Assertions.assertEquals(1203253385, response.iterator().next().timeToLiveInMinutes()); + Assertions.assertEquals("n", response.iterator().next().lastActivityTime()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java index 75447220b6058..f9f7e04c64322 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java @@ -11,14 +11,14 @@ public final class DataFlowFolderTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DataFlowFolder model = BinaryData.fromString("{\"name\":\"nmfpp\"}").toObject(DataFlowFolder.class); - Assertions.assertEquals("nmfpp", model.name()); + DataFlowFolder model = BinaryData.fromString("{\"name\":\"lhslnelxieixyn\"}").toObject(DataFlowFolder.class); + Assertions.assertEquals("lhslnelxieixyn", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowFolder model = new DataFlowFolder().withName("nmfpp"); + DataFlowFolder model = new DataFlowFolder().withName("lhslnelxieixyn"); model = BinaryData.fromObject(model).toObject(DataFlowFolder.class); - Assertions.assertEquals("nmfpp", model.name()); + Assertions.assertEquals("lhslnelxieixyn", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java index 638f239c6c623..45d2ee007aecd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java @@ -16,29 +16,38 @@ public final class DataFlowListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"eevy\",\"annotations\":[\"datasgzfczbg\"],\"folder\":{\"name\":\"gbeglqgleo\"}},\"name\":\"betnluankrrfxee\",\"type\":\"tijv\",\"etag\":\"vbmqzbqq\",\"id\":\"aj\"},{\"properties\":{\"type\":\"DataFlow\",\"description\":\"wxacevehj\",\"annotations\":[\"dataxoafgaoqltfae\",\"datalinmfgv\"],\"folder\":{\"name\":\"pghriypoqeyhl\"}},\"name\":\"ykprlpyznu\",\"type\":\"qdsmexiit\",\"etag\":\"uxtyasiibmi\",\"id\":\"nnust\"}],\"nextLink\":\"ljhnmgixhcmav\"}") + "{\"value\":[{\"properties\":{\"type\":\"ecwcrojphslhcawj\",\"description\":\"i\",\"annotations\":[\"datafmvigorqjbttzh\",\"dataaglkafhon\",\"datajuj\"],\"folder\":{\"name\":\"kpzvcpopmxelnwc\"}},\"name\":\"yjede\",\"type\":\"mlfmkqs\",\"etag\":\"zuawxtzxpuamwa\",\"id\":\"xrvxcushsphai\"},{\"properties\":{\"type\":\"xyasflvgsgzw\",\"description\":\"akoi\",\"annotations\":[\"datasmjblmljhlnym\",\"dataotqyry\"],\"folder\":{\"name\":\"bmqqvxmvw\"}},\"name\":\"tayx\",\"type\":\"supe\",\"etag\":\"lzqnhcvs\",\"id\":\"tnzoibgsxgnxfy\"},{\"properties\":{\"type\":\"nmpqoxwdofdb\",\"description\":\"qxeiiqbimhtmwwi\",\"annotations\":[\"datahfqpofv\",\"databcblemb\",\"datakbwvqvxkdiv\"],\"folder\":{\"name\":\"ebwtswb\"}},\"name\":\"wfmdurage\",\"type\":\"zvcjfelisdjubggb\",\"etag\":\"gkxkbsazgakg\",\"id\":\"yrcmjdmspofap\"},{\"properties\":{\"type\":\"hryl\",\"description\":\"ofrzgb\",\"annotations\":[\"datadmstk\"],\"folder\":{\"name\":\"v\"}},\"name\":\"c\",\"type\":\"iznk\",\"etag\":\"f\",\"id\":\"snvpdibmi\"}],\"nextLink\":\"stbz\"}") .toObject(DataFlowListResponse.class); - Assertions.assertEquals("aj", model.value().get(0).id()); - Assertions.assertEquals("eevy", model.value().get(0).properties().description()); - Assertions.assertEquals("gbeglqgleo", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("ljhnmgixhcmav", model.nextLink()); + Assertions.assertEquals("xrvxcushsphai", model.value().get(0).id()); + Assertions.assertEquals("i", model.value().get(0).properties().description()); + Assertions.assertEquals("kpzvcpopmxelnwc", model.value().get(0).properties().folder().name()); + Assertions.assertEquals("stbz", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowListResponse model = new DataFlowListResponse().withValue(Arrays.asList( - new DataFlowResourceInner().withId("aj") - .withProperties(new DataFlow().withDescription("eevy").withAnnotations(Arrays.asList("datasgzfczbg")) - .withFolder(new DataFlowFolder().withName("gbeglqgleo"))), - new DataFlowResourceInner().withId("nnust") - .withProperties(new DataFlow().withDescription("wxacevehj") - .withAnnotations(Arrays.asList("dataxoafgaoqltfae", "datalinmfgv")) - .withFolder(new DataFlowFolder().withName("pghriypoqeyhl"))))) - .withNextLink("ljhnmgixhcmav"); + new 
DataFlowResourceInner().withId("xrvxcushsphai") + .withProperties(new DataFlow().withDescription("i") + .withAnnotations(Arrays.asList("datafmvigorqjbttzh", "dataaglkafhon", "datajuj")) + .withFolder(new DataFlowFolder().withName("kpzvcpopmxelnwc"))), + new DataFlowResourceInner().withId("tnzoibgsxgnxfy") + .withProperties(new DataFlow().withDescription("akoi") + .withAnnotations(Arrays.asList("datasmjblmljhlnym", "dataotqyry")) + .withFolder(new DataFlowFolder().withName("bmqqvxmvw"))), + new DataFlowResourceInner().withId("yrcmjdmspofap") + .withProperties(new DataFlow().withDescription("qxeiiqbimhtmwwi") + .withAnnotations(Arrays.asList("datahfqpofv", "databcblemb", "datakbwvqvxkdiv")) + .withFolder(new DataFlowFolder().withName("ebwtswb"))), + new DataFlowResourceInner().withId("snvpdibmi") + .withProperties(new DataFlow().withDescription("ofrzgb") + .withAnnotations(Arrays.asList("datadmstk")) + .withFolder(new DataFlowFolder().withName("v"))))) + .withNextLink("stbz"); model = BinaryData.fromObject(model).toObject(DataFlowListResponse.class); - Assertions.assertEquals("aj", model.value().get(0).id()); - Assertions.assertEquals("eevy", model.value().get(0).properties().description()); - Assertions.assertEquals("gbeglqgleo", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("ljhnmgixhcmav", model.nextLink()); + Assertions.assertEquals("xrvxcushsphai", model.value().get(0).id()); + Assertions.assertEquals("i", model.value().get(0).properties().description()); + Assertions.assertEquals("kpzvcpopmxelnwc", model.value().get(0).properties().folder().name()); + Assertions.assertEquals("stbz", model.nextLink()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java index 37f3d2891051b..319e919221e04 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java @@ -15,21 +15,22 @@ public final class DataFlowReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowReference model = BinaryData.fromString( - "{\"type\":\"DataFlowReference\",\"referenceName\":\"sbede\",\"datasetParameters\":\"dataexkxbhx\",\"parameters\":{\"mnhjevdyzn\":\"datanul\",\"kmq\":\"dataajsvk\",\"iizjixlqfhefkwa\":\"datazzkivyhjr\",\"nlqxsjxtele\":\"datasolronqqlm\"},\"\":{\"oolzqocarkuzl\":\"datauqbo\",\"t\":\"datacnn\"}}") + "{\"type\":\"DataFlowReference\",\"referenceName\":\"dqaolfylnkk\",\"datasetParameters\":\"datapjvlywltmfwo\",\"parameters\":{\"fnqzocr\":\"datawhlwy\"},\"\":{\"ncaqttiekoifu\":\"dataczeuntgx\",\"yri\":\"datanyttzgix\",\"lqtxnrflkndrn\":\"datalgmgbe\"}}") .toObject(DataFlowReference.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.type()); - Assertions.assertEquals("sbede", model.referenceName()); + Assertions.assertEquals("dqaolfylnkk", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowReference model = new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - 
.withReferenceName("sbede").withDatasetParameters("dataexkxbhx").withParameters(mapOf("mnhjevdyzn", - "datanul", "kmq", "dataajsvk", "iizjixlqfhefkwa", "datazzkivyhjr", "nlqxsjxtele", "datasolronqqlm")) + .withReferenceName("dqaolfylnkk") + .withDatasetParameters("datapjvlywltmfwo") + .withParameters(mapOf("fnqzocr", "datawhlwy")) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DataFlowReference.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.type()); - Assertions.assertEquals("sbede", model.referenceName()); + Assertions.assertEquals("dqaolfylnkk", model.referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java index 88401f32a0b4b..404320eb22473 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java @@ -15,22 +15,22 @@ public final class DataFlowResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"qspkcdqzhlctd\",\"annotations\":[\"dataqn\",\"datayfp\",\"datahrqbnjjrcg\",\"datagydcw\"],\"folder\":{\"name\":\"jumvqqolihrraio\"}},\"name\":\"ubrjtl\",\"type\":\"xfuojrn\",\"etag\":\"flrzpas\",\"id\":\"biuimzdlyjdfq\"}") + "{\"properties\":{\"type\":\"dsdaultxijjumf\",\"description\":\"azlnqnmcjngzqdqx\",\"annotations\":[\"datawgnyfusfzsvtui\"],\"folder\":{\"name\":\"ajqglcf\"}},\"name\":\"l\",\"type\":\"ryxynqnzrd\",\"etag\":\"ovw\",\"id\":\"nptgoeiybba\"}") .toObject(DataFlowResourceInner.class); - Assertions.assertEquals("biuimzdlyjdfq", model.id()); - Assertions.assertEquals("qspkcdqzhlctd", model.properties().description()); - Assertions.assertEquals("jumvqqolihrraio", model.properties().folder().name()); + Assertions.assertEquals("nptgoeiybba", model.id()); + Assertions.assertEquals("azlnqnmcjngzqdqx", model.properties().description()); + Assertions.assertEquals("ajqglcf", model.properties().folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowResourceInner model = new DataFlowResourceInner().withId("biuimzdlyjdfq") - .withProperties(new DataFlow().withDescription("qspkcdqzhlctd") - .withAnnotations(Arrays.asList("dataqn", "datayfp", "datahrqbnjjrcg", "datagydcw")) - .withFolder(new DataFlowFolder().withName("jumvqqolihrraio"))); + DataFlowResourceInner model = new DataFlowResourceInner().withId("nptgoeiybba") + .withProperties(new DataFlow().withDescription("azlnqnmcjngzqdqx") + .withAnnotations(Arrays.asList("datawgnyfusfzsvtui")) + .withFolder(new DataFlowFolder().withName("ajqglcf"))); model = BinaryData.fromObject(model).toObject(DataFlowResourceInner.class); - Assertions.assertEquals("biuimzdlyjdfq", model.id()); - Assertions.assertEquals("qspkcdqzhlctd", model.properties().description()); - Assertions.assertEquals("jumvqqolihrraio", model.properties().folder().name()); + Assertions.assertEquals("nptgoeiybba", model.id()); + Assertions.assertEquals("azlnqnmcjngzqdqx", 
model.properties().description()); + Assertions.assertEquals("ajqglcf", model.properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java index 67621137d6b84..158f2f6ab3185 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java @@ -18,43 +18,46 @@ public final class DataFlowSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowSink model = BinaryData.fromString( - "{\"schemaLinkedService\":{\"referenceName\":\"trqrejda\",\"parameters\":{\"lfxlmuifmuadj\":\"dataqimlda\",\"skiioshjgczetybn\":\"datafsn\",\"j\":\"datagztlcgc\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"fjvmy\",\"parameters\":{\"cljkxpyl\":\"dataebecuvlbefv\",\"eypdvrbk\":\"datawoxzgwpsyxji\"}},\"name\":\"rdkdkgaw\",\"description\":\"jxildfkcef\",\"dataset\":{\"referenceName\":\"gzqpjoi\",\"parameters\":{\"entq\":\"datanaybdjnxu\",\"towlhlsycoyb\":\"datantwhymxymulwiv\",\"j\":\"datajasqubf\",\"htfxcpupuki\":\"dataywhjqwmchq\"}},\"linkedService\":{\"referenceName\":\"j\",\"parameters\":{\"osaonhqnamppu\":\"datadlvwtiws\",\"eajbkajlcyizyddc\":\"datatassaekewna\",\"krvfsxxbydes\":\"dataxo\",\"nm\":\"datalvgecpwgoljtz\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"obygoo\",\"datasetParameters\":\"dataqa\",\"parameters\":{\"jfucsaodjnosdkv\":\"datavaz\"},\"\":{\"cd\":\"dataasgmatrnzpd\",\"jktzboimyfpq\":\"dataakt\"}}}") + "{\"schemaLinkedService\":{\"referenceName\":\"wimaaneakhtmh\",\"parameters\":{\"vkhgv\":\"datayanrfvq\",\"ymhcctopuo\":\"dataogxkfnaoa\",\"hquhczygxvhajp\":\"datayrnskb\",\"xqnwhscoz\":\"datae\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mvgxsmpknpwir\",\"parameters\":{\"dnmckap\":\"datafewxqouox\",\"cdgmoqu\":\"datahknqiijge\",\"kyow\":\"dataqih\"}},\"name\":\"tjouwhldxwhi\",\"description\":\"roqksmfxm\",\"dataset\":{\"referenceName\":\"prstvkitbfj\",\"parameters\":{\"rqxw\":\"dataotnplfacqocc\",\"a\":\"datatjtdrhutfd\",\"eh\":\"datatxop\"}},\"linkedService\":{\"referenceName\":\"dkmdzgsszx\",\"parameters\":{\"rlsirnccla\":\"datakbbxuh\",\"suxxc\":\"datavoyn\",\"dsaidjanormovdxx\":\"databmyqjog\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tujmoil\",\"datasetParameters\":\"datawemhdee\",\"parameters\":{\"egrhrh\":\"datalkyozdsfzj\",\"xvgjbfi\":\"dataslejtvxj\"},\"\":{\"moxsa\":\"datanjodfcbjqqwmtq\"}}}") .toObject(DataFlowSink.class); - Assertions.assertEquals("rdkdkgaw", model.name()); - Assertions.assertEquals("jxildfkcef", model.description()); - Assertions.assertEquals("gzqpjoi", model.dataset().referenceName()); - Assertions.assertEquals("j", model.linkedService().referenceName()); + Assertions.assertEquals("tjouwhldxwhi", model.name()); + Assertions.assertEquals("roqksmfxm", model.description()); + Assertions.assertEquals("prstvkitbfj", model.dataset().referenceName()); + Assertions.assertEquals("dkmdzgsszx", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("obygoo", model.flowlet().referenceName()); - 
Assertions.assertEquals("trqrejda", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("fjvmy", model.rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("tujmoil", model.flowlet().referenceName()); + Assertions.assertEquals("wimaaneakhtmh", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("mvgxsmpknpwir", model.rejectedDataLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowSink model = new DataFlowSink().withName("rdkdkgaw").withDescription("jxildfkcef") - .withDataset(new DatasetReference().withReferenceName("gzqpjoi") - .withParameters(mapOf("entq", "datanaybdjnxu", "towlhlsycoyb", "datantwhymxymulwiv", "j", "datajasqubf", - "htfxcpupuki", "dataywhjqwmchq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("j") - .withParameters(mapOf("osaonhqnamppu", "datadlvwtiws", "eajbkajlcyizyddc", "datatassaekewna", - "krvfsxxbydes", "dataxo", "nm", "datalvgecpwgoljtz"))) + DataFlowSink model = new DataFlowSink().withName("tjouwhldxwhi") + .withDescription("roqksmfxm") + .withDataset(new DatasetReference().withReferenceName("prstvkitbfj") + .withParameters(mapOf("rqxw", "dataotnplfacqocc", "a", "datatjtdrhutfd", "eh", "datatxop"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("dkmdzgsszx") + .withParameters( + mapOf("rlsirnccla", "datakbbxuh", "suxxc", "datavoyn", "dsaidjanormovdxx", "databmyqjog"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("obygoo").withDatasetParameters("dataqa") - .withParameters(mapOf("jfucsaodjnosdkv", "datavaz")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("trqrejda").withParameters( - mapOf("lfxlmuifmuadj", "dataqimlda", "skiioshjgczetybn", "datafsn", "j", "datagztlcgc"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("fjvmy") - .withParameters(mapOf("cljkxpyl", "dataebecuvlbefv", "eypdvrbk", "datawoxzgwpsyxji"))); + .withReferenceName("tujmoil") + .withDatasetParameters("datawemhdee") + .withParameters(mapOf("egrhrh", "datalkyozdsfzj", "xvgjbfi", "dataslejtvxj")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("wimaaneakhtmh") + .withParameters(mapOf("vkhgv", "datayanrfvq", "ymhcctopuo", "dataogxkfnaoa", "hquhczygxvhajp", + "datayrnskb", "xqnwhscoz", "datae"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("mvgxsmpknpwir") + .withParameters(mapOf("dnmckap", "datafewxqouox", "cdgmoqu", "datahknqiijge", "kyow", "dataqih"))); model = BinaryData.fromObject(model).toObject(DataFlowSink.class); - Assertions.assertEquals("rdkdkgaw", model.name()); - Assertions.assertEquals("jxildfkcef", model.description()); - Assertions.assertEquals("gzqpjoi", model.dataset().referenceName()); - Assertions.assertEquals("j", model.linkedService().referenceName()); + Assertions.assertEquals("tjouwhldxwhi", model.name()); + Assertions.assertEquals("roqksmfxm", model.description()); + Assertions.assertEquals("prstvkitbfj", model.dataset().referenceName()); + Assertions.assertEquals("dkmdzgsszx", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("obygoo", model.flowlet().referenceName()); - Assertions.assertEquals("trqrejda", 
model.schemaLinkedService().referenceName()); - Assertions.assertEquals("fjvmy", model.rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("tujmoil", model.flowlet().referenceName()); + Assertions.assertEquals("wimaaneakhtmh", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("mvgxsmpknpwir", model.rejectedDataLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java index ce3dd20491bdd..3fc92aa6e7bfd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java @@ -14,19 +14,20 @@ public final class DataFlowSourceSettingTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowSourceSetting model = BinaryData.fromString( - "{\"sourceName\":\"oxgjiuqhibt\",\"rowLimit\":645359051,\"\":{\"ktvqylkmqpzoy\":\"datawjedmurrxxgew\"}}") + "{\"sourceName\":\"yyukphaimmo\",\"rowLimit\":1751961807,\"\":{\"mfsvbpav\":\"dataoshbragapyy\",\"kuma\":\"dataopfppdbwnupgah\",\"hqepvufhbzehewh\":\"datajcaacfdmmcpugm\",\"eaclgschorimk\":\"dataqhnlbqnbld\"}}") .toObject(DataFlowSourceSetting.class); - Assertions.assertEquals("oxgjiuqhibt", model.sourceName()); - Assertions.assertEquals(645359051, model.rowLimit()); + Assertions.assertEquals("yyukphaimmo", model.sourceName()); + Assertions.assertEquals(1751961807, model.rowLimit()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowSourceSetting model = new DataFlowSourceSetting().withSourceName("oxgjiuqhibt").withRowLimit(645359051) + DataFlowSourceSetting model = new DataFlowSourceSetting().withSourceName("yyukphaimmo") + .withRowLimit(1751961807) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DataFlowSourceSetting.class); - Assertions.assertEquals("oxgjiuqhibt", model.sourceName()); - Assertions.assertEquals(645359051, model.rowLimit()); + Assertions.assertEquals("yyukphaimmo", model.sourceName()); + Assertions.assertEquals(1751961807, model.rowLimit()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java index 72f928c332de7..d1120b41947ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java @@ -18,40 +18,41 @@ public final class DataFlowSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowSource model = BinaryData.fromString( - 
"{\"schemaLinkedService\":{\"referenceName\":\"zlpzbtzuyky\",\"parameters\":{\"fp\":\"datafsdyepfnocmbeza\"}},\"name\":\"tga\",\"description\":\"yqejga\",\"dataset\":{\"referenceName\":\"kctgkp\",\"parameters\":{\"fngdyfcixr\":\"dataqzkcyzm\",\"mkahpqha\":\"datalcqvhoejgoiutgw\",\"mip\":\"datayntacihnco\"}},\"linkedService\":{\"referenceName\":\"liqmvlbhikeaq\",\"parameters\":{\"dtsdfjy\":\"datagpomxpu\",\"mpyzgleo\":\"dataesocwiqbuou\",\"bwwzvdajf\":\"datajsb\",\"lwixvtbou\":\"datanncfmaciqgjjrlhi\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tnd\",\"datasetParameters\":\"datagjttbasualapdlnd\",\"parameters\":{\"ixv\":\"dataqb\",\"spugzfeuzjljmph\":\"datalwynpbbfqvzfj\",\"zolgjzmicuydocc\":\"dataky\",\"iadhbatec\":\"dataxshanzb\"},\"\":{\"iucbda\":\"datasdohz\",\"pow\":\"datambwiinjdllwktl\",\"g\":\"datavvqxua\",\"si\":\"dataqwulynkgfcfdru\"}}}") + "{\"schemaLinkedService\":{\"referenceName\":\"jxildfkcef\",\"parameters\":{\"oisfmnaybdjn\":\"datazqp\",\"lwivqt\":\"dataumentqontwhymxym\"}},\"name\":\"wlhlsycoybajasq\",\"description\":\"fajcywhjqwm\",\"dataset\":{\"referenceName\":\"qo\",\"parameters\":{\"kiymjzpwd\":\"dataxcpup\",\"nhqnampp\":\"datavwtiwsmosa\",\"eajbkajlcyizyddc\":\"dataltassaekewna\"}},\"linkedService\":{\"referenceName\":\"od\",\"parameters\":{\"e\":\"datafsxxby\",\"pwgoljt\":\"dataqlvge\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"mxsdobygoog\",\"datasetParameters\":\"dataapjxvazyjfucsao\",\"parameters\":{\"kvibfa\":\"dataos\",\"nzpducdaaktu\":\"datagmat\"},\"\":{\"kpp\":\"datazboimyfpqdo\",\"ikff\":\"datawyytfvpctf\"}}}") .toObject(DataFlowSource.class); - Assertions.assertEquals("tga", model.name()); - Assertions.assertEquals("yqejga", model.description()); - Assertions.assertEquals("kctgkp", model.dataset().referenceName()); - Assertions.assertEquals("liqmvlbhikeaq", model.linkedService().referenceName()); + Assertions.assertEquals("wlhlsycoybajasq", model.name()); + Assertions.assertEquals("fajcywhjqwm", model.description()); + Assertions.assertEquals("qo", model.dataset().referenceName()); + Assertions.assertEquals("od", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("tnd", model.flowlet().referenceName()); - Assertions.assertEquals("zlpzbtzuyky", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("mxsdobygoog", model.flowlet().referenceName()); + Assertions.assertEquals("jxildfkcef", model.schemaLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlowSource model = new DataFlowSource().withName("tga").withDescription("yqejga") - .withDataset(new DatasetReference().withReferenceName("kctgkp").withParameters( - mapOf("fngdyfcixr", "dataqzkcyzm", "mkahpqha", "datalcqvhoejgoiutgw", "mip", "datayntacihnco"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("liqmvlbhikeaq") - .withParameters(mapOf("dtsdfjy", "datagpomxpu", "mpyzgleo", "dataesocwiqbuou", "bwwzvdajf", "datajsb", - "lwixvtbou", "datanncfmaciqgjjrlhi"))) + DataFlowSource model = new DataFlowSource().withName("wlhlsycoybajasq") + .withDescription("fajcywhjqwm") + .withDataset(new DatasetReference().withReferenceName("qo") + .withParameters(mapOf("kiymjzpwd", "dataxcpup", "nhqnampp", "datavwtiwsmosa", "eajbkajlcyizyddc", + "dataltassaekewna"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("od") + .withParameters(mapOf("e", 
"datafsxxby", "pwgoljt", "dataqlvge"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("tnd").withDatasetParameters("datagjttbasualapdlnd") - .withParameters(mapOf("ixv", "dataqb", "spugzfeuzjljmph", "datalwynpbbfqvzfj", "zolgjzmicuydocc", - "dataky", "iadhbatec", "dataxshanzb")) + .withReferenceName("mxsdobygoog") + .withDatasetParameters("dataapjxvazyjfucsao") + .withParameters(mapOf("kvibfa", "dataos", "nzpducdaaktu", "datagmat")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("zlpzbtzuyky") - .withParameters(mapOf("fp", "datafsdyepfnocmbeza"))); + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("jxildfkcef") + .withParameters(mapOf("oisfmnaybdjn", "datazqp", "lwivqt", "dataumentqontwhymxym"))); model = BinaryData.fromObject(model).toObject(DataFlowSource.class); - Assertions.assertEquals("tga", model.name()); - Assertions.assertEquals("yqejga", model.description()); - Assertions.assertEquals("kctgkp", model.dataset().referenceName()); - Assertions.assertEquals("liqmvlbhikeaq", model.linkedService().referenceName()); + Assertions.assertEquals("wlhlsycoybajasq", model.name()); + Assertions.assertEquals("fajcywhjqwm", model.description()); + Assertions.assertEquals("qo", model.dataset().referenceName()); + Assertions.assertEquals("od", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("tnd", model.flowlet().referenceName()); - Assertions.assertEquals("zlpzbtzuyky", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("mxsdobygoog", model.flowlet().referenceName()); + Assertions.assertEquals("jxildfkcef", model.schemaLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java index 051b1cbb7a537..270ea0b0f2dfd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java @@ -15,19 +15,22 @@ public final class DataFlowStagingInfoTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlowStagingInfo model = BinaryData.fromString( - "{\"linkedService\":{\"referenceName\":\"cpzgpxtiv\",\"parameters\":{\"n\":\"datanidibgqjxg\",\"kqmhhaowjr\":\"datahgovfgp\"}},\"folderPath\":\"datavuporqzdfuydzv\"}") + "{\"linkedService\":{\"referenceName\":\"bchaqd\",\"parameters\":{\"qct\":\"dataec\",\"ytxzvtznapxbanno\":\"dataxxdtddmflh\",\"nwvroevytlyokrr\":\"datavoxczytpr\"}},\"folderPath\":\"datauuxvnsasbcry\"}") .toObject(DataFlowStagingInfo.class); - Assertions.assertEquals("cpzgpxtiv", model.linkedService().referenceName()); + Assertions.assertEquals("bchaqd", model.linkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataFlowStagingInfo model = new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("cpzgpxtiv") - .withParameters(mapOf("n", "datanidibgqjxg", 
"kqmhhaowjr", "datahgovfgp"))) - .withFolderPath("datavuporqzdfuydzv"); + .withLinkedService( + new LinkedServiceReference() + .withReferenceName("bchaqd") + .withParameters(mapOf("qct", "dataec", "ytxzvtznapxbanno", "dataxxdtddmflh", "nwvroevytlyokrr", + "datavoxczytpr"))) + .withFolderPath("datauuxvnsasbcry"); model = BinaryData.fromObject(model).toObject(DataFlowStagingInfo.class); - Assertions.assertEquals("cpzgpxtiv", model.linkedService().referenceName()); + Assertions.assertEquals("bchaqd", model.linkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java index f0fa4359742cd..31c3c6a2b79e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java @@ -14,19 +14,19 @@ public final class DataFlowTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataFlow model = BinaryData.fromString( - "{\"type\":\"DataFlow\",\"description\":\"kyoqufdv\",\"annotations\":[\"dataslzojh\",\"datactfnmdxotng\"],\"folder\":{\"name\":\"ugeyzihgrkyuiza\"}}") + "{\"type\":\"fhvfsl\",\"description\":\"ntjlr\",\"annotations\":[\"datakskyrioovzid\",\"dataxwaabzmifrygznmm\"],\"folder\":{\"name\":\"izkzobgo\"}}") .toObject(DataFlow.class); - Assertions.assertEquals("kyoqufdv", model.description()); - Assertions.assertEquals("ugeyzihgrkyuiza", model.folder().name()); + Assertions.assertEquals("ntjlr", model.description()); + Assertions.assertEquals("izkzobgo", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataFlow model - = new DataFlow().withDescription("kyoqufdv").withAnnotations(Arrays.asList("dataslzojh", "datactfnmdxotng")) - .withFolder(new DataFlowFolder().withName("ugeyzihgrkyuiza")); + DataFlow model = new DataFlow().withDescription("ntjlr") + .withAnnotations(Arrays.asList("datakskyrioovzid", "dataxwaabzmifrygznmm")) + .withFolder(new DataFlowFolder().withName("izkzobgo")); model = BinaryData.fromObject(model).toObject(DataFlow.class); - Assertions.assertEquals("kyoqufdv", model.description()); - Assertions.assertEquals("ugeyzihgrkyuiza", model.folder().name()); + Assertions.assertEquals("ntjlr", model.description()); + Assertions.assertEquals("izkzobgo", model.folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java index 547c75b7b5f65..5665ed4db803c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java @@ -6,59 +6,44 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import 
com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DataFlow; import com.azure.resourcemanager.datafactory.models.DataFlowFolder; import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowsCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"lyzbgrg\",\"annotations\":[\"datavkasxs\",\"dataxwgzoyjp\",\"dataelv\",\"datamyboremswqznqovb\"],\"folder\":{\"name\":\"ayiivoixefn\"}},\"name\":\"fewqnznctnmkits\",\"type\":\"uopevqsab\",\"etag\":\"ounugxnz\",\"id\":\"eggalvo\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DataFlowResource response = manager.dataFlows().define("ykommmygmit").withExistingFactory("bypxxe", "ju") - .withProperties(new DataFlow().withDescription("vxvdugnbdeg") - .withAnnotations(Arrays.asList("datahobtxu", "dataebberydeoeyef")) - .withFolder(new DataFlowFolder().withName("bhqieytu"))) - .withIfMatch("srhqzjzuonttfvj").create(); - - Assertions.assertEquals("eggalvo", response.id()); - Assertions.assertEquals("lyzbgrg", response.properties().description()); - Assertions.assertEquals("ayiivoixefn", response.properties().folder().name()); + = "{\"properties\":{\"type\":\"arhk\",\"description\":\"bdxoqqvy\",\"annotations\":[\"dataojmcnyjvmy\",\"dataqaiadspfesujp\"],\"folder\":{\"name\":\"wandrdfe\"}},\"name\":\"ryidxmnqefy\",\"type\":\"ucmpxn\",\"etag\":\"pgbokainm\",\"id\":\"eecmvpwauhlkf\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new 
AzureProfile("", "", AzureEnvironment.AZURE)); + + DataFlowResource response = manager.dataFlows() + .define("kxwnjsbegqhfgt") + .withExistingFactory("xzpvibwvamjqxzj", "ljv") + .withProperties(new DataFlow().withDescription("nkrdqzfkzdrco") + .withAnnotations(Arrays.asList("datazxpapbatok", "datajzrwcmqkkeihlo", "databnytwczqpsvin")) + .withFolder(new DataFlowFolder().withName("dcy"))) + .withIfMatch("gdrzmupusqsxuf") + .create(); + + Assertions.assertEquals("eecmvpwauhlkf", response.id()); + Assertions.assertEquals("bdxoqqvy", response.properties().description()); + Assertions.assertEquals("wandrdfe", response.properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java index 28f933103e6db..97666a5bcba42 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java @@ -6,46 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new 
AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.dataFlows().deleteWithResponse("r", "gbbiuk", "kmthioae", com.azure.core.util.Context.NONE); + manager.dataFlows().deleteWithResponse("geu", "pbbydkzqbefd", "wmsrdqzrrl", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java index 9aefb9f94f9b1..43887e7e768eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java @@ -6,54 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"wvdmwaam\",\"annotations\":[\"dataqsb\"],\"folder\":{\"name\":\"nckeqxtqaomihr\"}},\"name\":\"ksdpc\",\"type\":\"wficzzoxnlvi\",\"etag\":\"dsi\",\"id\":\"zaolzkoyn\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"twuw\",\"description\":\"zjbjzxrr\",\"annotations\":[\"datadtphdu\"],\"folder\":{\"name\":\"x\"}},\"name\":\"agovb\",\"type\":\"uchvq\",\"etag\":\"npxdsdevhzktjmi\",\"id\":\"evthdtqthvthi\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new 
MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); DataFlowResource response = manager.dataFlows() - .getWithResponse("lruxjqqozx", "nowvpdfypd", "syym", "uueeokvq", com.azure.core.util.Context.NONE) + .getWithResponse("drunykntstoen", "ixypyitfcjfgpu", "xvffbzeqsmaqd", "nb", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("zaolzkoyn", response.id()); - Assertions.assertEquals("wvdmwaam", response.properties().description()); - Assertions.assertEquals("nckeqxtqaomihr", response.properties().folder().name()); + Assertions.assertEquals("evthdtqthvthi", response.id()); + Assertions.assertEquals("zjbjzxrr", response.properties().description()); + Assertions.assertEquals("x", response.properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java index 65e720e02f5d2..7f34e07980eb0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java @@ -6,54 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DataFlowsListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"lkvmftrqa\",\"annotations\":[\"dataq\",\"datao\",\"datatqvnkofvz\",\"databddvqtruyzbrkzsa\"],\"folder\":{\"name\":\"nsepdwxflmk\"}},\"name\":\"fidr\",\"type\":\"nuththzejkusz\",\"etag\":\"aehtgzr\",\"id\":\"ctrvvjamxgq\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - 
.thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"type\":\"zydak\",\"description\":\"fayhzfspnq\",\"annotations\":[\"datac\",\"datawzecmldjhydk\",\"dataywr\",\"datargjmcsbeozb\"],\"folder\":{\"name\":\"dqbuh\"}},\"name\":\"ghignjlfnpyxo\",\"type\":\"xmk\",\"etag\":\"fpj\",\"id\":\"lwmq\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.dataFlows().listByFactory("h", "mlcenty", com.azure.core.util.Context.NONE); + = manager.dataFlows().listByFactory("lcfzrmdfehieblpc", "o", com.azure.core.util.Context.NONE); - Assertions.assertEquals("ctrvvjamxgq", response.iterator().next().id()); - Assertions.assertEquals("lkvmftrqa", response.iterator().next().properties().description()); - Assertions.assertEquals("nsepdwxflmk", response.iterator().next().properties().folder().name()); + Assertions.assertEquals("lwmq", response.iterator().next().id()); + Assertions.assertEquals("fayhzfspnq", response.iterator().next().properties().description()); + Assertions.assertEquals("dqbuh", response.iterator().next().properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java index f2159cc40258d..fa77e72eb69a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java @@ -22,57 +22,76 @@ public final class DataLakeAnalyticsUsqlActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataLakeAnalyticsUsqlActivity model = BinaryData.fromString( - 
"{\"type\":\"DataLakeAnalyticsU-SQL\",\"typeProperties\":{\"scriptPath\":\"dataobz\",\"scriptLinkedService\":{\"referenceName\":\"sqpstxulnntj\",\"parameters\":{\"niac\":\"datan\",\"llk\":\"datattdyvifltvwebzf\",\"cerqhp\":\"datanwinqywlvxuxztj\"}},\"degreeOfParallelism\":\"datakxjlyjlkjhmug\",\"priority\":\"datayhpdstlsdgi\",\"parameters\":{\"glzxgwk\":\"dataeqcgunsoikev\",\"qxnkdqsyhmrk\":\"dataykazpd\",\"z\":\"datawkfgvhwkwzxj\",\"xduhydxahjudaz\":\"datastirrhbkzzqwikq\"},\"runtimeVersion\":\"datagsxolwofo\",\"compilationMode\":\"datayludflfxwlwh\"},\"linkedServiceName\":{\"referenceName\":\"ykfcccaujgacckjq\",\"parameters\":{\"xbvg\":\"dataxdbgm\",\"p\":\"datamblntdy\"}},\"policy\":{\"timeout\":\"dataigxefscsrw\",\"retry\":\"datauteusuxvliq\",\"retryIntervalInSeconds\":1459080325,\"secureInput\":false,\"secureOutput\":false,\"\":{\"gizvvtdrjockz\":\"datazzsbqnv\",\"ppjzmpxam\":\"datafnph\"}},\"name\":\"qdostvx\",\"description\":\"fnmnfndrbkko\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qfze\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Failed\",\"Failed\"],\"\":{\"kvn\":\"datadr\",\"arvhzfynbxw\":\"datazumczlknfwslvs\"}}],\"userProperties\":[{\"name\":\"mvlkuvbesrawzxnw\",\"value\":\"datasjygi\"}],\"\":{\"slcnsxhpqey\":\"datafo\",\"chdjarfdfnqfvr\":\"datazydpvv\",\"n\":\"dataxlh\"}}") + "{\"type\":\"pyfhez\",\"typeProperties\":{\"scriptPath\":\"dataqmqqrzeo\",\"scriptLinkedService\":{\"referenceName\":\"o\",\"parameters\":{\"xiwmwrbruuw\":\"dataptmpcpiros\",\"kynfxxldmxms\":\"datah\"}},\"degreeOfParallelism\":\"datadvqgaamcm\",\"priority\":\"dataesyhpzrosuawdlsm\",\"parameters\":{\"pzfjbzosyttur\":\"datanksovvbt\",\"l\":\"datahkpdkwvwxrxmu\",\"ygwwxentudpvsnll\":\"datasagp\",\"pmmtlwrwsgyqwfp\":\"datajbb\"},\"runtimeVersion\":\"datafkyttxgtcov\",\"compilationMode\":\"datacp\"},\"linkedServiceName\":{\"referenceName\":\"ascxmnbenanhzx\",\"parameters\":{\"kfwokzizlaha\":\"datafwcqcxyju\",\"ezmp\":\"datadngh\",\"hnwcq\":\"dataiuzxphhwnvt\"}},\"policy\":{\"timeout\":\"datababmddgqbkngtdaw\",\"retry\":\"dataytengfkr\",\"retryIntervalInSeconds\":21067628,\"secureInput\":true,\"secureOutput\":true,\"\":{\"ggxesxxpnckbhw\":\"datavbwe\",\"exgykriwpxc\":\"datarivjso\"}},\"name\":\"uhqctecmu\",\"description\":\"gcoygznmrscn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"gobaqnhlktdzf\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Completed\"],\"\":{\"rirxlvus\":\"datadvcoghptjvsk\",\"zgrbub\":\"datahd\",\"dxxzoywk\":\"dataorkcplpuddn\",\"s\":\"datalzobhhqefzamgic\"}},{\"activity\":\"xunzqpfgrqmq\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"xggspydmuldlz\":\"datanthceoujfazt\",\"eggtyifmfi\":\"datahcuvxygloxfnzjpg\",\"dbayxdr\":\"datax\",\"nvrbhqxewdcdnqh\":\"dataunooouq\"}},{\"activity\":\"nehpuhlj\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Completed\"],\"\":{\"qczigdwgqko\":\"datapjabs\",\"scrpabaw\":\"databghp\",\"x\":\"datavawmrmwrzmfnjs\",\"vragr\":\"datanst\"}},{\"activity\":\"imunmgtkyzupjn\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Succeeded\",\"Failed\"],\"\":{\"ydaxgwgbpblsasyl\":\"datayxiezm\",\"tkmuttaeobrx\":\"dataxfuzfybpwzgwh\"}}],\"userProperties\":[{\"name\":\"ft\",\"value\":\"datascbgarfbx\"},{\"name\":\"alpig\",\"value\":\"datanshlu\"}],\"\":{\"ncats\":\"datamp\"}}") .toObject(DataLakeAnalyticsUsqlActivity.class); - Assertions.assertEquals("qdostvx", model.name()); - Assertions.assertEquals("fnmnfndrbkko", 
model.description()); + Assertions.assertEquals("uhqctecmu", model.name()); + Assertions.assertEquals("gcoygznmrscn", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qfze", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mvlkuvbesrawzxnw", model.userProperties().get(0).name()); - Assertions.assertEquals("ykfcccaujgacckjq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1459080325, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("sqpstxulnntj", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("gobaqnhlktdzf", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("ft", model.userProperties().get(0).name()); + Assertions.assertEquals("ascxmnbenanhzx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(21067628, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals("o", model.scriptLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataLakeAnalyticsUsqlActivity model = new DataLakeAnalyticsUsqlActivity().withName("qdostvx") - .withDescription("fnmnfndrbkko").withState(ActivityState.INACTIVE) + DataLakeAnalyticsUsqlActivity model = new DataLakeAnalyticsUsqlActivity().withName("uhqctecmu") + .withDescription("gcoygznmrscn") + .withState(ActivityState.INACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("qfze") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("mvlkuvbesrawzxnw").withValue("datasjygi"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ykfcccaujgacckjq") - .withParameters(mapOf("xbvg", "dataxdbgm", "p", "datamblntdy"))) - .withPolicy(new ActivityPolicy() - .withTimeout("dataigxefscsrw").withRetry("datauteusuxvliq").withRetryIntervalInSeconds(1459080325) - .withSecureInput(false).withSecureOutput(false).withAdditionalProperties(mapOf())) - .withScriptPath("dataobz") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("sqpstxulnntj") - .withParameters(mapOf("niac", "datan", "llk", "datattdyvifltvwebzf", "cerqhp", "datanwinqywlvxuxztj"))) - .withDegreeOfParallelism("datakxjlyjlkjhmug").withPriority("datayhpdstlsdgi") - .withParameters(mapOf("glzxgwk", "dataeqcgunsoikev", "qxnkdqsyhmrk", "dataykazpd", "z", "datawkfgvhwkwzxj", - "xduhydxahjudaz", "datastirrhbkzzqwikq")) - .withRuntimeVersion("datagsxolwofo").withCompilationMode("datayludflfxwlwh"); + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("gobaqnhlktdzf") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, 
DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xunzqpfgrqmq") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("nehpuhlj") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("imunmgtkyzupjn") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, + DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("ft").withValue("datascbgarfbx"), + new UserProperty().withName("alpig").withValue("datanshlu"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ascxmnbenanhzx") + .withParameters(mapOf("kfwokzizlaha", "datafwcqcxyju", "ezmp", "datadngh", "hnwcq", "dataiuzxphhwnvt"))) + .withPolicy(new ActivityPolicy().withTimeout("datababmddgqbkngtdaw") + .withRetry("dataytengfkr") + .withRetryIntervalInSeconds(21067628) + .withSecureInput(true) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withScriptPath("dataqmqqrzeo") + .withScriptLinkedService(new LinkedServiceReference().withReferenceName("o") + .withParameters(mapOf("xiwmwrbruuw", "dataptmpcpiros", "kynfxxldmxms", "datah"))) + .withDegreeOfParallelism("datadvqgaamcm") + .withPriority("dataesyhpzrosuawdlsm") + .withParameters(mapOf("pzfjbzosyttur", "datanksovvbt", "l", "datahkpdkwvwxrxmu", "ygwwxentudpvsnll", + "datasagp", "pmmtlwrwsgyqwfp", "datajbb")) + .withRuntimeVersion("datafkyttxgtcov") + .withCompilationMode("datacp"); model = BinaryData.fromObject(model).toObject(DataLakeAnalyticsUsqlActivity.class); - Assertions.assertEquals("qdostvx", model.name()); - Assertions.assertEquals("fnmnfndrbkko", model.description()); + Assertions.assertEquals("uhqctecmu", model.name()); + Assertions.assertEquals("gcoygznmrscn", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qfze", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mvlkuvbesrawzxnw", model.userProperties().get(0).name()); - Assertions.assertEquals("ykfcccaujgacckjq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1459080325, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("sqpstxulnntj", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("gobaqnhlktdzf", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("ft", model.userProperties().get(0).name()); + Assertions.assertEquals("ascxmnbenanhzx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(21067628, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals(true, 
model.policy().secureOutput()); + Assertions.assertEquals("o", model.scriptLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java index 18ff03965117e..334a63b2bf88c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java @@ -15,23 +15,24 @@ public final class DataLakeAnalyticsUsqlActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataLakeAnalyticsUsqlActivityTypeProperties model = BinaryData.fromString( - "{\"scriptPath\":\"databvbdqmjcedfpub\",\"scriptLinkedService\":{\"referenceName\":\"xoohyesmlscvhra\",\"parameters\":{\"xhkdyhdk\":\"databorj\",\"qfde\":\"datafqzud\",\"tplpgf\":\"datag\",\"xlibszcvceg\":\"datazugkfabvek\"}},\"degreeOfParallelism\":\"datazh\",\"priority\":\"datavv\",\"parameters\":{\"vxnumvorosqessp\":\"datadpclazaoytkub\",\"rqspwsiitzbyu\":\"datautk\",\"ovpsflmwduis\":\"datalumqmo\",\"e\":\"datavlunyqe\"},\"runtimeVersion\":\"datarzth\",\"compilationMode\":\"datanrlesghhcfqzmjm\"}") + "{\"scriptPath\":\"dataib\",\"scriptLinkedService\":{\"referenceName\":\"xg\",\"parameters\":{\"gtxpbvmc\":\"datamxhudpjn\"}},\"degreeOfParallelism\":\"datasahpswspyifg\",\"priority\":\"datakaihwdybjgyxbwhu\",\"parameters\":{\"hvcyyvpobcxnrwaz\":\"datahmckzbuadoxlle\"},\"runtimeVersion\":\"datayhaajhllnk\",\"compilationMode\":\"datauwoxnbkcweea\"}") .toObject(DataLakeAnalyticsUsqlActivityTypeProperties.class); - Assertions.assertEquals("xoohyesmlscvhra", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("xg", model.scriptLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DataLakeAnalyticsUsqlActivityTypeProperties model = new DataLakeAnalyticsUsqlActivityTypeProperties() - .withScriptPath("databvbdqmjcedfpub") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("xoohyesmlscvhra").withParameters( - mapOf("xhkdyhdk", "databorj", "qfde", "datafqzud", "tplpgf", "datag", "xlibszcvceg", "datazugkfabvek"))) - .withDegreeOfParallelism("datazh").withPriority("datavv") - .withParameters(mapOf("vxnumvorosqessp", "datadpclazaoytkub", "rqspwsiitzbyu", "datautk", "ovpsflmwduis", - "datalumqmo", "e", "datavlunyqe")) - .withRuntimeVersion("datarzth").withCompilationMode("datanrlesghhcfqzmjm"); + .withScriptPath("dataib") + .withScriptLinkedService( + new LinkedServiceReference().withReferenceName("xg").withParameters(mapOf("gtxpbvmc", "datamxhudpjn"))) + .withDegreeOfParallelism("datasahpswspyifg") + .withPriority("datakaihwdybjgyxbwhu") + .withParameters(mapOf("hvcyyvpobcxnrwaz", "datahmckzbuadoxlle")) + .withRuntimeVersion("datayhaajhllnk") + .withCompilationMode("datauwoxnbkcweea"); model = BinaryData.fromObject(model).toObject(DataLakeAnalyticsUsqlActivityTypeProperties.class); - Assertions.assertEquals("xoohyesmlscvhra", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("xg", 
model.scriptLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java index 1c8e8a7b831d8..dc1aae6fedc90 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java @@ -19,82 +19,143 @@ public final class DataMapperMappingTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DataMapperMapping model = BinaryData.fromString( - "{\"targetEntityName\":\"mhklbnl\",\"sourceEntityName\":\"vcb\",\"sourceConnectionReference\":{\"connectionName\":\"zyqu\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"rp\",\"type\":\"Aggregate\",\"functionName\":\"yuuatvlmbjwcolbm\",\"expression\":\"b\",\"attributeReference\":{\"name\":\"pcpahprzrvxhmtf\",\"entity\":\"cnxzcmj\",\"entityConnectionReference\":{\"connectionName\":\"xnoqrxtdisn\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"mydidwhepfw\",\"entity\":\"jfdoesxxhm\",\"entityConnectionReference\":{}}]},{\"name\":\"bckyoikxk\",\"type\":\"Direct\",\"functionName\":\"gknjzr\",\"expression\":\"t\",\"attributeReference\":{\"name\":\"lvukaobrlb\",\"entity\":\"snbagnchjhg\",\"entityConnectionReference\":{\"connectionName\":\"owa\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"jymxcgqt\",\"entity\":\"drclssoljome\",\"entityConnectionReference\":{}},{\"name\":\"ycnlbvgjcodk\",\"entity\":\"ji\",\"entityConnectionReference\":{}}]}]},\"sourceDenormalizeInfo\":\"datas\"}") + 
"{\"targetEntityName\":\"tkbtnqlrngl\",\"sourceEntityName\":\"biipsnawwlqkznx\",\"sourceConnectionReference\":{\"connectionName\":\"lxri\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"qqoajxeiyglesrw\",\"type\":\"Aggregate\",\"functionName\":\"hdctrceqn\",\"expression\":\"rupobehd\",\"attributeReference\":{\"name\":\"zacvu\",\"entity\":\"pjpbibnzpp\",\"entityConnectionReference\":{\"connectionName\":\"ifexleqirccjcly\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"pjlvczuoda\",\"entity\":\"u\",\"entityConnectionReference\":{}},{\"name\":\"tepdjxqes\",\"entity\":\"ynu\",\"entityConnectionReference\":{}},{\"name\":\"pckaewse\",\"entity\":\"eskwxeg\",\"entityConnectionReference\":{}},{\"name\":\"rgfnz\",\"entity\":\"tmjtsghp\",\"entityConnectionReference\":{}}]},{\"name\":\"cp\",\"type\":\"Aggregate\",\"functionName\":\"zeq\",\"expression\":\"dldtzmpyp\",\"attributeReference\":{\"name\":\"pc\",\"entity\":\"hnuqndaizu\",\"entityConnectionReference\":{\"connectionName\":\"huytuszx\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"egw\",\"entity\":\"ukvzwydwttha\",\"entityConnectionReference\":{}},{\"name\":\"ksk\",\"entity\":\"vb\",\"entityConnectionReference\":{}},{\"name\":\"ajq\",\"entity\":\"kpeexpgeumilh\",\"entityConnectionReference\":{}},{\"name\":\"trdexyionofnin\",\"entity\":\"bzsx\",\"entityConnectionReference\":{}}]},{\"name\":\"qrs\",\"type\":\"Aggregate\",\"functionName\":\"bprt\",\"expression\":\"av\",\"attributeReference\":{\"name\":\"cyksivmfo\",\"entity\":\"rt\",\"entityConnectionReference\":{\"connectionName\":\"mkrftsjcwjjxsgm\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"dxkecifh\",\"entity\":\"jxwkloozrvtx\",\"entityConnectionReference\":{}},{\"name\":\"uf\",\"entity\":\"lcpx\",\"entityConnectionReference\":{}},{\"name\":\"ryeyngjgv\",\"entity\":\"uvp\",\"entityConnectionReference\":{}},{\"name\":\"lpmcrdc\",\"entity\":\"ljtiahxmfqryarv\",\"entityConnectionReference\":{}}]}]},\"sourceDenormalizeInfo\":\"dataqbglcjkaysp\"}") .toObject(DataMapperMapping.class); - Assertions.assertEquals("mhklbnl", model.targetEntityName()); - Assertions.assertEquals("vcb", model.sourceEntityName()); - Assertions.assertEquals("zyqu", model.sourceConnectionReference().connectionName()); + Assertions.assertEquals("tkbtnqlrngl", model.targetEntityName()); + Assertions.assertEquals("biipsnawwlqkznx", model.sourceEntityName()); + Assertions.assertEquals("lxri", model.sourceConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionReference().type()); - Assertions.assertEquals("rp", model.attributeMappingInfo().attributeMappings().get(0).name()); + Assertions.assertEquals("qqoajxeiyglesrw", model.attributeMappingInfo().attributeMappings().get(0).name()); Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("yuuatvlmbjwcolbm", - model.attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("b", model.attributeMappingInfo().attributeMappings().get(0).expression()); - Assertions.assertEquals("pcpahprzrvxhmtf", + Assertions.assertEquals("hdctrceqn", model.attributeMappingInfo().attributeMappings().get(0).functionName()); + Assertions.assertEquals("rupobehd", model.attributeMappingInfo().attributeMappings().get(0).expression()); + Assertions.assertEquals("zacvu", 
model.attributeMappingInfo().attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("cnxzcmj", + Assertions.assertEquals("pjpbibnzpp", model.attributeMappingInfo().attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("xnoqrxtdisn", model.attributeMappingInfo().attributeMappings().get(0) - .attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappingInfo().attributeMappings() - .get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("mydidwhepfw", + Assertions.assertEquals("ifexleqirccjcly", + model.attributeMappingInfo() + .attributeMappings() + .get(0) + .attributeReference() + .entityConnectionReference() + .connectionName()); + Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, + model.attributeMappingInfo() + .attributeMappings() + .get(0) + .attributeReference() + .entityConnectionReference() + .type()); + Assertions.assertEquals("pjlvczuoda", model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("jfdoesxxhm", + Assertions.assertEquals("u", model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).entity()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DataMapperMapping model - = new DataMapperMapping().withTargetEntityName("mhklbnl").withSourceEntityName("vcb") - .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("zyqu") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("rp").withType(MappingType.AGGREGATE) - .withFunctionName("yuuatvlmbjwcolbm").withExpression("b") - .withAttributeReference(new MapperAttributeReference().withName("pcpahprzrvxhmtf") - .withEntity("cnxzcmj") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("xnoqrxtdisn").withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("mydidwhepfw").withEntity("jfdoesxxhm") - .withEntityConnectionReference(new MapperConnectionReference()))), - new MapperAttributeMapping().withName("bckyoikxk").withType(MappingType.DIRECT) - .withFunctionName("gknjzr").withExpression("t") - .withAttributeReference( - new MapperAttributeReference().withName("lvukaobrlb").withEntity("snbagnchjhg") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("owa").withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("jymxcgqt").withEntity("drclssoljome") - .withEntityConnectionReference(new MapperConnectionReference()), - new MapperAttributeReference().withName("ycnlbvgjcodk").withEntity("ji") - .withEntityConnectionReference(new MapperConnectionReference())))))) - .withSourceDenormalizeInfo("datas"); + DataMapperMapping model = new DataMapperMapping().withTargetEntityName("tkbtnqlrngl") + .withSourceEntityName("biipsnawwlqkznx") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("lxri").withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings(Arrays.asList( + new MapperAttributeMapping().withName("qqoajxeiyglesrw") + .withType(MappingType.AGGREGATE) + 
.withFunctionName("hdctrceqn") + .withExpression("rupobehd") + .withAttributeReference(new MapperAttributeReference().withName("zacvu") + .withEntity("pjpbibnzpp") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("ifexleqirccjcly") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences(Arrays.asList( + new MapperAttributeReference().withName("pjlvczuoda") + .withEntity("u") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("tepdjxqes") + .withEntity("ynu") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("pckaewse") + .withEntity("eskwxeg") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("rgfnz") + .withEntity("tmjtsghp") + .withEntityConnectionReference(new MapperConnectionReference()))), + new MapperAttributeMapping().withName("cp") + .withType(MappingType.AGGREGATE) + .withFunctionName("zeq") + .withExpression("dldtzmpyp") + .withAttributeReference(new MapperAttributeReference().withName("pc") + .withEntity("hnuqndaizu") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("huytuszx") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences(Arrays.asList( + new MapperAttributeReference().withName("egw") + .withEntity("ukvzwydwttha") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("ksk") + .withEntity("vb") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("ajq") + .withEntity("kpeexpgeumilh") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("trdexyionofnin") + .withEntity("bzsx") + .withEntityConnectionReference(new MapperConnectionReference()))), + new MapperAttributeMapping().withName("qrs") + .withType(MappingType.AGGREGATE) + .withFunctionName("bprt") + .withExpression("av") + .withAttributeReference(new MapperAttributeReference().withName("cyksivmfo") + .withEntity("rt") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("mkrftsjcwjjxsgm") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences(Arrays.asList( + new MapperAttributeReference().withName("dxkecifh") + .withEntity("jxwkloozrvtx") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("uf") + .withEntity("lcpx") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("ryeyngjgv") + .withEntity("uvp") + .withEntityConnectionReference(new MapperConnectionReference()), + new MapperAttributeReference().withName("lpmcrdc") + .withEntity("ljtiahxmfqryarv") + .withEntityConnectionReference(new MapperConnectionReference())))))) + .withSourceDenormalizeInfo("dataqbglcjkaysp"); model = BinaryData.fromObject(model).toObject(DataMapperMapping.class); - Assertions.assertEquals("mhklbnl", model.targetEntityName()); - Assertions.assertEquals("vcb", model.sourceEntityName()); - Assertions.assertEquals("zyqu", model.sourceConnectionReference().connectionName()); + Assertions.assertEquals("tkbtnqlrngl", model.targetEntityName()); + Assertions.assertEquals("biipsnawwlqkznx", model.sourceEntityName()); + Assertions.assertEquals("lxri", model.sourceConnectionReference().connectionName()); 
Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionReference().type()); - Assertions.assertEquals("rp", model.attributeMappingInfo().attributeMappings().get(0).name()); + Assertions.assertEquals("qqoajxeiyglesrw", model.attributeMappingInfo().attributeMappings().get(0).name()); Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("yuuatvlmbjwcolbm", - model.attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("b", model.attributeMappingInfo().attributeMappings().get(0).expression()); - Assertions.assertEquals("pcpahprzrvxhmtf", + Assertions.assertEquals("hdctrceqn", model.attributeMappingInfo().attributeMappings().get(0).functionName()); + Assertions.assertEquals("rupobehd", model.attributeMappingInfo().attributeMappings().get(0).expression()); + Assertions.assertEquals("zacvu", model.attributeMappingInfo().attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("cnxzcmj", + Assertions.assertEquals("pjpbibnzpp", model.attributeMappingInfo().attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("xnoqrxtdisn", model.attributeMappingInfo().attributeMappings().get(0) - .attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappingInfo().attributeMappings() - .get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("mydidwhepfw", + Assertions.assertEquals("ifexleqirccjcly", + model.attributeMappingInfo() + .attributeMappings() + .get(0) + .attributeReference() + .entityConnectionReference() + .connectionName()); + Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, + model.attributeMappingInfo() + .attributeMappings() + .get(0) + .attributeReference() + .entityConnectionReference() + .type()); + Assertions.assertEquals("pjlvczuoda", model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("jfdoesxxhm", + Assertions.assertEquals("u", model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).entity()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java index 31847d1986a0a..c08bceb019d30 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java @@ -22,63 +22,57 @@ public final class DatabricksNotebookActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksNotebookActivity model = BinaryData.fromString( - 
"{\"type\":\"DatabricksNotebook\",\"typeProperties\":{\"notebookPath\":\"datafvmfyxle\",\"baseParameters\":{\"mqzndlgqtuqjczco\":\"datakiaybdivxvxwd\",\"fc\":\"datactcwtxa\",\"rzsninkhbm\":\"dataqhmsdod\",\"yt\":\"datalfo\"},\"libraries\":[{\"psepkr\":\"datazxm\",\"mndbbpjdgnjmelxk\":\"datagerqzxkpxrfulqh\",\"ogfaiy\":\"datamfmgboyliopb\"}]},\"linkedServiceName\":{\"referenceName\":\"pfebsu\",\"parameters\":{\"mrickuh\":\"datasrxnneqxsdu\",\"os\":\"databrvhrs\",\"mv\":\"datazvclzutvqkoi\"}},\"policy\":{\"timeout\":\"datakqwjlohkaffynyw\",\"retry\":\"dataqbyty\",\"retryIntervalInSeconds\":1675626334,\"secureInput\":false,\"secureOutput\":false,\"\":{\"vxegiufjnjgupjjp\":\"datapaxka\"}},\"name\":\"ba\",\"description\":\"ftiwbdm\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xxgocpzqrbtyza\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"wxoog\":\"datauafxpjqz\",\"tvp\":\"dataxgnplz\",\"fvvi\":\"dataigtnjye\",\"lyyvebpykz\":\"dataxoitnqmiwlrijex\"}},{\"activity\":\"rqusb\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Failed\"],\"\":{\"hkyluqxndmtasx\":\"datanxepapmvvkx\",\"lofkvshozjkwjwv\":\"datanbttefbbr\"}},{\"activity\":\"oh\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Skipped\"],\"\":{\"i\":\"datastyacbekcxe\",\"kpiedcrtvdcbzpyn\":\"datavbzyhexlh\"}},{\"activity\":\"dtsibtdm\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\"],\"\":{\"vjbpyvoswgkbz\":\"dataawehxshamzfxoc\",\"uiags\":\"datameftgvfu\",\"deykv\":\"datavzghnq\",\"gqdcaimsrryt\":\"datafhuihotz\"}}],\"userProperties\":[{\"name\":\"hbpcrdy\",\"value\":\"dataunrajtbum\"}],\"\":{\"vyutcvumvgttjvc\":\"datarsn\",\"jpgxuxkc\":\"datatltt\",\"biaoaqww\":\"datanxkdqqo\"}}") + "{\"type\":\"tdek\",\"typeProperties\":{\"notebookPath\":\"datankgm\",\"baseParameters\":{\"gwsrr\":\"datayfyvodctpczzqusf\",\"lyspk\":\"datagij\",\"khgqsjecccfyc\":\"dataswyaejffvf\",\"r\":\"datawcuhqfxferfza\"},\"libraries\":[{\"obstiob\":\"datayphc\",\"vubszjyttgkps\":\"datar\"}]},\"linkedServiceName\":{\"referenceName\":\"irftlomec\",\"parameters\":{\"efn\":\"datagbgpx\",\"iaqafalbk\":\"dataefavbsbhd\",\"vdhvdv\":\"datamod\"}},\"policy\":{\"timeout\":\"datarkvxmeihrziwn\",\"retry\":\"datapojmgkeoqrxhdsu\",\"retryIntervalInSeconds\":2125623505,\"secureInput\":false,\"secureOutput\":false,\"\":{\"hiegkpukv\":\"dataeikbvax\",\"io\":\"datalcvlbqhtiijli\"}},\"name\":\"wzsmya\",\"description\":\"hjsmkhk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"siizadmbn\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Skipped\",\"Succeeded\"],\"\":{\"vgzjfptprfq\":\"datahkhzwfnsduwtt\",\"gyo\":\"datat\"}}],\"userProperties\":[{\"name\":\"vtfkdz\",\"value\":\"datatkxiyjqbe\"},{\"name\":\"vxebdhpizk\",\"value\":\"dataqkylmfy\"},{\"name\":\"iodcgwbkfcajtxzd\",\"value\":\"dataqthqgngrfzaex\"},{\"name\":\"wftqoifvjfaqa\",\"value\":\"datameskd\"}],\"\":{\"uco\":\"databbdajcbrlnx\"}}") .toObject(DatabricksNotebookActivity.class); - Assertions.assertEquals("ba", model.name()); - Assertions.assertEquals("ftiwbdm", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("xxgocpzqrbtyza", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("hbpcrdy", 
model.userProperties().get(0).name()); - Assertions.assertEquals("pfebsu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1675626334, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("wzsmya", model.name()); + Assertions.assertEquals("hjsmkhk", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("siizadmbn", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vtfkdz", model.userProperties().get(0).name()); + Assertions.assertEquals("irftlomec", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2125623505, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatabricksNotebookActivity model = new DatabricksNotebookActivity().withName("ba").withDescription("ftiwbdm") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xxgocpzqrbtyza") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rqusb") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("oh") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dtsibtdm") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("hbpcrdy").withValue("dataunrajtbum"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pfebsu") - .withParameters(mapOf("mrickuh", "datasrxnneqxsdu", "os", "databrvhrs", "mv", "datazvclzutvqkoi"))) - .withPolicy(new ActivityPolicy() - .withTimeout("datakqwjlohkaffynyw").withRetry("dataqbyty").withRetryIntervalInSeconds(1675626334) - .withSecureInput(false).withSecureOutput(false).withAdditionalProperties(mapOf())) - .withNotebookPath("datafvmfyxle") - .withBaseParameters(mapOf("mqzndlgqtuqjczco", "datakiaybdivxvxwd", "fc", "datactcwtxa", "rzsninkhbm", - "dataqhmsdod", "yt", "datalfo")) - .withLibraries(Arrays.asList( - mapOf("psepkr", "datazxm", "mndbbpjdgnjmelxk", "datagerqzxkpxrfulqh", "ogfaiy", "datamfmgboyliopb"))); + DatabricksNotebookActivity model = new DatabricksNotebookActivity().withName("wzsmya") + .withDescription("hjsmkhk") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("siizadmbn") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + 
.withUserProperties(Arrays.asList(new UserProperty().withName("vtfkdz").withValue("datatkxiyjqbe"), + new UserProperty().withName("vxebdhpizk").withValue("dataqkylmfy"), + new UserProperty().withName("iodcgwbkfcajtxzd").withValue("dataqthqgngrfzaex"), + new UserProperty().withName("wftqoifvjfaqa").withValue("datameskd"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("irftlomec") + .withParameters(mapOf("efn", "datagbgpx", "iaqafalbk", "dataefavbsbhd", "vdhvdv", "datamod"))) + .withPolicy(new ActivityPolicy().withTimeout("datarkvxmeihrziwn") + .withRetry("datapojmgkeoqrxhdsu") + .withRetryIntervalInSeconds(2125623505) + .withSecureInput(false) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withNotebookPath("datankgm") + .withBaseParameters(mapOf("gwsrr", "datayfyvodctpczzqusf", "lyspk", "datagij", "khgqsjecccfyc", + "dataswyaejffvf", "r", "datawcuhqfxferfza")) + .withLibraries(Arrays.asList(mapOf("obstiob", "datayphc", "vubszjyttgkps", "datar"))); model = BinaryData.fromObject(model).toObject(DatabricksNotebookActivity.class); - Assertions.assertEquals("ba", model.name()); - Assertions.assertEquals("ftiwbdm", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("xxgocpzqrbtyza", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("hbpcrdy", model.userProperties().get(0).name()); - Assertions.assertEquals("pfebsu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1675626334, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("wzsmya", model.name()); + Assertions.assertEquals("hjsmkhk", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("siizadmbn", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vtfkdz", model.userProperties().get(0).name()); + Assertions.assertEquals("irftlomec", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2125623505, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java index c884ebac1844c..01b413657d9a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java @@ -14,20 +14,20 @@ public final class DatabricksNotebookActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksNotebookActivityTypeProperties model = BinaryData.fromString( 
- "{\"notebookPath\":\"dataixnuffraursqtj\",\"baseParameters\":{\"akkgqfkigukfximw\":\"databhrdpnzqqtipkrea\",\"eriqendtyccn\":\"datanwaymrlvhl\",\"bizjbwufjogswf\":\"datahszgaub\"},\"libraries\":[{\"etaydhfgxyd\":\"databpypwrvnv\",\"hjsraumawfzyvxk\":\"databsx\",\"ofpsr\":\"datat\",\"ijbolksehtyx\":\"dataoujkcpyerfsngt\"},{\"rzdqqo\":\"datasurfnktxht\",\"cttjibognhuqdkq\":\"datadralt\",\"fppwobhkqgb\":\"dataffcvahknv\"}]}") + "{\"notebookPath\":\"datazjryppv\",\"baseParameters\":{\"bftafrbuvw\":\"datalcczgflogzstcf\",\"dxmmtyumejp\":\"datagwwrclxhveso\",\"b\":\"dataxuiod\",\"dvceuyw\":\"dataaujhox\"},\"libraries\":[{\"jbjdyoccnlvyhiet\":\"datazlcvokvo\"},{\"ihhgpek\":\"datannlsqym\",\"sani\":\"dataulkkhyfiuxdadcdr\",\"ewepoantsr\":\"datacfrtijzsumgzebqb\",\"unedsozjfigi\":\"datapdeewjgjdqla\"},{\"tjjqz\":\"dataswxcvwh\",\"srisfccf\":\"dataixsvoob\",\"mihifrkyvut\":\"datau\"}]}") .toObject(DatabricksNotebookActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DatabricksNotebookActivityTypeProperties model - = new DatabricksNotebookActivityTypeProperties().withNotebookPath("dataixnuffraursqtj") - .withBaseParameters(mapOf("akkgqfkigukfximw", "databhrdpnzqqtipkrea", "eriqendtyccn", "datanwaymrlvhl", - "bizjbwufjogswf", "datahszgaub")) - .withLibraries(Arrays.asList( - mapOf("etaydhfgxyd", "databpypwrvnv", "hjsraumawfzyvxk", "databsx", "ofpsr", "datat", - "ijbolksehtyx", "dataoujkcpyerfsngt"), - mapOf("rzdqqo", "datasurfnktxht", "cttjibognhuqdkq", "datadralt", "fppwobhkqgb", "dataffcvahknv"))); + = new DatabricksNotebookActivityTypeProperties().withNotebookPath("datazjryppv") + .withBaseParameters(mapOf("bftafrbuvw", "datalcczgflogzstcf", "dxmmtyumejp", "datagwwrclxhveso", "b", + "dataxuiod", "dvceuyw", "dataaujhox")) + .withLibraries(Arrays.asList(mapOf("jbjdyoccnlvyhiet", "datazlcvokvo"), + mapOf("ihhgpek", "datannlsqym", "sani", "dataulkkhyfiuxdadcdr", "ewepoantsr", + "datacfrtijzsumgzebqb", "unedsozjfigi", "datapdeewjgjdqla"), + mapOf("tjjqz", "dataswxcvwh", "srisfccf", "dataixsvoob", "mihifrkyvut", "datau"))); model = BinaryData.fromObject(model).toObject(DatabricksNotebookActivityTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java index 305e56dda92c7..7cbe8bc2a893d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java @@ -22,57 +22,69 @@ public final class DatabricksSparkJarActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksSparkJarActivity model = BinaryData.fromString( - 
"{\"type\":\"DatabricksSparkJar\",\"typeProperties\":{\"mainClassName\":\"datajzoi\",\"parameters\":[\"dataizlscnknkukempa\",\"datavajbgpu\"],\"libraries\":[{\"xujl\":\"datakankzyqi\"},{\"oqrutbfkynwwmlzp\":\"datahtrgybfumo\",\"n\":\"datarzazkaljuvm\",\"bfntgsju\":\"datalbzloae\",\"gix\":\"datagueggph\"},{\"qgkvkoynjucmyj\":\"datagvwmvafhriua\"},{\"v\":\"dataafvvnd\"}]},\"linkedServiceName\":{\"referenceName\":\"rqenbgymgjneohx\",\"parameters\":{\"yjs\":\"datazbhgbfsjodsk\",\"rfih\":\"dataxgefk\"}},\"policy\":{\"timeout\":\"datarrchfu\",\"retry\":\"datarqagpjociunndgp\",\"retryIntervalInSeconds\":172680098,\"secureInput\":true,\"secureOutput\":false,\"\":{\"ci\":\"datalqzymivjk\",\"kksywd\":\"datazag\",\"flpuxyakofrsoes\":\"datagzrg\"}},\"name\":\"qttkqcpclootcei\",\"description\":\"smnyfahidlscdow\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qzjtdkojbx\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"sybw\":\"datagdsqcbxkwwn\",\"agrbirvsxubbnbof\":\"datavifgjztzhkhyqjv\",\"aauubkrzc\":\"dataeg\"}},{\"activity\":\"kwwibxjpy\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\",\"Completed\"],\"\":{\"pxbqujnorfxi\":\"dataelujwcy\"}}],\"userProperties\":[{\"name\":\"cggwzvdqpxicpo\",\"value\":\"datazhfz\"}],\"\":{\"ps\":\"dataqpc\",\"fkzu\":\"datardkdomyqbeasbvz\",\"bmfqtnqaqlt\":\"datazqudqgfr\"}}") + "{\"type\":\"caxkivry\",\"typeProperties\":{\"mainClassName\":\"datac\",\"parameters\":[\"dataspyd\",\"dataladfcqkzcizu\",\"datagyljwmefpijwrvbu\",\"datahbwaiswbacgrysjg\"],\"libraries\":[{\"eduxyd\":\"datajupdcmpfwfdc\"},{\"nkhgg\":\"datapfdhfp\"}]},\"linkedServiceName\":{\"referenceName\":\"eqmtetnywgmeiiha\",\"parameters\":{\"jkcqednaeefzlwoh\":\"dataargetncfljwjrp\",\"eiqhbrdcgmyjmc\":\"databaaccglvixflcn\",\"rrvb\":\"datankp\",\"rsandmusud\":\"datavinkkteblrn\"}},\"policy\":{\"timeout\":\"datas\",\"retry\":\"datamzotcpffmik\",\"retryIntervalInSeconds\":1324064750,\"secureInput\":true,\"secureOutput\":false,\"\":{\"onejpjzqb\":\"datavi\",\"nlow\":\"dataut\",\"dqobngjbe\":\"dataatfalhnixo\"}},\"name\":\"bcxnnirnfuv\",\"description\":\"mep\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"svnotbe\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"dtjcyvmsduodpm\":\"datawbtexigm\",\"ocqaejlebcy\":\"dataisggn\",\"g\":\"datapqwucprpw\",\"w\":\"datazdkcike\"}},{\"activity\":\"qial\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"sfhxhulrekr\":\"dataraocep\",\"dnzrcjokgthy\":\"datay\",\"wlguesoivaory\":\"datayzr\"}},{\"activity\":\"fgwo\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Failed\",\"Skipped\"],\"\":{\"tgmfjzqvi\":\"datat\",\"cocsmcqskrjnqaa\":\"datadhixd\",\"eox\":\"datayzjdrkcs\",\"wcqp\":\"datasfztlxqhyyxhzgx\"}},{\"activity\":\"rrmlkrroqsdvxdd\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Succeeded\"],\"\":{\"vswnnsbz\":\"databriykrxaevbur\"}}],\"userProperties\":[{\"name\":\"xbc\",\"value\":\"datakojynkhbtycfjwf\"},{\"name\":\"ia\",\"value\":\"datatsukdoy\"}],\"\":{\"vipxzzcxqdrqsu\":\"datazniekedxvw\",\"ptzqazwybbewjvyr\":\"dataekzqybpoxqwcusl\",\"osmp\":\"dataownbwrnbmcblmzar\",\"abhpdkrjlwrqheh\":\"dataajx\"}}") .toObject(DatabricksSparkJarActivity.class); - Assertions.assertEquals("qttkqcpclootcei", model.name()); - Assertions.assertEquals("smnyfahidlscdow", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("bcxnnirnfuv", model.name()); + 
Assertions.assertEquals("mep", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qzjtdkojbx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cggwzvdqpxicpo", model.userProperties().get(0).name()); - Assertions.assertEquals("rqenbgymgjneohx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(172680098, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("svnotbe", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("xbc", model.userProperties().get(0).name()); + Assertions.assertEquals("eqmtetnywgmeiiha", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1324064750, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatabricksSparkJarActivity model = new DatabricksSparkJarActivity().withName("qttkqcpclootcei") - .withDescription("smnyfahidlscdow").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency() - .withActivity("qzjtdkojbx") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kwwibxjpy") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("cggwzvdqpxicpo").withValue("datazhfz"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rqenbgymgjneohx") - .withParameters(mapOf("yjs", "datazbhgbfsjodsk", "rfih", "dataxgefk"))) - .withPolicy(new ActivityPolicy().withTimeout("datarrchfu").withRetry("datarqagpjociunndgp") - .withRetryIntervalInSeconds(172680098).withSecureInput(true).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withMainClassName("datajzoi").withParameters(Arrays.asList("dataizlscnknkukempa", "datavajbgpu")) - .withLibraries(Arrays.asList( - mapOf("xujl", "datakankzyqi"), mapOf("oqrutbfkynwwmlzp", "datahtrgybfumo", "n", "datarzazkaljuvm", - "bfntgsju", "datalbzloae", "gix", "datagueggph"), - mapOf("qgkvkoynjucmyj", "datagvwmvafhriua"), mapOf("v", "dataafvvnd"))); + DatabricksSparkJarActivity model + = new DatabricksSparkJarActivity().withName("bcxnnirnfuv") + .withDescription("mep") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("svnotbe") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("qial") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fgwo") + 
.withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("rrmlkrroqsdvxdd") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("xbc").withValue("datakojynkhbtycfjwf"), + new UserProperty().withName("ia").withValue("datatsukdoy"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("eqmtetnywgmeiiha") + .withParameters(mapOf("jkcqednaeefzlwoh", "dataargetncfljwjrp", "eiqhbrdcgmyjmc", + "databaaccglvixflcn", "rrvb", "datankp", "rsandmusud", "datavinkkteblrn"))) + .withPolicy(new ActivityPolicy().withTimeout("datas") + .withRetry("datamzotcpffmik") + .withRetryIntervalInSeconds(1324064750) + .withSecureInput(true) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withMainClassName("datac") + .withParameters( + Arrays.asList("dataspyd", "dataladfcqkzcizu", "datagyljwmefpijwrvbu", "datahbwaiswbacgrysjg")) + .withLibraries(Arrays.asList(mapOf("eduxyd", "datajupdcmpfwfdc"), mapOf("nkhgg", "datapfdhfp"))); model = BinaryData.fromObject(model).toObject(DatabricksSparkJarActivity.class); - Assertions.assertEquals("qttkqcpclootcei", model.name()); - Assertions.assertEquals("smnyfahidlscdow", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("bcxnnirnfuv", model.name()); + Assertions.assertEquals("mep", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qzjtdkojbx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cggwzvdqpxicpo", model.userProperties().get(0).name()); - Assertions.assertEquals("rqenbgymgjneohx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(172680098, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("svnotbe", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("xbc", model.userProperties().get(0).name()); + Assertions.assertEquals("eqmtetnywgmeiiha", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1324064750, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java index 1e6a13b6e8b50..2017d5f184465 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java @@ -14,20 +14,21 @@ public final class DatabricksSparkJarActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksSparkJarActivityTypeProperties model = BinaryData.fromString( - "{\"mainClassName\":\"dataxhfphawjov\",\"parameters\":[\"databusyqyf\",\"datatdprbmmf\",\"datateoxgikdcjmb\",\"datar\"],\"libraries\":[{\"dsrwhjhivgeran\":\"dataudegykzdspbjks\",\"r\":\"dataxnafojtqqqc\"},{\"mvzqcgyvz\":\"dataduspxijrr\",\"ybjucfs\":\"datavzsduz\",\"poywymtwh\":\"datapkqpgfyjwx\",\"x\":\"datadgbg\"},{\"gia\":\"datarzhkhmw\",\"nlzalsuj\":\"datarftpgqxnyoakd\"},{\"ekbcedbmfejtdb\":\"datagz\",\"ykjorlrjgrzxaa\":\"dataacnyacjypgbhfzy\",\"eyhalb\":\"dataibhkaqzahjqslsh\"}]}") + "{\"mainClassName\":\"dataaz\",\"parameters\":[\"databpysgzgi\",\"dataoahektwgiumcco\",\"datajxxjaafr\",\"datadhrkhfyaxi\"],\"libraries\":[{\"ogzwwyubkppo\":\"datasimbgvrksjjqqpar\",\"byjfeanbnw\":\"datajyjqemgbkjxuxmkk\",\"fkzlv\":\"dataekpgllezvrvjws\",\"jynvguhqugnqs\":\"datajbsvk\"},{\"wcfinsoi\":\"datarvquwhmnc\",\"gcl\":\"dataxxsybtpqgxz\"},{\"i\":\"dataicnckdxflg\",\"l\":\"datatcer\",\"mrsbgjjuhzf\":\"dataerrpal\"},{\"kwlmittpbivhkdxh\":\"datanabyvmch\",\"jbgvdzzukhlwvvh\":\"datavybxplbdaz\"}]}") .toObject(DatabricksSparkJarActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DatabricksSparkJarActivityTypeProperties model - = new DatabricksSparkJarActivityTypeProperties().withMainClassName("dataxhfphawjov") - .withParameters(Arrays.asList("databusyqyf", "datatdprbmmf", "datateoxgikdcjmb", "datar")) - .withLibraries(Arrays.asList(mapOf("dsrwhjhivgeran", "dataudegykzdspbjks", "r", "dataxnafojtqqqc"), - mapOf("mvzqcgyvz", "dataduspxijrr", "ybjucfs", "datavzsduz", "poywymtwh", "datapkqpgfyjwx", "x", - "datadgbg"), - mapOf("gia", "datarzhkhmw", "nlzalsuj", "datarftpgqxnyoakd"), mapOf("ekbcedbmfejtdb", "datagz", - "ykjorlrjgrzxaa", "dataacnyacjypgbhfzy", "eyhalb", "dataibhkaqzahjqslsh"))); + = new DatabricksSparkJarActivityTypeProperties().withMainClassName("dataaz") + .withParameters(Arrays.asList("databpysgzgi", "dataoahektwgiumcco", "datajxxjaafr", "datadhrkhfyaxi")) + .withLibraries(Arrays.asList( + mapOf("ogzwwyubkppo", "datasimbgvrksjjqqpar", "byjfeanbnw", "datajyjqemgbkjxuxmkk", "fkzlv", + "dataekpgllezvrvjws", "jynvguhqugnqs", "datajbsvk"), + mapOf("wcfinsoi", "datarvquwhmnc", "gcl", "dataxxsybtpqgxz"), + mapOf("i", "dataicnckdxflg", "l", "datatcer", "mrsbgjjuhzf", "dataerrpal"), + mapOf("kwlmittpbivhkdxh", "datanabyvmch", "jbgvdzzukhlwvvh", "datavybxplbdaz"))); model = BinaryData.fromObject(model).toObject(DatabricksSparkJarActivityTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java index d0abb90df610a..4179d92064984 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java @@ -22,59 +22,73 @@ 
public final class DatabricksSparkPythonActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksSparkPythonActivity model = BinaryData.fromString( - "{\"type\":\"DatabricksSparkPython\",\"typeProperties\":{\"pythonFile\":\"datarmgsnffcoa\",\"parameters\":[\"datapa\",\"datazith\",\"datajrtcdavlrifmtk\",\"datawezwkparj\"],\"libraries\":[{\"zexxzkcigykea\":\"datasvj\",\"shdubqhafxl\":\"datarumhzgdsjblag\",\"wbormfnntpocf\":\"dataomwnkhiwqiqx\",\"dohytkhq\":\"datavmzs\"},{\"ehqmt\":\"datadyz\"}]},\"linkedServiceName\":{\"referenceName\":\"wpeaivbzrms\",\"parameters\":{\"fqameccuqkoat\":\"datadwjimrzavci\",\"mdiecrbcvhkkdsyx\":\"datai\",\"zwlbccxj\":\"databddjbzohvpqtxluq\"}},\"policy\":{\"timeout\":\"dataoihjnknfvpa\",\"retry\":\"dataaeeiboqc\",\"retryIntervalInSeconds\":1524667796,\"secureInput\":true,\"secureOutput\":false,\"\":{\"osedxsphfj\":\"datafijmi\",\"relggg\":\"dataxeswz\",\"emtweh\":\"datatsjeolxbggoj\",\"rkrfabffeahypjqa\":\"datauttngatglarczzgu\"}},\"name\":\"ceujuclffpvdjfw\",\"description\":\"bplbtmwae\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"cxh\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"btymhh\":\"datad\",\"pu\":\"datauwcmwixyrv\",\"eihmv\":\"databuvviysg\"}},{\"activity\":\"upqfawwoxqjhm\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Succeeded\",\"Skipped\"],\"\":{\"gfaotokipndekpz\":\"datavsnyn\",\"rotnvxyeqdin\":\"datadrkddzkki\"}}],\"userProperties\":[{\"name\":\"ejtqoxethrxlpgrv\",\"value\":\"datazjxmnsrejqwy\"}],\"\":{\"kdaomxyx\":\"datasmhoviear\"}}") + "{\"type\":\"hymd\",\"typeProperties\":{\"pythonFile\":\"datackijv\",\"parameters\":[\"datagzkfjq\",\"databbpjlrvxryjxjd\"],\"libraries\":[{\"twzgbuh\":\"datanjalccix\",\"mfufs\":\"datarwqrfejznz\"},{\"tlcqaafuwxehoza\":\"datasjiojv\",\"y\":\"databgcbdkq\",\"vgowkak\":\"datavtimyccdognhw\",\"jiykwbytuzhcpx\":\"datajns\"}]},\"linkedServiceName\":{\"referenceName\":\"zlyoiyovc\",\"parameters\":{\"edowmhgzrrikvyu\":\"dataalveavuzjezraj\"}},\"policy\":{\"timeout\":\"dataopdeqqfyc\",\"retry\":\"dataupxf\",\"retryIntervalInSeconds\":1522713628,\"secureInput\":false,\"secureOutput\":true,\"\":{\"ilgamxnj\":\"dataptqbwn\",\"pnsbbhdjeegllcy\":\"dataw\"}},\"name\":\"ukfmkq\",\"description\":\"zvxknyg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"xjyxhwvnyup\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"lcr\":\"datanuddlazvsmnx\",\"oxehynur\":\"databymgfwdxukm\"}},{\"activity\":\"w\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"o\":\"datamda\",\"ym\":\"datajnvmfm\",\"pjxdi\":\"datalztorvw\",\"kbcouavo\":\"dataiutdz\"}},{\"activity\":\"fmgtxzvy\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"znxd\":\"datarxjqpbbj\",\"kjirti\":\"dataiwaaumy\",\"gonrrarznlrr\":\"databvyud\"}},{\"activity\":\"sexaejbmtoun\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Completed\",\"Completed\"],\"\":{\"upishcvsjaaedsqf\":\"datasedxiigwxzwqjpu\",\"romhsias\":\"dataulndywghnptfvolj\"}}],\"userProperties\":[{\"name\":\"pelq\",\"value\":\"datakwc\"},{\"name\":\"pmsyhrvifurg\",\"value\":\"dataxhoqfvuqimdgk\"},{\"name\":\"fghc\",\"value\":\"dataiipnszrrmq\"}],\"\":{\"yoffglwmk\":\"datayawtdsnvxhxkmdec\",\"lqnzxsdbfbkqi\":\"databxusn\",\"ngrdu\":\"dataehxmztf\"}}") .toObject(DatabricksSparkPythonActivity.class); - Assertions.assertEquals("ceujuclffpvdjfw", model.name()); - Assertions.assertEquals("bplbtmwae", model.description()); + 
Assertions.assertEquals("ukfmkq", model.name()); + Assertions.assertEquals("zvxknyg", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("cxh", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ejtqoxethrxlpgrv", model.userProperties().get(0).name()); - Assertions.assertEquals("wpeaivbzrms", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1524667796, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("xjyxhwvnyup", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("pelq", model.userProperties().get(0).name()); + Assertions.assertEquals("zlyoiyovc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1522713628, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatabricksSparkPythonActivity model = new DatabricksSparkPythonActivity().withName("ceujuclffpvdjfw") - .withDescription("bplbtmwae").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("cxh") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("upqfawwoxqjhm") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ejtqoxethrxlpgrv").withValue("datazjxmnsrejqwy"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wpeaivbzrms") - .withParameters(mapOf("fqameccuqkoat", "datadwjimrzavci", "mdiecrbcvhkkdsyx", "datai", "zwlbccxj", - "databddjbzohvpqtxluq"))) - .withPolicy(new ActivityPolicy().withTimeout("dataoihjnknfvpa").withRetry("dataaeeiboqc") - .withRetryIntervalInSeconds(1524667796).withSecureInput(true).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withPythonFile("datarmgsnffcoa") - .withParameters(Arrays.asList("datapa", "datazith", "datajrtcdavlrifmtk", "datawezwkparj")) - .withLibraries(Arrays.asList(mapOf("zexxzkcigykea", "datasvj", "shdubqhafxl", "datarumhzgdsjblag", - "wbormfnntpocf", "dataomwnkhiwqiqx", "dohytkhq", "datavmzs"), mapOf("ehqmt", "datadyz"))); + DatabricksSparkPythonActivity model + = new DatabricksSparkPythonActivity().withName("ukfmkq") + .withDescription("zvxknyg") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("xjyxhwvnyup") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, 
DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("w") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fmgtxzvy") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("sexaejbmtoun") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("pelq").withValue("datakwc"), + new UserProperty().withName("pmsyhrvifurg").withValue("dataxhoqfvuqimdgk"), + new UserProperty().withName("fghc").withValue("dataiipnszrrmq"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zlyoiyovc") + .withParameters(mapOf("edowmhgzrrikvyu", "dataalveavuzjezraj"))) + .withPolicy(new ActivityPolicy().withTimeout("dataopdeqqfyc") + .withRetry("dataupxf") + .withRetryIntervalInSeconds(1522713628) + .withSecureInput(false) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withPythonFile("datackijv") + .withParameters(Arrays.asList("datagzkfjq", "databbpjlrvxryjxjd")) + .withLibraries(Arrays.asList(mapOf("twzgbuh", "datanjalccix", "mfufs", "datarwqrfejznz"), + mapOf("tlcqaafuwxehoza", "datasjiojv", "y", "databgcbdkq", "vgowkak", "datavtimyccdognhw", + "jiykwbytuzhcpx", "datajns"))); model = BinaryData.fromObject(model).toObject(DatabricksSparkPythonActivity.class); - Assertions.assertEquals("ceujuclffpvdjfw", model.name()); - Assertions.assertEquals("bplbtmwae", model.description()); + Assertions.assertEquals("ukfmkq", model.name()); + Assertions.assertEquals("zvxknyg", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("cxh", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ejtqoxethrxlpgrv", model.userProperties().get(0).name()); - Assertions.assertEquals("wpeaivbzrms", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1524667796, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("xjyxhwvnyup", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("pelq", model.userProperties().get(0).name()); + Assertions.assertEquals("zlyoiyovc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1522713628, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java index dc8bffd92afef..35349677a4b9a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java @@ -14,21 +14,16 @@ public final class DatabricksSparkPythonActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatabricksSparkPythonActivityTypeProperties model = BinaryData.fromString( - "{\"pythonFile\":\"databen\",\"parameters\":[\"dataxhxfsknmr\"],\"libraries\":[{\"wbwoflfnii\":\"datafbdxwywdyqp\",\"cyvtsgopmatu\":\"datalohftmfmxrxrya\",\"oelqfsfxthcdzeu\":\"datatejipqynrlnqo\",\"f\":\"datakqkvfthbnikoybrs\"},{\"iszoxmzvlofzdn\":\"dataqmtzhikuby\",\"vxrqegk\":\"datasrvlbwzujafcx\",\"deuvsbsdcoq\":\"datacxtxgrhaqbst\"},{\"vsijwrjrxccnf\":\"dataffeqlwk\",\"q\":\"dataknxuoccey\"},{\"ksgejmpkqtjacnbe\":\"datahkqmlld\",\"ov\":\"dataiqhpkaam\",\"uoqbclhn\":\"databv\",\"kzhqpkckwaafu\":\"dataqxuxrggx\"}]}") + "{\"pythonFile\":\"dataozdzbhtfmgpio\",\"parameters\":[\"datapbjhhuimgdfoh\",\"dataeeu\",\"datatfavmdp\"],\"libraries\":[{\"paqj\":\"datacvwewognpu\",\"fb\":\"datarqvjwlritsxuxre\"}]}") .toObject(DatabricksSparkPythonActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatabricksSparkPythonActivityTypeProperties model = new DatabricksSparkPythonActivityTypeProperties() - .withPythonFile("databen").withParameters(Arrays.asList("dataxhxfsknmr")) - .withLibraries(Arrays.asList( - mapOf("wbwoflfnii", "datafbdxwywdyqp", "cyvtsgopmatu", "datalohftmfmxrxrya", "oelqfsfxthcdzeu", - "datatejipqynrlnqo", "f", "datakqkvfthbnikoybrs"), - mapOf("iszoxmzvlofzdn", "dataqmtzhikuby", "vxrqegk", "datasrvlbwzujafcx", "deuvsbsdcoq", - "datacxtxgrhaqbst"), - mapOf("vsijwrjrxccnf", "dataffeqlwk", "q", "dataknxuoccey"), mapOf("ksgejmpkqtjacnbe", "datahkqmlld", - "ov", "dataiqhpkaam", "uoqbclhn", "databv", "kzhqpkckwaafu", "dataqxuxrggx"))); + DatabricksSparkPythonActivityTypeProperties model + = new DatabricksSparkPythonActivityTypeProperties().withPythonFile("dataozdzbhtfmgpio") + .withParameters(Arrays.asList("datapbjhhuimgdfoh", "dataeeu", "datatfavmdp")) + .withLibraries(Arrays.asList(mapOf("paqj", "datacvwewognpu", "fb", "datarqvjwlritsxuxre"))); model = BinaryData.fromObject(model).toObject(DatabricksSparkPythonActivityTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java index 6c14d6bc666e1..9862da367b76d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java @@ -12,14 +12,16 @@ public final class DatasetCompressionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DatasetCompression model = 
BinaryData.fromString( - "{\"type\":\"datagzancoinmphy\",\"level\":\"dataqidkltvdh\",\"\":{\"bydhuih\":\"datafbxweiibntojovfn\",\"kehwvumosqirca\":\"dataouwudhuaorh\",\"cv\":\"dataqprlobruge\"}}") + DatasetCompression model = BinaryData + .fromString( + "{\"type\":\"datavrqoemwsi\",\"level\":\"dataailwdqmqfyd\",\"\":{\"i\":\"datamfxlkdlgwgnamkuu\"}}") .toObject(DatasetCompression.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetCompression model = new DatasetCompression().withType("datagzancoinmphy").withLevel("dataqidkltvdh") + DatasetCompression model = new DatasetCompression().withType("datavrqoemwsi") + .withLevel("dataailwdqmqfyd") .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DatasetCompression.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java index 7624f3255af4b..388d4328b14b1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java @@ -20,35 +20,39 @@ public final class DatasetDebugResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatasetDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Dataset\",\"description\":\"qmjxlyyzglgouwtl\",\"structure\":\"datajyuojqtobaxkjeyt\",\"schema\":\"datalbfjkwr\",\"linkedServiceName\":{\"referenceName\":\"snkq\",\"parameters\":{\"qunjqh\":\"datay\"}},\"parameters\":{\"ifmjnn\":{\"type\":\"Float\",\"defaultValue\":\"dataulkpakd\"},\"yirdhlisngwflqq\":{\"type\":\"String\",\"defaultValue\":\"dataqabpxuckpggqow\"}},\"annotations\":[\"datazruwn\",\"dataqxpxiwfcngjsaa\",\"dataiixtmkzj\",\"datakv\"],\"folder\":{\"name\":\"hgfgrwsd\"},\"\":{\"bglbyvict\":\"dataatzv\"}},\"name\":\"brxkjzwr\"}") + "{\"properties\":{\"type\":\"jxdfzantkwcegya\",\"description\":\"bnseqacjjvpilg\",\"structure\":\"dataoq\",\"schema\":\"datagmditgueiookjbs\",\"linkedServiceName\":{\"referenceName\":\"hrtdtpdelq\",\"parameters\":{\"gdirazf\":\"datalmotoebnfxofvcj\",\"bmdujtmvcopexc\":\"dataxejw\"}},\"parameters\":{\"hlkyqltqsrog\":{\"type\":\"Float\",\"defaultValue\":\"datau\"},\"dfvclglxnfu\":{\"type\":\"Float\",\"defaultValue\":\"datakffdjktsys\"},\"sfikayiansharuj\":{\"type\":\"Object\",\"defaultValue\":\"datakbusqo\"},\"qhjpenuygbqe\":{\"type\":\"Float\",\"defaultValue\":\"dataqxfzyjqttvwk\"}},\"annotations\":[\"datakewvnqv\",\"datadlguaucmfdjwn\",\"dataaxpunjqikczvv\",\"datatacgxmfc\"],\"folder\":{\"name\":\"rxhtvso\"},\"\":{\"ruuuybnch\":\"datawntsjgqrsxy\",\"qyggagfl\":\"dataszizoyuelyetndnb\",\"byrplrohkpig\":\"datalgmtrwahzjmucf\"}},\"name\":\"usuckzmkwklsno\"}") .toObject(DatasetDebugResource.class); - Assertions.assertEquals("brxkjzwr", model.name()); - Assertions.assertEquals("qmjxlyyzglgouwtl", model.properties().description()); - Assertions.assertEquals("snkq", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("ifmjnn").type()); - Assertions.assertEquals("hgfgrwsd", model.properties().folder().name()); + 
Assertions.assertEquals("usuckzmkwklsno", model.name()); + Assertions.assertEquals("bnseqacjjvpilg", model.properties().description()); + Assertions.assertEquals("hrtdtpdelq", model.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("hlkyqltqsrog").type()); + Assertions.assertEquals("rxhtvso", model.properties().folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetDebugResource model = new DatasetDebugResource().withName("brxkjzwr") - .withProperties(new Dataset().withDescription("qmjxlyyzglgouwtl").withStructure("datajyuojqtobaxkjeyt") - .withSchema("datalbfjkwr") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("snkq").withParameters(mapOf("qunjqh", "datay"))) - .withParameters(mapOf("ifmjnn", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataulkpakd"), - "yirdhlisngwflqq", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqabpxuckpggqow"))) - .withAnnotations(Arrays.asList("datazruwn", "dataqxpxiwfcngjsaa", "dataiixtmkzj", "datakv")) - .withFolder(new DatasetFolder().withName("hgfgrwsd")) - .withAdditionalProperties(mapOf("type", "Dataset"))); + DatasetDebugResource model = new DatasetDebugResource().withName("usuckzmkwklsno") + .withProperties(new Dataset().withDescription("bnseqacjjvpilg") + .withStructure("dataoq") + .withSchema("datagmditgueiookjbs") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hrtdtpdelq") + .withParameters(mapOf("gdirazf", "datalmotoebnfxofvcj", "bmdujtmvcopexc", "dataxejw"))) + .withParameters(mapOf("hlkyqltqsrog", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datau"), "dfvclglxnfu", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakffdjktsys"), + "sfikayiansharuj", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakbusqo"), + "qhjpenuygbqe", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqxfzyjqttvwk"))) + .withAnnotations(Arrays.asList("datakewvnqv", "datadlguaucmfdjwn", "dataaxpunjqikczvv", "datatacgxmfc")) + .withFolder(new DatasetFolder().withName("rxhtvso")) + .withAdditionalProperties(mapOf("type", "jxdfzantkwcegya"))); model = BinaryData.fromObject(model).toObject(DatasetDebugResource.class); - Assertions.assertEquals("brxkjzwr", model.name()); - Assertions.assertEquals("qmjxlyyzglgouwtl", model.properties().description()); - Assertions.assertEquals("snkq", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("ifmjnn").type()); - Assertions.assertEquals("hgfgrwsd", model.properties().folder().name()); + Assertions.assertEquals("usuckzmkwklsno", model.name()); + Assertions.assertEquals("bnseqacjjvpilg", model.properties().description()); + Assertions.assertEquals("hrtdtpdelq", model.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("hlkyqltqsrog").type()); + Assertions.assertEquals("rxhtvso", model.properties().folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java index 9a1b172e92b5a..525859ef1a26e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java @@ -11,14 +11,14 @@ public final class DatasetFolderTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DatasetFolder model = BinaryData.fromString("{\"name\":\"eyvpnqicvinvkj\"}").toObject(DatasetFolder.class); - Assertions.assertEquals("eyvpnqicvinvkj", model.name()); + DatasetFolder model = BinaryData.fromString("{\"name\":\"tpp\"}").toObject(DatasetFolder.class); + Assertions.assertEquals("tpp", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetFolder model = new DatasetFolder().withName("eyvpnqicvinvkj"); + DatasetFolder model = new DatasetFolder().withName("tpp"); model = BinaryData.fromObject(model).toObject(DatasetFolder.class); - Assertions.assertEquals("eyvpnqicvinvkj", model.name()); + Assertions.assertEquals("tpp", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java index d8c1605715a81..5216f257466f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java @@ -21,59 +21,80 @@ public final class DatasetListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatasetListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"uxvypomgkopkwh\",\"structure\":\"datav\",\"schema\":\"datajqg\",\"linkedServiceName\":{\"referenceName\":\"ysmocmbqfqvmkcxo\",\"parameters\":{\"kcbcue\":\"datavhelxprglyatdd\",\"hos\":\"datarjxgciqib\",\"ibahwflus\":\"datasdqrhzoymibmrq\"}},\"parameters\":{\"piexpbtgiw\":{\"type\":\"Object\",\"defaultValue\":\"datarkwofyyvoqa\"},\"tdtkcn\":{\"type\":\"Object\",\"defaultValue\":\"dataenwash\"},\"i\":{\"type\":\"Float\",\"defaultValue\":\"databpokulpiujwaasip\"},\"rpqlp\":{\"type\":\"Int\",\"defaultValue\":\"datayuq\"}},\"annotations\":[\"dataciuqgbdb\",\"datat\",\"datauvfbtkuwh\",\"datamhykojoxafnndl\"],\"folder\":{\"name\":\"hkoymkcdyhbp\"},\"\":{\"xywsuws\":\"datawdreqnovvqfovl\",\"aeneqnzarrwl\":\"datarsndsytgadgvra\",\"jfqka\":\"datauu\"}},\"name\":\"wiipfpub\",\"type\":\"bwwift\",\"etag\":\"qkvpuvksgplsakn\",\"id\":\"fsynljphuop\"},{\"properties\":{\"type\":\"Dataset\",\"description\":\"dlqiyntorzih\",\"structure\":\"dataosjswsr\",\"schema\":\"datalyzrpzbchckqqzqi\",\"linkedServiceName\":{\"referenceName\":\"xiy\",\"parameters\":{\"ynkedyatrwyhqmib\":\"datai\",\"mnzgmwznmabi\":\"datayhwitsmypyynpcdp\",\"wwrlkdmtncv\":\"datansorgjhxbldt\",\"xdy\":\"datakotl\"}},\"parameters\":{\"hadoocrk\":{\"type\":\"Array\",\"defaultValue\":\"datacogjltdtbn\"},\"gxqquezik\":{\"type\":\"Object\",\"defaultValue\":\"datakhnvpam\"},\"lla\":{\"type\":\"Int\",\"defaultValue\":\"datagxk\"},\"ccjzkzivgvv\":{\"type\":\"Bool\",\"defaultValue\":\"datalwuip\"}},\"annotations\":[\"datay\",\"datahyrnxxmu\"],\"folder\":{\"name\":\"ndrdvstkwq\"},\"\":{\"ygdvwv\":\"datahealmfmtda\"}},\"name\":\"iohgwxrtfud\",\"type\":\"pxgy\",\"etag\":\"gvr\",\"id\":\"npkukghimdblx\"}],\"nextLink\":\"imfnjhfjx\"}") + "{\"value\":[{\"properties\":{\"type\":\"auwzizxbmpgc\",\"description\":\"fuzmuvpbtt\",\"structure\":\"datamorppxebmnzbtbh\",\"schema\":\"dataglkfg\",\"linkedServiceName\":{\"referenceName\":\"hdneuelfph\",\"parameters\":{\"quuvxzxcl\":\"datahtozfikdow\",\"sg\":\"dataithhqzon\",\"c\":\"datab\",\"acffgdkzzewkfvhq\":\"datahfwdsjnkaljutiis\"}},\"parameters\":{\"uflrwd\":{\"type\":\"Object\",\"defaultValue\":\"datavpnpp\"}},\"annotations\":[\"datalxyjr\",\"datasag\"],\"folder\":{\"name\":\"nihgwqapnedg\"},\"\":{\"vdrhvoo\":\"datavkcvqvpkeqd\",\"dopcjwvnh\":\"datasotbob\",\"mgxcxrslpm\":\"datald\",\"qsluicp\":\"datatwuoegrpkhjwni\"}},\"name\":\"gk\",\"type\":\"lvmbmpaxmodfvuef\",\"etag\":\"sbpfvmwyhr\",\"id\":\"uyfta\"},{\"properties\":{\"type\":\"cpwi\",\"description\":\"vqtmnub\",\"structure\":\"datakpzksmondjmq\",\"schema\":\"datavypomgkopkwho\",\"linkedServiceName\":{\"referenceName\":\"v\",\"parameters\":{\"mocmbqfqvmk\":\"dataqgxy\"}},\"parameters\":{\"yat\":{\"type\":\"SecureString\",\"defaultValue\":\"datapvhelxprg\"}},\"annotations\":[\"datakcbcue\"],\"folder\":{\"name\":\"xgc\"},\"\":{\"sxsdqrhzoymibm\":\"databrh\",\"hwflu\":\"dataqyib\",\"voqacpiexpbt\":\"datazdtmhrkwofy\"}},\"name\":\"wbwo\",\"type\":\"washr\",\"etag\":\"tkcnqxwb\",\"id\":\"kulpiujwaasi\"},{\"properties\":{\"type\":\"i\",\"description\":\"byuqerpqlp\",\"structure\":\"datacciuqgbdbutau\",\"schema\":\"databtkuwhh\",\"linkedServiceName\":{\"referenceName\":\"hykojoxafnndlpic\",\"parameters\":{\"kkpwdreqnovvq\":\"dataymkcdyhb\"}},\"parameters\":{\"rsndsytgadgvra\":{\"type\":\"Array\",\"defaultValue\":\"dataxywsuws\"},\"arrwlquu\":{\"type\":\"SecureString\",\"defaultValue\":\"dataneqn\"}},\"annotations\":[\"dataqkacewii\"],\"folder\":{\"name\":\"ubjibww\"},\"\":{\"kn\":\"dataohqkvpuvksg
pls\",\"synljphuopxodl\":\"datan\",\"sjswsrms\":\"dataiyntorzihle\",\"ox\":\"datayzrpzbchckqqzq\"}},\"name\":\"suiizynkedyat\",\"type\":\"yhqmibzyhwi\",\"etag\":\"mypyynpcdpu\",\"id\":\"zgmwznmabikns\"},{\"properties\":{\"type\":\"gj\",\"description\":\"bldtlww\",\"structure\":\"datakdmtncvokotll\",\"schema\":\"datayhgsy\",\"linkedServiceName\":{\"referenceName\":\"cogjltdtbn\",\"parameters\":{\"ocrkvcikh\":\"datad\",\"qgxqquezikyw\":\"datavpa\",\"lla\":\"datagxk\"}},\"parameters\":{\"ivgvvcna\":{\"type\":\"String\",\"defaultValue\":\"datauipiccjzk\"},\"rdvstkwqqtch\":{\"type\":\"Bool\",\"defaultValue\":\"datayrnxxmueedn\"}},\"annotations\":[\"datamfmtdaaygdvw\",\"datagpiohgwxrtfudxe\",\"dataxg\"],\"folder\":{\"name\":\"gvr\"},\"\":{\"dblx\":\"datapkukghi\",\"fnjhfjxwmszkkfo\":\"datawi\",\"kzikfjawneaivxwc\":\"datarey\",\"fatkld\":\"dataelpcirelsfeaenwa\"}},\"name\":\"bjhwuaan\",\"type\":\"jos\",\"etag\":\"youlp\",\"id\":\"v\"}],\"nextLink\":\"glrvimjwosytxi\"}") .toObject(DatasetListResponse.class); - Assertions.assertEquals("fsynljphuop", model.value().get(0).id()); - Assertions.assertEquals("uxvypomgkopkwh", model.value().get(0).properties().description()); - Assertions.assertEquals("ysmocmbqfqvmkcxo", - model.value().get(0).properties().linkedServiceName().referenceName()); + Assertions.assertEquals("uyfta", model.value().get(0).id()); + Assertions.assertEquals("fuzmuvpbtt", model.value().get(0).properties().description()); + Assertions.assertEquals("hdneuelfph", model.value().get(0).properties().linkedServiceName().referenceName()); Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("piexpbtgiw").type()); - Assertions.assertEquals("hkoymkcdyhbp", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("imfnjhfjx", model.nextLink()); + model.value().get(0).properties().parameters().get("uflrwd").type()); + Assertions.assertEquals("nihgwqapnedg", model.value().get(0).properties().folder().name()); + Assertions.assertEquals("glrvimjwosytxi", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DatasetListResponse model = new DatasetListResponse().withValue(Arrays.asList( - new DatasetResourceInner().withId("fsynljphuop").withProperties(new Dataset() - .withDescription("uxvypomgkopkwh").withStructure("datav").withSchema("datajqg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ysmocmbqfqvmkcxo") - .withParameters(mapOf("kcbcue", "datavhelxprglyatdd", "hos", "datarjxgciqib", "ibahwflus", - "datasdqrhzoymibmrq"))) - .withParameters(mapOf("piexpbtgiw", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datarkwofyyvoqa"), - "tdtkcn", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataenwash"), "i", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("databpokulpiujwaasip"), - "rpqlp", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayuq"))) - .withAnnotations(Arrays.asList("dataciuqgbdb", "datat", "datauvfbtkuwh", "datamhykojoxafnndl")) - .withFolder(new DatasetFolder().withName("hkoymkcdyhbp")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetResourceInner().withId("npkukghimdblx").withProperties(new Dataset() - .withDescription("dlqiyntorzih").withStructure("dataosjswsr").withSchema("datalyzrpzbchckqqzqi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xiy") - 
.withParameters(mapOf("ynkedyatrwyhqmib", "datai", "mnzgmwznmabi", "datayhwitsmypyynpcdp", - "wwrlkdmtncv", "datansorgjhxbldt", "xdy", "datakotl"))) - .withParameters(mapOf("hadoocrk", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datacogjltdtbn"), - "gxqquezik", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakhnvpam"), "lla", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datagxk"), "ccjzkzivgvv", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalwuip"))) - .withAnnotations(Arrays.asList("datay", "datahyrnxxmu")) - .withFolder(new DatasetFolder().withName("ndrdvstkwq")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) - .withNextLink("imfnjhfjx"); + new DatasetResourceInner().withId("uyfta") + .withProperties(new Dataset().withDescription("fuzmuvpbtt") + .withStructure("datamorppxebmnzbtbh") + .withSchema("dataglkfg") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hdneuelfph") + .withParameters(mapOf("quuvxzxcl", "datahtozfikdow", "sg", "dataithhqzon", "c", "datab", + "acffgdkzzewkfvhq", "datahfwdsjnkaljutiis"))) + .withParameters(mapOf("uflrwd", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavpnpp"))) + .withAnnotations(Arrays.asList("datalxyjr", "datasag")) + .withFolder(new DatasetFolder().withName("nihgwqapnedg")) + .withAdditionalProperties(mapOf("type", "auwzizxbmpgc"))), + new DatasetResourceInner().withId("kulpiujwaasi") + .withProperties(new Dataset().withDescription("vqtmnub") + .withStructure("datakpzksmondjmq") + .withSchema("datavypomgkopkwho") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("v") + .withParameters(mapOf("mocmbqfqvmk", "dataqgxy"))) + .withParameters(mapOf("yat", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datapvhelxprg"))) + .withAnnotations(Arrays.asList("datakcbcue")) + .withFolder(new DatasetFolder().withName("xgc")) + .withAdditionalProperties(mapOf("type", "cpwi"))), + new DatasetResourceInner().withId("zgmwznmabikns") + .withProperties(new Dataset().withDescription("byuqerpqlp") + .withStructure("datacciuqgbdbutau") + .withSchema("databtkuwhh") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hykojoxafnndlpic") + .withParameters(mapOf("kkpwdreqnovvq", "dataymkcdyhb"))) + .withParameters(mapOf("rsndsytgadgvra", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxywsuws"), + "arrwlquu", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("dataneqn"))) + .withAnnotations(Arrays.asList("dataqkacewii")) + .withFolder(new DatasetFolder().withName("ubjibww")) + .withAdditionalProperties(mapOf("type", "i"))), + new DatasetResourceInner().withId("v") + .withProperties(new Dataset().withDescription("bldtlww") + .withStructure("datakdmtncvokotll") + .withSchema("datayhgsy") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cogjltdtbn") + .withParameters(mapOf("ocrkvcikh", "datad", "qgxqquezikyw", "datavpa", "lla", "datagxk"))) + .withParameters(mapOf("ivgvvcna", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datauipiccjzk"), + "rdvstkwqqtch", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datayrnxxmueedn"))) + .withAnnotations(Arrays.asList("datamfmtdaaygdvw", "datagpiohgwxrtfudxe", "dataxg")) + .withFolder(new 
DatasetFolder().withName("gvr")) + .withAdditionalProperties(mapOf("type", "gj"))))) + .withNextLink("glrvimjwosytxi"); model = BinaryData.fromObject(model).toObject(DatasetListResponse.class); - Assertions.assertEquals("fsynljphuop", model.value().get(0).id()); - Assertions.assertEquals("uxvypomgkopkwh", model.value().get(0).properties().description()); - Assertions.assertEquals("ysmocmbqfqvmkcxo", - model.value().get(0).properties().linkedServiceName().referenceName()); + Assertions.assertEquals("uyfta", model.value().get(0).id()); + Assertions.assertEquals("fuzmuvpbtt", model.value().get(0).properties().description()); + Assertions.assertEquals("hdneuelfph", model.value().get(0).properties().linkedServiceName().referenceName()); Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("piexpbtgiw").type()); - Assertions.assertEquals("hkoymkcdyhbp", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("imfnjhfjx", model.nextLink()); + model.value().get(0).properties().parameters().get("uflrwd").type()); + Assertions.assertEquals("nihgwqapnedg", model.value().get(0).properties().folder().name()); + Assertions.assertEquals("glrvimjwosytxi", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java index 348bec0c0352d..f34d94833338f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java @@ -13,14 +13,15 @@ public final class DatasetLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatasetLocation model = BinaryData.fromString( - "{\"type\":\"DatasetLocation\",\"folderPath\":\"datasx\",\"fileName\":\"dataofuworimmovzwde\",\"\":{\"elgwewi\":\"datamvhzfovanyrvaprt\",\"j\":\"datafyaqandmymnqo\"}}") + "{\"type\":\"ee\",\"folderPath\":\"datamjenvjeatea\",\"fileName\":\"datacrxoxdj\",\"\":{\"ryyinz\":\"datan\",\"bvxvza\":\"datadolrndw\",\"mcxqqxmyzklao\":\"dataedoyqxlunkft\"}}") .toObject(DatasetLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetLocation model = new DatasetLocation().withFolderPath("datasx").withFileName("dataofuworimmovzwde") - .withAdditionalProperties(mapOf("type", "DatasetLocation")); + DatasetLocation model = new DatasetLocation().withFolderPath("datamjenvjeatea") + .withFileName("datacrxoxdj") + .withAdditionalProperties(mapOf("type", "ee")); model = BinaryData.fromObject(model).toObject(DatasetLocation.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java index 70f32eb794e65..18592c6c9c09f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java @@ -13,18 +13,18 @@ public final class DatasetReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DatasetReference model = BinaryData.fromString( - "{\"referenceName\":\"ezxlhdjzqdca\",\"parameters\":{\"iybmrzoep\":\"datapsozjiihj\",\"gv\":\"dataxwdvwnj\",\"ursqf\":\"datanmx\"}}") + DatasetReference model = BinaryData + .fromString("{\"referenceName\":\"mnitmujd\",\"parameters\":{\"xfzuvrzmzqmzj\":\"dataclyymffhmjpddn\"}}") .toObject(DatasetReference.class); - Assertions.assertEquals("ezxlhdjzqdca", model.referenceName()); + Assertions.assertEquals("mnitmujd", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetReference model = new DatasetReference().withReferenceName("ezxlhdjzqdca") - .withParameters(mapOf("iybmrzoep", "datapsozjiihj", "gv", "dataxwdvwnj", "ursqf", "datanmx")); + DatasetReference model = new DatasetReference().withReferenceName("mnitmujd") + .withParameters(mapOf("xfzuvrzmzqmzj", "dataclyymffhmjpddn")); model = BinaryData.fromObject(model).toObject(DatasetReference.class); - Assertions.assertEquals("ezxlhdjzqdca", model.referenceName()); + Assertions.assertEquals("mnitmujd", model.referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java index 4b97137e77bd1..5a4480aad3c3d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java @@ -20,32 +20,34 @@ public final class DatasetResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatasetResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Dataset\",\"description\":\"szkkfoqre\",\"structure\":\"datakzikfjawneaivxwc\",\"schema\":\"datalpcirelsf\",\"linkedServiceName\":{\"referenceName\":\"aenwabf\",\"parameters\":{\"nozj\":\"datalddxbjhwua\"}},\"parameters\":{\"ag\":{\"type\":\"Array\",\"defaultValue\":\"dataoulpjrv\"}},\"annotations\":[\"dataimjwosyt\",\"dataitc\"],\"folder\":{\"name\":\"cktqumiekkezzi\"},\"\":{\"hdgqggeb\":\"datayf\"}},\"name\":\"nyga\",\"type\":\"idb\",\"etag\":\"atpxl\",\"id\":\"xcyjmoadsuvarmy\"}") + "{\"properties\":{\"type\":\"skfc\",\"description\":\"qumiek\",\"structure\":\"datazzikhlyfjhdg\",\"schema\":\"datagebdunygaeq\",\"linkedServiceName\":{\"referenceName\":\"db\",\"parameters\":{\"xllrxcyjm\":\"datat\",\"su\":\"dataa\"}},\"parameters\":{\"rw\":{\"type\":\"String\",\"defaultValue\":\"datawdmjsjqbjhhyx\"}},\"annotations\":[\"dataoduhp\"],\"folder\":{\"name\":\"gymare\"},\"\":{\"ubeddg\":\"datajxqugjhky\"}},\"name\":\"ofwq\",\"type\":\"qal\",\"etag\":\"mnjijpxacqqudf\",\"id\":\"yxbaaabjyvayf\"}") .toObject(DatasetResourceInner.class); - Assertions.assertEquals("xcyjmoadsuvarmy", model.id()); - Assertions.assertEquals("szkkfoqre", model.properties().description()); - Assertions.assertEquals("aenwabf", model.properties().linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.ARRAY, model.properties().parameters().get("ag").type()); - Assertions.assertEquals("cktqumiekkezzi", model.properties().folder().name()); + Assertions.assertEquals("yxbaaabjyvayf", model.id()); + Assertions.assertEquals("qumiek", model.properties().description()); + Assertions.assertEquals("db", model.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.properties().parameters().get("rw").type()); + Assertions.assertEquals("gymare", model.properties().folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetResourceInner model = new DatasetResourceInner().withId("xcyjmoadsuvarmy").withProperties(new Dataset() - .withDescription("szkkfoqre").withStructure("datakzikfjawneaivxwc").withSchema("datalpcirelsf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aenwabf") - .withParameters(mapOf("nozj", "datalddxbjhwua"))) - .withParameters( - mapOf("ag", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataoulpjrv"))) - .withAnnotations(Arrays.asList("dataimjwosyt", "dataitc")) - .withFolder(new DatasetFolder().withName("cktqumiekkezzi")) - .withAdditionalProperties(mapOf("type", "Dataset"))); + DatasetResourceInner model = new DatasetResourceInner().withId("yxbaaabjyvayf") + .withProperties(new Dataset().withDescription("qumiek") + .withStructure("datazzikhlyfjhdg") + .withSchema("datagebdunygaeq") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("db") + .withParameters(mapOf("xllrxcyjm", "datat", "su", "dataa"))) + .withParameters(mapOf("rw", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datawdmjsjqbjhhyx"))) + .withAnnotations(Arrays.asList("dataoduhp")) + .withFolder(new DatasetFolder().withName("gymare")) + .withAdditionalProperties(mapOf("type", "skfc"))); model = BinaryData.fromObject(model).toObject(DatasetResourceInner.class); - Assertions.assertEquals("xcyjmoadsuvarmy", model.id()); - Assertions.assertEquals("szkkfoqre", model.properties().description()); - Assertions.assertEquals("aenwabf", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.properties().parameters().get("ag").type()); - Assertions.assertEquals("cktqumiekkezzi", model.properties().folder().name()); + Assertions.assertEquals("yxbaaabjyvayf", model.id()); + Assertions.assertEquals("qumiek", model.properties().description()); + Assertions.assertEquals("db", model.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.properties().parameters().get("rw").type()); + Assertions.assertEquals("gymare", model.properties().folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java index 830e8166fc698..2d228314fac2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java @@ -12,15 +12,16 @@ public final class DatasetSchemaDataElementTests 
{ @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DatasetSchemaDataElement model = BinaryData.fromString( - "{\"name\":\"datamgsdaluyckhefrbh\",\"type\":\"datauerbgpxebjl\",\"\":{\"tnsewou\":\"dataaytujraxdtpryjm\",\"s\":\"dataly\",\"lmpctwj\":\"datavyljurkeposehqq\",\"erxxxoteehkhowgo\":\"datadsdlzmk\"}}") + DatasetSchemaDataElement model = BinaryData + .fromString("{\"name\":\"datavbennmfkbpjnrt\",\"type\":\"dataw\",\"\":{\"syiurzt\":\"datahropmdu\"}}") .toObject(DatasetSchemaDataElement.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetSchemaDataElement model = new DatasetSchemaDataElement().withName("datamgsdaluyckhefrbh") - .withType("datauerbgpxebjl").withAdditionalProperties(mapOf()); + DatasetSchemaDataElement model = new DatasetSchemaDataElement().withName("datavbennmfkbpjnrt") + .withType("dataw") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(DatasetSchemaDataElement.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java index 63c7a7df3b79f..d306fc166d7a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java @@ -13,14 +13,15 @@ public final class DatasetStorageFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DatasetStorageFormat model = BinaryData.fromString( - "{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datavhxowpcbapnpxra\",\"deserializer\":\"datawbmpspfeylqloc\",\"\":{\"slavxjfiuof\":\"datajexayglxrkgjnmzp\",\"sqywjopacky\":\"dataieidzlv\",\"kmfngpmillxgjs\":\"dataydv\"}}") + "{\"type\":\"tjhffecqkoqy\",\"serializer\":\"dataergaghpuzx\",\"deserializer\":\"datayehhfdyldhg\",\"\":{\"iyuq\":\"datazfz\"}}") .toObject(DatasetStorageFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DatasetStorageFormat model = new DatasetStorageFormat().withSerializer("datavhxowpcbapnpxra") - .withDeserializer("datawbmpspfeylqloc").withAdditionalProperties(mapOf("type", "DatasetStorageFormat")); + DatasetStorageFormat model = new DatasetStorageFormat().withSerializer("dataergaghpuzx") + .withDeserializer("datayehhfdyldhg") + .withAdditionalProperties(mapOf("type", "tjhffecqkoqy")); model = BinaryData.fromObject(model).toObject(DatasetStorageFormat.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java index 03d6f93a2362a..8cf77cfa93bc0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java @@ -19,30 +19,34 @@ public final class DatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Dataset model = 
BinaryData.fromString( - "{\"type\":\"Dataset\",\"description\":\"mjsjqb\",\"structure\":\"datahyxxrwlycoduhpk\",\"schema\":\"datagymare\",\"linkedServiceName\":{\"referenceName\":\"n\",\"parameters\":{\"dgssofwqmzqal\":\"dataqugjhkycube\",\"cqqudf\":\"datarmnjijpx\",\"ayffim\":\"databyxbaaabjy\",\"gsexne\":\"datazrtuzq\"}},\"parameters\":{\"ewzsyyceuzsoib\":{\"type\":\"Int\",\"defaultValue\":\"datanw\"}},\"annotations\":[\"datapfrxtrthzvay\",\"datadwkqbrq\",\"databpaxhexiilivpdt\",\"datairqtdqoa\"],\"folder\":{\"name\":\"uzf\"},\"\":{\"zwl\":\"datauyfxrxxleptramxj\",\"tdooaoj\":\"datanwxuqlcvydyp\"}}") + "{\"type\":\"m\",\"description\":\"rtuzqogs\",\"structure\":\"datanevfdnw\",\"schema\":\"datamewzsyyc\",\"linkedServiceName\":{\"referenceName\":\"uzsoi\",\"parameters\":{\"qbrqubpaxhexiili\":\"datadpfrxtrthzvaytdw\"}},\"parameters\":{\"oruzfgsquyfxrxx\":{\"type\":\"String\",\"defaultValue\":\"datairqtdqoa\"},\"lwnwxuqlcvydyp\":{\"type\":\"SecureString\",\"defaultValue\":\"datatramxjez\"}},\"annotations\":[\"dataooaojkniodkooebw\",\"dataujhemmsbvdkcrodt\",\"datainfwjlfltkacjve\",\"datakdlfoa\"],\"folder\":{\"name\":\"kfpagao\"},\"\":{\"jnsjervtiagxsd\":\"datalpqblylsyxk\",\"beyvpnqicvinvkjj\":\"datazuempsbzkf\",\"yhmlwpaztzp\":\"datadxrbuukzcle\",\"ckw\":\"datafn\"}}") .toObject(Dataset.class); - Assertions.assertEquals("mjsjqb", model.description()); - Assertions.assertEquals("n", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ewzsyyceuzsoib").type()); - Assertions.assertEquals("uzf", model.folder().name()); + Assertions.assertEquals("rtuzqogs", model.description()); + Assertions.assertEquals("uzsoi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oruzfgsquyfxrxx").type()); + Assertions.assertEquals("kfpagao", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Dataset model = new Dataset().withDescription("mjsjqb").withStructure("datahyxxrwlycoduhpk") - .withSchema("datagymare") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("n") - .withParameters(mapOf("dgssofwqmzqal", "dataqugjhkycube", "cqqudf", "datarmnjijpx", "ayffim", - "databyxbaaabjy", "gsexne", "datazrtuzq"))) - .withParameters(mapOf("ewzsyyceuzsoib", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datanw"))) - .withAnnotations(Arrays.asList("datapfrxtrthzvay", "datadwkqbrq", "databpaxhexiilivpdt", "datairqtdqoa")) - .withFolder(new DatasetFolder().withName("uzf")).withAdditionalProperties(mapOf("type", "Dataset")); + Dataset model = new Dataset().withDescription("rtuzqogs") + .withStructure("datanevfdnw") + .withSchema("datamewzsyyc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uzsoi") + .withParameters(mapOf("qbrqubpaxhexiili", "datadpfrxtrthzvaytdw"))) + .withParameters(mapOf("oruzfgsquyfxrxx", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datairqtdqoa"), + "lwnwxuqlcvydyp", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datatramxjez"))) + .withAnnotations( + Arrays.asList("dataooaojkniodkooebw", "dataujhemmsbvdkcrodt", "datainfwjlfltkacjve", "datakdlfoa")) + .withFolder(new DatasetFolder().withName("kfpagao")) + .withAdditionalProperties(mapOf("type", "m")); model = BinaryData.fromObject(model).toObject(Dataset.class); - Assertions.assertEquals("mjsjqb", model.description()); - 
Assertions.assertEquals("n", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ewzsyyceuzsoib").type()); - Assertions.assertEquals("uzf", model.folder().name()); + Assertions.assertEquals("rtuzqogs", model.description()); + Assertions.assertEquals("uzsoi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oruzfgsquyfxrxx").type()); + Assertions.assertEquals("kfpagao", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java index eb20c20e39cac..bf4fe4b897a2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java @@ -6,11 +6,9 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.Dataset; import com.azure.resourcemanager.datafactory.models.DatasetFolder; @@ -18,7 +16,6 @@ import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -26,58 +23,43 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DatasetsCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"type\":\"Dataset\",\"description\":\"kvqqtf\",\"structure\":\"dataxoxweuoklwt\",\"schema\":\"datacxndheocjcjocun\",\"linkedServiceName\":{\"referenceName\":\"nwutverplh\",\"parameters\":{\"bhltr\":\"datadvdqcohjwzy\",\"wpudvuphizztklk\":\"databhlhyqgfimllrad\",\"mdefkp\":\"datahdeeht\"}},\"parameters\":{\"opcnnpjulpw\":{\"type\":\"String\",\"defaultValue\":\"datavya\"},\"zjazepbjukikd\":{\"type\":\"Array\",\"defaultValue\":\"datawlwcurkf\"}},\"annotations\":[\"datauxmpnugujiw\",\"dataduns\"],\"folder\":{\"name\":\"obanxshltfghy\"},\"\":{\"vjbfgkqudx\":\"dataruqrobknenpybus\",\"kgawnaeoe\":\"datajrndbinqqr\",\"pvaxyi\":\"dataid\"}},\"name\":\"zpfdoetetis\",\"type\":\"xeclwl\",\"etag\":\"drdnfmxomup\",\"id\":\"pcxiv\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"gfcbatflrpbg\",\"description\":\"dhnmyyagoebytkg\",\"structure\":\"datavbydmeraegfyr\",\"schema\":\"datalnbdtkojmmcnlsfo\",\"linkedServiceName\":{\"referenceName\":\"rywsavdijbi\",\"parameters\":{\"umz\":\"dataw\",\"nfjdgfmes\":\"datanjxnoqxgfvgpimtn\"}},\"parameters\":{\"d\":{\"type\":\"Object\",\"defaultValue\":\"dataofqbaeoozjncurn\"},\"dw\":{\"type\":\"Float\",\"defaultValue\":\"datalxktseaaha\"}},\"annotations\":[\"dataqwopjn\",\"dataa\"],\"folder\":{\"name\":\"xqpmdojbmxjohu\"},\"\":{\"jtre\":\"datanbi\",\"wiautvehpvlm\":\"datablw\",\"f\":\"databnlmzeqhqfr\"}},\"name\":\"ubtrtaipjifedowc\",\"type\":\"csg\",\"etag\":\"qyx\",\"id\":\"utqnvnet\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - DatasetResource response = manager.datasets().define("jweelkvikig") - .withExistingFactory("zapuunuyokftd", "wezplnzvrh") - .withProperties(new Dataset().withDescription("rkwgsq").withStructure("datarpcxwthkljktujfc") - .withSchema("dataqfryketwrzx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("miy") - .withParameters(mapOf("yqnjfjyppix", "datag", "lwyet", "dataubkfjt"))) - .withParameters(mapOf("civanlypspnj", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datasp"), - "aixrbwbkrsmkeiun", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataoangrl"), - "tzzmcrmhhfcai", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datamedzfox"))) - .withAnnotations(Arrays.asList("dataiyuzufd", "datasbvygnfljvraqko", "datacozfauhn", "dataxdy")) - 
.withFolder(new DatasetFolder().withName("grzcjpkzmhaxtwji")) - .withAdditionalProperties(mapOf("type", "Dataset"))) - .withIfMatch("vvagvqrwrchwd").create(); + DatasetResource response = manager.datasets() + .define("btnuqdde") + .withExistingFactory("utixykjlypmvo", "hfnsfbntdnoth") + .withProperties(new Dataset().withDescription("mnsunxfdpulpn") + .withStructure("datayxbbdxnnaeyczbky") + .withSchema("datapnbdzjuq") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nqyusvgrba") + .withParameters(mapOf("dzhafpbce", "dataxtkuchbn"))) + .withParameters(mapOf("ixsalgzz", + new ParameterSpecification().withType(ParameterType.ARRAY) + .withDefaultValue("dataountffqaehbumoqi"))) + .withAnnotations(Arrays.asList("datakdcagmmegukef")) + .withFolder(new DatasetFolder().withName("g")) + .withAdditionalProperties(mapOf("type", "khq"))) + .withIfMatch("vpvzfvegumsqd") + .create(); - Assertions.assertEquals("pcxiv", response.id()); - Assertions.assertEquals("kvqqtf", response.properties().description()); - Assertions.assertEquals("nwutverplh", response.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, response.properties().parameters().get("opcnnpjulpw").type()); - Assertions.assertEquals("obanxshltfghy", response.properties().folder().name()); + Assertions.assertEquals("utqnvnet", response.id()); + Assertions.assertEquals("dhnmyyagoebytkg", response.properties().description()); + Assertions.assertEquals("rywsavdijbi", response.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, response.properties().parameters().get("d").type()); + Assertions.assertEquals("xqpmdojbmxjohu", response.properties().folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java index 8ad0e80132ce5..1c897f9e76f72 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DatasetsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - 
Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.datasets().deleteWithResponse("dqfynrdagmihxjpf", "zpuibczlr", "wfrmqbmcmg", - com.azure.core.util.Context.NONE); + manager.datasets().deleteWithResponse("pkkifjtxfdy", "ymijadh", "kapfqj", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java index 2d461ea0dbbc8..86a9d923545d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java @@ -6,56 +6,39 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DatasetResource; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DatasetsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"type\":\"Dataset\",\"description\":\"cwtnzf\",\"structure\":\"dataghnfqwjwwhsf\",\"schema\":\"dataxlbclvp\",\"linkedServiceName\":{\"referenceName\":\"butyrsravsscbls\",\"parameters\":{\"yoilnixwxwaquuvb\":\"datacafgdtuzclfbvv\"}},\"parameters\":{\"zoibip\":{\"type\":\"String\",\"defaultValue\":\"datafeslxwlm\"},\"ufjahu\":{\"type\":\"Bool\",\"defaultValue\":\"datalomidvicd\"},\"klths\":{\"type\":\"Float\",\"defaultValue\":\"databdt\"}},\"annotations\":[\"datanxd\",\"datalovkt\",\"datafdipsshxxo\"],\"folder\":{\"name\":\"dryysv\"},\"\":{\"bfnkj\":\"datazs\",\"pbirltzyuahnlx\":\"datawty\",\"wqmzezf\":\"datadpjssdtysnl\",\"eukqioqhp\":\"datafjjjzcxtzkoloos\"}},\"name\":\"qkkacw\",\"type\":\"qmxkxfmwbrvsl\",\"etag\":\"rlaudemzrp\",\"id\":\"usujbibbgcloknh\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DatasetResource response = manager.datasets().getWithResponse("kldxc", "qjnnhotwqkgvr", "limzsutmsmdibzv", - "tempsaykcxu", com.azure.core.util.Context.NONE).getValue(); - - Assertions.assertEquals("usujbibbgcloknh", response.id()); - Assertions.assertEquals("cwtnzf", response.properties().description()); - Assertions.assertEquals("butyrsravsscbls", response.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, response.properties().parameters().get("zoibip").type()); - Assertions.assertEquals("dryysv", response.properties().folder().name()); + = "{\"properties\":{\"type\":\"h\",\"description\":\"vgow\",\"structure\":\"datapqfsshjlpznxjym\",\"schema\":\"datalj\",\"linkedServiceName\":{\"referenceName\":\"groawlta\",\"parameters\":{\"eltrnogfyaimpk\":\"datamrreajp\",\"lwubkmdusnwok\":\"databgrug\",\"inrgqzlw\":\"datauxcmaukirzlfp\"}},\"parameters\":{\"kr\":{\"type\":\"String\",\"defaultValue\":\"datatxcjnwzvlzwy\"}},\"annotations\":[\"dataqgknpgqzmgivkf\"],\"folder\":{\"name\":\"aklxpwhvuihjl\"},\"\":{\"bq\":\"dataqinrfu\",\"jtrxqd\":\"datatyoaccrlydml\",\"ufe\":\"datapmhgqihuqajh\"}},\"name\":\"vy\",\"type\":\"ont\",\"etag\":\"vquwxxouflncah\",\"id\":\"lgnmyioalnkr\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + DatasetResource response = manager.datasets() + .getWithResponse("ewyvwwvkicuvphkh", "zes", "sy", "vjkwpkn", com.azure.core.util.Context.NONE) + .getValue(); + + Assertions.assertEquals("lgnmyioalnkr", response.id()); + Assertions.assertEquals("vgow", 
response.properties().description()); + Assertions.assertEquals("groawlta", response.properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, response.properties().parameters().get("kr").type()); + Assertions.assertEquals("aklxpwhvuihjl", response.properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java index d37a9986d6776..394fcd4ff1dfc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java @@ -6,58 +6,41 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.DatasetResource; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class DatasetsListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"cxlg\",\"structure\":\"dataxoymjx\",\"schema\":\"datantjhvcorobmqudz\",\"linkedServiceName\":{\"referenceName\":\"pzk\",\"parameters\":{\"q\":\"dataazkcyg\"}},\"parameters\":{\"ss\":{\"type\":\"Object\",\"defaultValue\":\"datapgujibkwwyfsqg\"},\"ohpwnrmhlotknb\":{\"type\":\"Object\",\"defaultValue\":\"dataazvrmulsje\"}},\"annotations\":[\"datazqmudmefsxmdmlow\"],\"folder\":{\"name\":\"xpwfvtwgnm\"},\"\":{\"cj\":\"datarxwkomjsfkdvb\"}},\"name\":\"njnwpivfplb\",\"type\":\"qec\",\"etag\":\"wkzuaxsrmadakj\",\"id\":\"puvyvobkk\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"value\":[{\"properties\":{\"type\":\"fvevsqvk\",\"description\":\"ytkluewthydgz\",\"structure\":\"databbfun\",\"schema\":\"dataywlvkpxz\",\"linkedServiceName\":{\"referenceName\":\"ioxarwxhpufvuc\",\"parameters\":{\"wyiyqmrcsekhuzi\":\"datamjbzekrw\",\"udmpsuqpraqjscni\":\"datae\",\"kpewtbyciedxs\":\"datapvx\",\"qoaxieu\":\"datayjjhrvembi\"}},\"parameters\":{\"pwbfb\":{\"type\":\"Float\",\"defaultValue\":\"datakhjxgukzz\"},\"sxuzda\":{\"type\":\"SecureString\",\"defaultValue\":\"datarqsipqbyv\"}},\"annotations\":[\"datamilpztwzjkbaudtp\"],\"folder\":{\"name\":\"qkntnvgwgtgxggm\"},\"\":{\"bkazv\":\"dataulqpzqxcygevgjzr\",\"pcleniozqruq\":\"databsbkyfbmwzb\",\"pyymlwallde\":\"datarpwz\"}},\"name\":\"rhgohdv\",\"type\":\"svrpnoxbokmqviv\",\"etag\":\"fbzrfmfadvfkjdwu\",\"id\":\"o\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.datasets().listByFactory("srxjfapiodsnz", "ezwjq", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("puvyvobkk", response.iterator().next().id()); - Assertions.assertEquals("cxlg", response.iterator().next().properties().description()); - Assertions.assertEquals("pzk", response.iterator().next().properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, - response.iterator().next().properties().parameters().get("ss").type()); - Assertions.assertEquals("xpwfvtwgnm", response.iterator().next().properties().folder().name()); + = manager.datasets().listByFactory("aqjpsjrpkgvs", "wdcz", com.azure.core.util.Context.NONE); + + Assertions.assertEquals("o", response.iterator().next().id()); + Assertions.assertEquals("ytkluewthydgz", response.iterator().next().properties().description()); + Assertions.assertEquals("ioxarwxhpufvuc", + response.iterator().next().properties().linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, + response.iterator().next().properties().parameters().get("pwbfb").type()); + Assertions.assertEquals("qkntnvgwgtgxggm", response.iterator().next().properties().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java index 969cd580c134c..bffa5d319cf0c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java @@ -11,15 +11,19 @@ public final class Db2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Db2Source model = BinaryData.fromString( - 
"{\"type\":\"Db2Source\",\"query\":\"datafmzeu\",\"queryTimeout\":\"dataz\",\"additionalColumns\":\"dataqeqcbygqcwzyto\",\"sourceRetryCount\":\"dataqcthgqyvaoaz\",\"sourceRetryWait\":\"dataykkcqaf\",\"maxConcurrentConnections\":\"datajgixsjhi\",\"disableMetricsCollection\":\"datayekslllzsqolckwh\",\"\":{\"loifxzdohfvxavhf\":\"databnnhwp\",\"qwwzpbamcfr\":\"datal\",\"nrmbcklfpemgfv\":\"dataaytcygoom\"}}") + "{\"type\":\"ehdydyybz\",\"query\":\"datawjmohqzzkplqmca\",\"queryTimeout\":\"datagfpqwwugfw\",\"additionalColumns\":\"datajcewbqaibkyeys\",\"sourceRetryCount\":\"dataylhdxcjqdvci\",\"sourceRetryWait\":\"datazkui\",\"maxConcurrentConnections\":\"datavghvecjhbttmhne\",\"disableMetricsCollection\":\"datarzieyxxidab\",\"\":{\"sfcryqrr\":\"datakknaacseqonl\"}}") .toObject(Db2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Db2Source model = new Db2Source().withSourceRetryCount("dataqcthgqyvaoaz").withSourceRetryWait("dataykkcqaf") - .withMaxConcurrentConnections("datajgixsjhi").withDisableMetricsCollection("datayekslllzsqolckwh") - .withQueryTimeout("dataz").withAdditionalColumns("dataqeqcbygqcwzyto").withQuery("datafmzeu"); + Db2Source model = new Db2Source().withSourceRetryCount("dataylhdxcjqdvci") + .withSourceRetryWait("datazkui") + .withMaxConcurrentConnections("datavghvecjhbttmhne") + .withDisableMetricsCollection("datarzieyxxidab") + .withQueryTimeout("datagfpqwwugfw") + .withAdditionalColumns("datajcewbqaibkyeys") + .withQuery("datawjmohqzzkplqmca"); model = BinaryData.fromObject(model).toObject(Db2Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java index e36cef13a64c5..04a24afd58232 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java @@ -19,34 +19,35 @@ public final class Db2TableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Db2TableDataset model = BinaryData.fromString( - "{\"type\":\"Db2Table\",\"typeProperties\":{\"tableName\":\"dataefeclflxcj\",\"schema\":\"datazwncvdefxonz\",\"table\":\"datacjptnn\"},\"description\":\"rcjqpzj\",\"structure\":\"datapjrrhpgsjbioag\",\"schema\":\"datai\",\"linkedServiceName\":{\"referenceName\":\"ehmdqvaolidxd\",\"parameters\":{\"rsvxphtjnhptj\":\"datavkjcim\",\"yzhimm\":\"datarkd\",\"izuzjd\":\"datadtdtft\",\"kqoyimxpggk\":\"datargyzcslazp\"}},\"parameters\":{\"deylpbyb\":{\"type\":\"Bool\",\"defaultValue\":\"databgacnqpjuytv\"},\"ifm\":{\"type\":\"Int\",\"defaultValue\":\"datab\"},\"au\":{\"type\":\"Array\",\"defaultValue\":\"datapwdj\"},\"aaaxx\":{\"type\":\"Int\",\"defaultValue\":\"datahznurttu\"}},\"annotations\":[\"datajmdkqtxfrm\",\"dataecxstowa\",\"dataehxuihwes\"],\"folder\":{\"name\":\"aqgblkkncyp\"},\"\":{\"piobnhrfbrjokjwq\":\"datavspsaneyvae\",\"zwfwlrfdjwlzseod\":\"datamraqnilppqcaig\",\"zy\":\"dataqfdrs\"}}") + 
"{\"type\":\"alknuyapvibzic\",\"typeProperties\":{\"tableName\":\"datarlcck\",\"schema\":\"datafpjmspau\",\"table\":\"dataqom\"},\"description\":\"ichca\",\"structure\":\"datakhjxnrkbn\",\"schema\":\"dataccklzhzn\",\"linkedServiceName\":{\"referenceName\":\"gvlxyx\",\"parameters\":{\"dprtqjytdcfe\":\"datatigpksywicklktg\",\"gmlamoaxc\":\"datauhbdwbvjs\"}},\"parameters\":{\"cw\":{\"type\":\"Int\",\"defaultValue\":\"datakvbpbl\"},\"vlryszfh\":{\"type\":\"Array\",\"defaultValue\":\"dataheeocnqoubve\"},\"zhh\":{\"type\":\"Float\",\"defaultValue\":\"datafhxohz\"}},\"annotations\":[\"datafuxnvkdslcofuvtf\"],\"folder\":{\"name\":\"ouisakl\"},\"\":{\"ub\":\"dataddxqfus\",\"p\":\"datasspmjvailfauyv\",\"wvoglff\":\"datawlkqdgwbztrthl\"}}") .toObject(Db2TableDataset.class); - Assertions.assertEquals("rcjqpzj", model.description()); - Assertions.assertEquals("ehmdqvaolidxd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("deylpbyb").type()); - Assertions.assertEquals("aqgblkkncyp", model.folder().name()); + Assertions.assertEquals("ichca", model.description()); + Assertions.assertEquals("gvlxyx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("cw").type()); + Assertions.assertEquals("ouisakl", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Db2TableDataset model - = new Db2TableDataset().withDescription("rcjqpzj").withStructure("datapjrrhpgsjbioag").withSchema("datai") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ehmdqvaolidxd") - .withParameters(mapOf("rsvxphtjnhptj", "datavkjcim", "yzhimm", "datarkd", "izuzjd", "datadtdtft", - "kqoyimxpggk", "datargyzcslazp"))) - .withParameters(mapOf("deylpbyb", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("databgacnqpjuytv"), - "ifm", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datab"), "au", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapwdj"), "aaaxx", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datahznurttu"))) - .withAnnotations(Arrays.asList("datajmdkqtxfrm", "dataecxstowa", "dataehxuihwes")) - .withFolder(new DatasetFolder().withName("aqgblkkncyp")).withTableName("dataefeclflxcj") - .withSchemaTypePropertiesSchema("datazwncvdefxonz").withTable("datacjptnn"); + Db2TableDataset model = new Db2TableDataset().withDescription("ichca") + .withStructure("datakhjxnrkbn") + .withSchema("dataccklzhzn") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gvlxyx") + .withParameters(mapOf("dprtqjytdcfe", "datatigpksywicklktg", "gmlamoaxc", "datauhbdwbvjs"))) + .withParameters(mapOf("cw", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datakvbpbl"), "vlryszfh", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataheeocnqoubve"), "zhh", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafhxohz"))) + .withAnnotations(Arrays.asList("datafuxnvkdslcofuvtf")) + .withFolder(new DatasetFolder().withName("ouisakl")) + .withTableName("datarlcck") + .withSchemaTypePropertiesSchema("datafpjmspau") + .withTable("dataqom"); model = BinaryData.fromObject(model).toObject(Db2TableDataset.class); - Assertions.assertEquals("rcjqpzj", model.description()); - Assertions.assertEquals("ehmdqvaolidxd", model.linkedServiceName().referenceName()); 
- Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("deylpbyb").type()); - Assertions.assertEquals("aqgblkkncyp", model.folder().name()); + Assertions.assertEquals("ichca", model.description()); + Assertions.assertEquals("gvlxyx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("cw").type()); + Assertions.assertEquals("ouisakl", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java index 270bc3a267cb8..a5550284607ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java @@ -10,15 +10,16 @@ public final class Db2TableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - Db2TableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataxserw\",\"schema\":\"datauhytjwgetfi\",\"table\":\"datan\"}") - .toObject(Db2TableDatasetTypeProperties.class); + Db2TableDatasetTypeProperties model = BinaryData.fromString( + "{\"tableName\":\"datahgslormhbtofcvx\",\"schema\":\"datalhcnsdylmnqunk\",\"table\":\"databptmsgkwedwlxtzh\"}") + .toObject(Db2TableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Db2TableDatasetTypeProperties model = new Db2TableDatasetTypeProperties().withTableName("dataxserw") - .withSchema("datauhytjwgetfi").withTable("datan"); + Db2TableDatasetTypeProperties model = new Db2TableDatasetTypeProperties().withTableName("datahgslormhbtofcvx") + .withSchema("datalhcnsdylmnqunk") + .withTable("databptmsgkwedwlxtzh"); model = BinaryData.fromObject(model).toObject(Db2TableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java index 5431b1ba17466..11cc1a61e5ad4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java @@ -25,70 +25,76 @@ public final class DeleteActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DeleteActivity model = BinaryData.fromString( - 
"{\"type\":\"Delete\",\"typeProperties\":{\"recursive\":\"datacrsbrhkdemaxoaj\",\"maxConcurrentConnections\":756643616,\"enableLogging\":\"dataachsojgageyxa\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"kzkzp\",\"parameters\":{\"ml\":\"databmgfxwynzbe\"}},\"path\":\"datatgbgcmut\",\"logLevel\":\"datawd\",\"enableReliableLogging\":\"datapuufuh\",\"\":{\"qctqv\":\"datamfdrxyej\"}},\"dataset\":{\"referenceName\":\"ahiiat\",\"parameters\":{\"fchnhjsa\":\"datapoxogvpsm\",\"yikgmlplqgpskyn\":\"datapwx\",\"zk\":\"datak\",\"blgvezhi\":\"datanlvwtsl\"}},\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datayqwlxk\",\"disableMetricsCollection\":\"dataysyutnredzkovt\",\"\":{\"xmyblway\":\"datacaprxh\",\"wyfy\":\"datapaggkrumpu\",\"jdrvjktvpy\":\"datahcboipxhghicwv\"}}},\"linkedServiceName\":{\"referenceName\":\"ooytilsmise\",\"parameters\":{\"qu\":\"datajmm\",\"zh\":\"datacrzgluqacebcn\",\"wkgcpfz\":\"dataaumjuruspflvgl\"}},\"policy\":{\"timeout\":\"databrqgwvx\",\"retry\":\"dataqmvsr\",\"retryIntervalInSeconds\":1080614288,\"secureInput\":false,\"secureOutput\":false,\"\":{\"uoyownygbra\":\"dataob\"}},\"name\":\"cwhebyczweg\",\"description\":\"dp\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ksqifrgm\",\"dependencyConditions\":[\"Completed\"],\"\":{\"l\":\"datardglecmeg\",\"ryhztwxuizakejo\":\"datadlt\",\"gqezgbqiiweoa\":\"datajnlxjhrzgnfqq\"}},{\"activity\":\"eokrarzkzatznvye\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Completed\"],\"\":{\"ukmxgslzbpnlfz\":\"dataz\",\"uotwzes\":\"datawm\"}}],\"userProperties\":[{\"name\":\"cpcpeur\",\"value\":\"dataofzmvt\"}],\"\":{\"h\":\"datacbjrptltytbqhe\",\"slxzwvygquiwcfq\":\"datawjlbygqfmeeuuurx\",\"wwdev\":\"dataob\"}}") + 
"{\"type\":\"forujfluomaltvv\",\"typeProperties\":{\"recursive\":\"datavsz\",\"maxConcurrentConnections\":1886602185,\"enableLogging\":\"datazbxqfmhypwg\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"vspbdju\",\"parameters\":{\"pswewgdawhzdhszk\":\"datafnlqnrdsmr\",\"zmyvdabgctm\":\"datadlilkw\",\"kgrubs\":\"datantlrfxnok\"}},\"path\":\"dataznhdkxh\",\"logLevel\":\"datainjer\",\"enableReliableLogging\":\"dataurchn\",\"\":{\"rqhqqkhzpwsa\":\"dataelsvosvqjthluo\",\"vxwnmiumduwpq\":\"datawsentrcdz\",\"fbgkyonadtywzrnx\":\"datadduvxmrbbgli\",\"ygjbcfprioab\":\"dataktokiptxmdad\"}},\"dataset\":{\"referenceName\":\"xwid\",\"parameters\":{\"xtsywrmmhaxmo\":\"dataonnolrs\",\"armnseigoalxwuqu\":\"datauotexlpqydgfzet\",\"ghs\":\"datazrskdovgkpqzzrx\"}},\"storeSettings\":{\"type\":\"ogimihxyxe\",\"maxConcurrentConnections\":\"dataawixdcytdqami\",\"disableMetricsCollection\":\"datab\",\"\":{\"dzoauvwjkgpzco\":\"databca\",\"aqxztywzaq\":\"datawcnnzacqludq\",\"zlzpowsefpg\":\"datafqtstmyfebb\",\"pzbsytwt\":\"dataw\"}}},\"linkedServiceName\":{\"referenceName\":\"mafqsnwupuuby\",\"parameters\":{\"fnvxcomp\":\"datajyyngwzqyhrxoek\",\"pzbyudkoa\":\"datagnsmh\"}},\"policy\":{\"timeout\":\"dataaaocjlw\",\"retry\":\"datauwcrextdy\",\"retryIntervalInSeconds\":1718647080,\"secureInput\":false,\"secureOutput\":false,\"\":{\"xrhveyngzjxjbk\":\"datam\",\"fqimjldeluq\":\"datatahguvqghuehgcqh\"}},\"name\":\"udhtdapkdahy\",\"description\":\"tixrkjogyqrmt\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ibyfy\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Skipped\"],\"\":{\"al\":\"datauqylmlunquvl\",\"ztlxfgynq\":\"dataw\",\"fq\":\"datadnaidacsku\",\"rcrknnruceuwfmrc\":\"dataxzdlfswubjvs\"}}],\"userProperties\":[{\"name\":\"nj\",\"value\":\"datak\"},{\"name\":\"zhtovs\",\"value\":\"dataieoth\"}],\"\":{\"kdkd\":\"datakprv\",\"pcghcf\":\"datadsmavtndgfmtxim\",\"erybdiajeeahweru\":\"dataduqefdtpur\"}}") .toObject(DeleteActivity.class); - Assertions.assertEquals("cwhebyczweg", model.name()); - Assertions.assertEquals("dp", model.description()); + Assertions.assertEquals("udhtdapkdahy", model.name()); + Assertions.assertEquals("tixrkjogyqrmt", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("ksqifrgm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("ibyfy", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cpcpeur", model.userProperties().get(0).name()); - Assertions.assertEquals("ooytilsmise", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1080614288, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("nj", model.userProperties().get(0).name()); + Assertions.assertEquals("mafqsnwupuuby", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1718647080, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(756643616, model.maxConcurrentConnections()); - Assertions.assertEquals("kzkzp", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ahiiat", 
model.dataset().referenceName()); + Assertions.assertEquals(1886602185, model.maxConcurrentConnections()); + Assertions.assertEquals("vspbdju", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("xwid", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DeleteActivity model - = new DeleteActivity().withName("cwhebyczweg").withDescription("dp").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ksqifrgm") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("eokrarzkzatznvye") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("cpcpeur").withValue("dataofzmvt"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ooytilsmise") - .withParameters(mapOf("qu", "datajmm", "zh", "datacrzgluqacebcn", "wkgcpfz", "dataaumjuruspflvgl"))) - .withPolicy(new ActivityPolicy().withTimeout("databrqgwvx").withRetry("dataqmvsr") - .withRetryIntervalInSeconds(1080614288).withSecureInput(false).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withRecursive("datacrsbrhkdemaxoaj").withMaxConcurrentConnections(756643616) - .withEnableLogging("dataachsojgageyxa") - .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kzkzp") - .withParameters(mapOf("ml", "databmgfxwynzbe"))) - .withPath("datatgbgcmut").withLogLevel("datawd").withEnableReliableLogging("datapuufuh") - .withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("ahiiat") - .withParameters(mapOf("fchnhjsa", "datapoxogvpsm", "yikgmlplqgpskyn", "datapwx", "zk", "datak", - "blgvezhi", "datanlvwtsl"))) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datayqwlxk") - .withDisableMetricsCollection("dataysyutnredzkovt") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))); + DeleteActivity model = new DeleteActivity().withName("udhtdapkdahy") + .withDescription("tixrkjogyqrmt") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ibyfy") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("nj").withValue("datak"), + new UserProperty().withName("zhtovs").withValue("dataieoth"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mafqsnwupuuby") + .withParameters(mapOf("fnvxcomp", "datajyyngwzqyhrxoek", "pzbyudkoa", "datagnsmh"))) + .withPolicy(new ActivityPolicy().withTimeout("dataaaocjlw") + .withRetry("datauwcrextdy") + .withRetryIntervalInSeconds(1718647080) + .withSecureInput(false) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withRecursive("datavsz") + .withMaxConcurrentConnections(1886602185) + .withEnableLogging("datazbxqfmhypwg") + .withLogStorageSettings(new LogStorageSettings() + .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("vspbdju") + .withParameters(mapOf("pswewgdawhzdhszk", "datafnlqnrdsmr", "zmyvdabgctm", "datadlilkw", "kgrubs", + "datantlrfxnok"))) + .withPath("dataznhdkxh") + .withLogLevel("datainjer") + .withEnableReliableLogging("dataurchn") + .withAdditionalProperties(mapOf())) + .withDataset(new DatasetReference().withReferenceName("xwid") + .withParameters(mapOf("xtsywrmmhaxmo", "dataonnolrs", "armnseigoalxwuqu", "datauotexlpqydgfzet", "ghs", + "datazrskdovgkpqzzrx"))) + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataawixdcytdqami") + .withDisableMetricsCollection("datab") + .withAdditionalProperties(mapOf("type", "ogimihxyxe"))); model = BinaryData.fromObject(model).toObject(DeleteActivity.class); - Assertions.assertEquals("cwhebyczweg", model.name()); - Assertions.assertEquals("dp", model.description()); + Assertions.assertEquals("udhtdapkdahy", model.name()); + Assertions.assertEquals("tixrkjogyqrmt", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("ksqifrgm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("ibyfy", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cpcpeur", model.userProperties().get(0).name()); - Assertions.assertEquals("ooytilsmise", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1080614288, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("nj", model.userProperties().get(0).name()); + Assertions.assertEquals("mafqsnwupuuby", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1718647080, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(756643616, model.maxConcurrentConnections()); - Assertions.assertEquals("kzkzp", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ahiiat", model.dataset().referenceName()); + Assertions.assertEquals(1886602185, model.maxConcurrentConnections()); + Assertions.assertEquals("vspbdju", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("xwid", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java index d14a1d27423d3..545f979342edd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java @@ -18,30 +18,34 @@ public final class DeleteActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DeleteActivityTypeProperties model = BinaryData.fromString( - 
"{\"recursive\":\"datatejhvggykirqks\",\"maxConcurrentConnections\":1105166320,\"enableLogging\":\"datammmimsyiwc\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"qjb\",\"parameters\":{\"fkwdtsbjmchsefe\":\"datamlmibvczdjkonlv\",\"n\":\"datajyfaqdwfa\"}},\"path\":\"dataetslxerhwlvh\",\"logLevel\":\"dataxxgeladqziljrsyc\",\"enableReliableLogging\":\"datansznjs\",\"\":{\"uyyqsonfxsf\":\"datajupukhxpi\",\"ytedspkduhz\":\"dataedjnxicufxt\",\"ttesqpjcuuy\":\"datavbgcf\",\"bahtlopbns\":\"datatuindpmrijncaqgt\"}},\"dataset\":{\"referenceName\":\"jzrnjcagagmgulln\",\"parameters\":{\"bqzwutakbv\":\"datanoxrmabbetzc\",\"cdue\":\"dataqguaubmcwpllojd\",\"eajrcrbkwc\":\"datatddigmmj\"}},\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataxguihmjcemk\",\"disableMetricsCollection\":\"datacbvh\",\"\":{\"mzntroafz\":\"dataiafzwhr\",\"ogfo\":\"dataq\",\"ojjbky\":\"datarryzbqpksoaxszuh\"}}}") + "{\"recursive\":\"dataoeyyx\",\"maxConcurrentConnections\":2063070863,\"enableLogging\":\"datakkglahdwxyit\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"foek\",\"parameters\":{\"gfy\":\"datajvtfzaqno\",\"s\":\"datafohuxpfxkjrhgw\"}},\"path\":\"dataewkkqvkuzifsg\",\"logLevel\":\"datalfk\",\"enableReliableLogging\":\"datamwezsirhpigq\",\"\":{\"ywbssli\":\"datadrcj\",\"qqeslnaoxke\":\"datahcpuddbzxi\",\"fzyxamyjhp\":\"datautrlzzztg\",\"ily\":\"datazuvsjblqmddtp\"}},\"dataset\":{\"referenceName\":\"joboqts\",\"parameters\":{\"uvcfmtmmpvoa\":\"datauywg\"}},\"storeSettings\":{\"type\":\"lxgtu\",\"maxConcurrentConnections\":\"datajgtsk\",\"disableMetricsCollection\":\"databjylo\",\"\":{\"qsubzi\":\"datacbqocerbwa\",\"nhesw\":\"dataelphauldalspe\",\"vgviycjulun\":\"datallqyvblfprskxhg\",\"bnjpivoizxkh\":\"datatuficip\"}}}") .toObject(DeleteActivityTypeProperties.class); - Assertions.assertEquals(1105166320, model.maxConcurrentConnections()); - Assertions.assertEquals("qjb", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("jzrnjcagagmgulln", model.dataset().referenceName()); + Assertions.assertEquals(2063070863, model.maxConcurrentConnections()); + Assertions.assertEquals("foek", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("joboqts", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DeleteActivityTypeProperties model = new DeleteActivityTypeProperties().withRecursive("datatejhvggykirqks") - .withMaxConcurrentConnections(1105166320).withEnableLogging("datammmimsyiwc") + DeleteActivityTypeProperties model = new DeleteActivityTypeProperties().withRecursive("dataoeyyx") + .withMaxConcurrentConnections(2063070863) + .withEnableLogging("datakkglahdwxyit") .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qjb") - .withParameters(mapOf("fkwdtsbjmchsefe", "datamlmibvczdjkonlv", "n", "datajyfaqdwfa"))) - .withPath("dataetslxerhwlvh").withLogLevel("dataxxgeladqziljrsyc") - .withEnableReliableLogging("datansznjs").withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("jzrnjcagagmgulln").withParameters( - mapOf("bqzwutakbv", "datanoxrmabbetzc", "cdue", "dataqguaubmcwpllojd", "eajrcrbkwc", "datatddigmmj"))) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataxguihmjcemk") - .withDisableMetricsCollection("datacbvh").withAdditionalProperties(mapOf("type", "StoreReadSettings"))); + 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("foek") + .withParameters(mapOf("gfy", "datajvtfzaqno", "s", "datafohuxpfxkjrhgw"))) + .withPath("dataewkkqvkuzifsg") + .withLogLevel("datalfk") + .withEnableReliableLogging("datamwezsirhpigq") + .withAdditionalProperties(mapOf())) + .withDataset( + new DatasetReference().withReferenceName("joboqts").withParameters(mapOf("uvcfmtmmpvoa", "datauywg"))) + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datajgtsk") + .withDisableMetricsCollection("databjylo") + .withAdditionalProperties(mapOf("type", "lxgtu"))); model = BinaryData.fromObject(model).toObject(DeleteActivityTypeProperties.class); - Assertions.assertEquals(1105166320, model.maxConcurrentConnections()); - Assertions.assertEquals("qjb", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("jzrnjcagagmgulln", model.dataset().referenceName()); + Assertions.assertEquals(2063070863, model.maxConcurrentConnections()); + Assertions.assertEquals("foek", model.logStorageSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("joboqts", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java index 6416b5ab534c0..3068221d0b189 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java @@ -12,14 +12,14 @@ public final class DeleteDataFlowDebugSessionRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DeleteDataFlowDebugSessionRequest model - = BinaryData.fromString("{\"sessionId\":\"pnwjfujq\"}").toObject(DeleteDataFlowDebugSessionRequest.class); - Assertions.assertEquals("pnwjfujq", model.sessionId()); + = BinaryData.fromString("{\"sessionId\":\"oucs\"}").toObject(DeleteDataFlowDebugSessionRequest.class); + Assertions.assertEquals("oucs", model.sessionId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DeleteDataFlowDebugSessionRequest model = new DeleteDataFlowDebugSessionRequest().withSessionId("pnwjfujq"); + DeleteDataFlowDebugSessionRequest model = new DeleteDataFlowDebugSessionRequest().withSessionId("oucs"); model = BinaryData.fromObject(model).toObject(DeleteDataFlowDebugSessionRequest.class); - Assertions.assertEquals("pnwjfujq", model.sessionId()); + Assertions.assertEquals("oucs", model.sessionId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java index 5fa156e4c2f02..ae87972d06bf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java @@ -14,15 +14,15 @@ public final class DelimitedTextReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DelimitedTextReadSettings model = BinaryData.fromString( - "{\"type\":\"DelimitedTextReadSettings\",\"skipLineCount\":\"dataaexrzxvffqc\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"qdibygrqcid\":\"datahrztbyulkoepss\",\"pftrdicstrbq\":\"datawkxikxiqxlxoksy\",\"katccetyyv\":\"dataatkliopgw\"}},\"\":{\"nsdp\":\"dataobb\",\"lzk\":\"datairt\"}}") + "{\"type\":\"nxhq\",\"skipLineCount\":\"datamnswxq\",\"compressionProperties\":{\"type\":\"ffcanvr\",\"\":{\"hsz\":\"datalgxmgghut\",\"gpbi\":\"dataljyogcpwn\"}},\"\":{\"qdikuvjclspdh\":\"datamfo\",\"awtpwnk\":\"datahkflwnlp\",\"ozfygvsfafcar\":\"datawxlplqni\"}}") .toObject(DelimitedTextReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DelimitedTextReadSettings model - = new DelimitedTextReadSettings().withSkipLineCount("dataaexrzxvffqc").withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); + DelimitedTextReadSettings model = new DelimitedTextReadSettings().withSkipLineCount("datamnswxq") + .withCompressionProperties( + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "ffcanvr"))); model = BinaryData.fromObject(model).toObject(DelimitedTextReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java index bae008696c7b4..68d3cc623da20 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java @@ -17,25 +17,30 @@ public final class DelimitedTextSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DelimitedTextSink model = BinaryData.fromString( - "{\"type\":\"DelimitedTextSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datakqah\",\"disableMetricsCollection\":\"datagnapkpaiedo\",\"copyBehavior\":\"dataoaoavezwc\",\"metadata\":[{\"name\":\"datalrvlg\",\"value\":\"datarcdiqhvhcbukaw\"},{\"name\":\"datazfjtockgqaawyys\",\"value\":\"dataoeql\"},{\"name\":\"dataw\",\"value\":\"datarfyww\"},{\"name\":\"dataipkhqhvktcztm\",\"value\":\"datak\"}],\"\":{\"bxxxqfrn\":\"dataspkgxemvlyaprj\"}},\"formatSettings\":{\"type\":\"DelimitedTextWriteSettings\",\"quoteAllText\":\"datahmx\",\"fileExtension\":\"datapxfauvgtoino\",\"maxRowsPerFile\":\"datamyvvfapfbmrwhkne\",\"fileNamePrefix\":\"dataoo\",\"\":{\"gupiosibg\":\"datadspdd\",\"skyrttnrikss\":\"datalaxuybxjwny\"}},\"writeBatchSize\":\"dataiksjpkign\",\"writeBatchTimeout\":\"datao\",\"sinkRetryCount\":\"datauivipbfsxpslp\",\"sinkRetryWait\":\"datazpqydnokkkg\",\"maxConcurrentConnections\":\"datacjyft\",\"disableMetricsCollection\":\"dataeftzetjclaqx\",\"\":{\"agpdsuyywnaqgo\":\"dataq\",\"zwlhufsgc\":\"dataxuhhvgddfzcny\",\"gtdeg\":\"datawr\"}}") + 
"{\"type\":\"qaq\",\"storeSettings\":{\"type\":\"johwwtlerhpf\",\"maxConcurrentConnections\":\"datarqnjush\",\"disableMetricsCollection\":\"datahtvnqcmrrgmlwgo\",\"copyBehavior\":\"datascsddlcnwbi\",\"metadata\":[{\"name\":\"datangeffrghwd\",\"value\":\"dataowhrrguvdrggucw\"},{\"name\":\"datalmsikl\",\"value\":\"datam\"},{\"name\":\"datacphgimi\",\"value\":\"datahrbs\"}],\"\":{\"qfbgeblp\":\"dataublouelf\",\"swgfjrg\":\"datawckmnpzubzq\"}},\"formatSettings\":{\"type\":\"uop\",\"quoteAllText\":\"dataklfswzsyigxsyxhy\",\"fileExtension\":\"dataczab\",\"maxRowsPerFile\":\"dataeu\",\"fileNamePrefix\":\"datazf\",\"\":{\"sspcutgkm\":\"dataa\"}},\"writeBatchSize\":\"datalnhgisiwzzk\",\"writeBatchTimeout\":\"dataqrngl\",\"sinkRetryCount\":\"datatu\",\"sinkRetryWait\":\"datafwdkpadktsyy\",\"maxConcurrentConnections\":\"dataojrfqtfk\",\"disableMetricsCollection\":\"dataupmdajqpdvvzb\",\"\":{\"mvtqhn\":\"dataxokiffqpwdyzset\",\"cprkqywyb\":\"dataoij\"}}") .toObject(DelimitedTextSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DelimitedTextSink model = new DelimitedTextSink().withWriteBatchSize("dataiksjpkign") - .withWriteBatchTimeout("datao").withSinkRetryCount("datauivipbfsxpslp").withSinkRetryWait("datazpqydnokkkg") - .withMaxConcurrentConnections("datacjyft").withDisableMetricsCollection("dataeftzetjclaqx") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datakqah") - .withDisableMetricsCollection("datagnapkpaiedo").withCopyBehavior("dataoaoavezwc") - .withMetadata(Arrays.asList(new MetadataItem().withName("datalrvlg").withValue("datarcdiqhvhcbukaw"), - new MetadataItem().withName("datazfjtockgqaawyys").withValue("dataoeql"), - new MetadataItem().withName("dataw").withValue("datarfyww"), - new MetadataItem().withName("dataipkhqhvktcztm").withValue("datak"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings( - new DelimitedTextWriteSettings().withQuoteAllText("datahmx").withFileExtension("datapxfauvgtoino") - .withMaxRowsPerFile("datamyvvfapfbmrwhkne").withFileNamePrefix("dataoo")); + DelimitedTextSink model = new DelimitedTextSink().withWriteBatchSize("datalnhgisiwzzk") + .withWriteBatchTimeout("dataqrngl") + .withSinkRetryCount("datatu") + .withSinkRetryWait("datafwdkpadktsyy") + .withMaxConcurrentConnections("dataojrfqtfk") + .withDisableMetricsCollection("dataupmdajqpdvvzb") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datarqnjush") + .withDisableMetricsCollection("datahtvnqcmrrgmlwgo") + .withCopyBehavior("datascsddlcnwbi") + .withMetadata( + Arrays.asList(new MetadataItem().withName("datangeffrghwd").withValue("dataowhrrguvdrggucw"), + new MetadataItem().withName("datalmsikl").withValue("datam"), + new MetadataItem().withName("datacphgimi").withValue("datahrbs"))) + .withAdditionalProperties(mapOf("type", "johwwtlerhpf"))) + .withFormatSettings(new DelimitedTextWriteSettings().withQuoteAllText("dataklfswzsyigxsyxhy") + .withFileExtension("dataczab") + .withMaxRowsPerFile("dataeu") + .withFileNamePrefix("datazf")); model = BinaryData.fromObject(model).toObject(DelimitedTextSink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java index 08fca22a55010..8e563d3f1295d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java @@ -16,21 +16,23 @@ public final class DelimitedTextSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DelimitedTextSource model = BinaryData.fromString( - "{\"type\":\"DelimitedTextSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datamg\",\"disableMetricsCollection\":\"databqxlsam\",\"\":{\"gvmowyzxqhuhmldh\":\"dataqhwsojnbb\"}},\"formatSettings\":{\"type\":\"DelimitedTextReadSettings\",\"skipLineCount\":\"datackfu\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"dztqhqhwpuaermaw\":\"datao\",\"otwfu\":\"datalnsdazqcem\",\"dfszxbups\":\"datathisxzhik\"}},\"\":{\"tajwkrxmz\":\"datajpgxcgqkhy\"}},\"additionalColumns\":\"datawfn\",\"sourceRetryCount\":\"datalwovkzbomjby\",\"sourceRetryWait\":\"dataprkbzraljwfnc\",\"maxConcurrentConnections\":\"dataaylcpgzmx\",\"disableMetricsCollection\":\"datappqajdm\",\"\":{\"iminetbzfwfuxdt\":\"datantqqguhvnwr\",\"xxquyfft\":\"datajcsqkedlc\"}}") + "{\"type\":\"hoikemhohxab\",\"storeSettings\":{\"type\":\"ubhmiuxypvua\",\"maxConcurrentConnections\":\"datajtbnek\",\"disableMetricsCollection\":\"datazbfbuqeq\",\"\":{\"pp\":\"datazarr\"}},\"formatSettings\":{\"type\":\"ycxpzattaloo\",\"skipLineCount\":\"datayqnrjd\",\"compressionProperties\":{\"type\":\"arvzewo\",\"\":{\"romeawthycbigpi\":\"datapzxkjqecjf\",\"sulmuwlawa\":\"datatpxhzjnpa\",\"a\":\"dataheoxxqg\"}},\"\":{\"zslrprft\":\"datamxwt\"}},\"additionalColumns\":\"datafvouy\",\"sourceRetryCount\":\"dataoowpoogoz\",\"sourceRetryWait\":\"datacc\",\"maxConcurrentConnections\":\"datap\",\"disableMetricsCollection\":\"datakg\",\"\":{\"jkajlogvfnwq\":\"datajeqqjqa\"}}") .toObject(DelimitedTextSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DelimitedTextSource model = new DelimitedTextSource().withSourceRetryCount("datalwovkzbomjby") - .withSourceRetryWait("dataprkbzraljwfnc").withMaxConcurrentConnections("dataaylcpgzmx") - .withDisableMetricsCollection("datappqajdm") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datamg") - .withDisableMetricsCollection("databqxlsam") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new DelimitedTextReadSettings().withSkipLineCount("datackfu").withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datawfn"); + DelimitedTextSource model = new DelimitedTextSource().withSourceRetryCount("dataoowpoogoz") + .withSourceRetryWait("datacc") + .withMaxConcurrentConnections("datap") + .withDisableMetricsCollection("datakg") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datajtbnek") + .withDisableMetricsCollection("datazbfbuqeq") + .withAdditionalProperties(mapOf("type", "ubhmiuxypvua"))) + .withFormatSettings(new DelimitedTextReadSettings().withSkipLineCount("datayqnrjd") + .withCompressionProperties( + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "arvzewo")))) + .withAdditionalColumns("datafvouy"); model = BinaryData.fromObject(model).toObject(DelimitedTextSource.class); } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java index b9312800c7973..6aff7e7a919d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java @@ -11,15 +11,16 @@ public final class DelimitedTextWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DelimitedTextWriteSettings model = BinaryData.fromString( - "{\"type\":\"DelimitedTextWriteSettings\",\"quoteAllText\":\"datatetfdpu\",\"fileExtension\":\"datavzdtjbesfumed\",\"maxRowsPerFile\":\"dataafkvqhb\",\"fileNamePrefix\":\"datavivqkyaghfvub\",\"\":{\"bjrsdiufqxrl\":\"datasveguxaxijpny\",\"etidne\":\"dataijosuzegmcmlzm\",\"avpqyha\":\"datarvjgwnmxcngow\"}}") + "{\"type\":\"wuzanpoyrqjoniln\",\"quoteAllText\":\"dataduxbungmpn\",\"fileExtension\":\"dataytgu\",\"maxRowsPerFile\":\"datafxgl\",\"fileNamePrefix\":\"datajugoazzyz\",\"\":{\"dcloqpgzdbo\":\"dataest\"}}") .toObject(DelimitedTextWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DelimitedTextWriteSettings model - = new DelimitedTextWriteSettings().withQuoteAllText("datatetfdpu").withFileExtension("datavzdtjbesfumed") - .withMaxRowsPerFile("dataafkvqhb").withFileNamePrefix("datavivqkyaghfvub"); + DelimitedTextWriteSettings model = new DelimitedTextWriteSettings().withQuoteAllText("dataduxbungmpn") + .withFileExtension("dataytgu") + .withMaxRowsPerFile("datafxgl") + .withFileNamePrefix("datajugoazzyz"); model = BinaryData.fromObject(model).toObject(DelimitedTextWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java index 9872370dba56f..cc68d6ef1359a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java @@ -10,8 +10,7 @@ public final class DependencyReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DependencyReference model - = BinaryData.fromString("{\"type\":\"DependencyReference\"}").toObject(DependencyReference.class); + DependencyReference model = BinaryData.fromString("{\"type\":\"rttshr\"}").toObject(DependencyReference.class); } @org.junit.jupiter.api.Test diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java index 77cb68ac920a9..382593a756ecb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java @@ -11,14 +11,15 @@ public final class DistcpSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DistcpSettings model = BinaryData.fromString( - "{\"resourceManagerEndpoint\":\"datawcp\",\"tempScriptPath\":\"dataaosk\",\"distcpOptions\":\"dataalljsoasxjjklm\"}") + "{\"resourceManagerEndpoint\":\"dataapbdbicjzntiblx\",\"tempScriptPath\":\"dataygosuhroicjt\",\"distcpOptions\":\"datadymoanpkcmdixiux\"}") .toObject(DistcpSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DistcpSettings model = new DistcpSettings().withResourceManagerEndpoint("datawcp") - .withTempScriptPath("dataaosk").withDistcpOptions("dataalljsoasxjjklm"); + DistcpSettings model = new DistcpSettings().withResourceManagerEndpoint("dataapbdbicjzntiblx") + .withTempScriptPath("dataygosuhroicjt") + .withDistcpOptions("datadymoanpkcmdixiux"); model = BinaryData.fromObject(model).toObject(DistcpSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java index 68f470ddd245e..a1c452c1dd8d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java @@ -19,35 +19,35 @@ public final class DocumentDbCollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DocumentDbCollectionDataset model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollection\",\"typeProperties\":{\"collectionName\":\"datawxdowum\"},\"description\":\"ukrcdiohcl\",\"structure\":\"datadnhfknebwedd\",\"schema\":\"datayzcwy\",\"linkedServiceName\":{\"referenceName\":\"smkaqldqab\",\"parameters\":{\"qbqxfbbigcfd\":\"datap\",\"bmjyyrqaedwovoc\":\"dataofxn\",\"ayokrwfmihw\":\"datatjgo\"}},\"parameters\":{\"vothmkhjaoz\":{\"type\":\"Array\",\"defaultValue\":\"datadbfobdc\"},\"rhjvszfqbokndwpp\":{\"type\":\"Bool\",\"defaultValue\":\"datafcnjhbpoelhscmy\"},\"lynzlyvap\":{\"type\":\"SecureString\",\"defaultValue\":\"dataojoevzzufytdx\"},\"lcuhaizi\":{\"type\":\"Bool\",\"defaultValue\":\"databuoggtdl\"}},\"annotations\":[\"dataylzeohlpsftq\",\"datarvmhvbvvcpwt\",\"datasuspnhmzy\"],\"folder\":{\"name\":\"etevrntfknwacy\"},\"\":{\"atvcsxr\":\"dataotctkhfhf\",\"cubleh\":\"datahnmizhvprhqq\"}}") + 
"{\"type\":\"cffrx\",\"typeProperties\":{\"collectionName\":\"dataivdfwgqjh\"},\"description\":\"ezkhzp\",\"structure\":\"dataqissen\",\"schema\":\"datarusyzai\",\"linkedServiceName\":{\"referenceName\":\"npsjnpc\",\"parameters\":{\"lvvmnbkxja\":\"dataujyduonbdawsao\",\"xqkdyqjvzvcgrds\":\"datasbbdddwokqxai\",\"hvzf\":\"datazesfkqqx\",\"wukokgoojj\":\"databrouszxac\"}},\"parameters\":{\"unhgbtzvxxvsbcuu\":{\"type\":\"String\",\"defaultValue\":\"databc\"},\"urqmw\":{\"type\":\"Array\",\"defaultValue\":\"datafnkcnihkswxm\"},\"svnkxmyt\":{\"type\":\"Object\",\"defaultValue\":\"datapntumotahb\"}},\"annotations\":[\"dataedr\"],\"folder\":{\"name\":\"gc\"},\"\":{\"zzmiemqyft\":\"datarhkv\",\"mghpakbqyh\":\"datapqoswgfqv\",\"jm\":\"datasornfbmeqagkn\"}}") .toObject(DocumentDbCollectionDataset.class); - Assertions.assertEquals("ukrcdiohcl", model.description()); - Assertions.assertEquals("smkaqldqab", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vothmkhjaoz").type()); - Assertions.assertEquals("etevrntfknwacy", model.folder().name()); + Assertions.assertEquals("ezkhzp", model.description()); + Assertions.assertEquals("npsjnpc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("unhgbtzvxxvsbcuu").type()); + Assertions.assertEquals("gc", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DocumentDbCollectionDataset model = new DocumentDbCollectionDataset().withDescription("ukrcdiohcl") - .withStructure("datadnhfknebwedd").withSchema("datayzcwy") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("smkaqldqab").withParameters( - mapOf("qbqxfbbigcfd", "datap", "bmjyyrqaedwovoc", "dataofxn", "ayokrwfmihw", "datatjgo"))) - .withParameters(mapOf("vothmkhjaoz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datadbfobdc"), - "rhjvszfqbokndwpp", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafcnjhbpoelhscmy"), - "lynzlyvap", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataojoevzzufytdx"), - "lcuhaizi", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("databuoggtdl"))) - .withAnnotations(Arrays.asList("dataylzeohlpsftq", "datarvmhvbvvcpwt", "datasuspnhmzy")) - .withFolder(new DatasetFolder().withName("etevrntfknwacy")).withCollectionName("datawxdowum"); + DocumentDbCollectionDataset model = new DocumentDbCollectionDataset().withDescription("ezkhzp") + .withStructure("dataqissen") + .withSchema("datarusyzai") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("npsjnpc") + .withParameters(mapOf("lvvmnbkxja", "dataujyduonbdawsao", "xqkdyqjvzvcgrds", "datasbbdddwokqxai", + "hvzf", "datazesfkqqx", "wukokgoojj", "databrouszxac"))) + .withParameters(mapOf("unhgbtzvxxvsbcuu", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("databc"), "urqmw", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datafnkcnihkswxm"), + "svnkxmyt", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datapntumotahb"))) + .withAnnotations(Arrays.asList("dataedr")) + .withFolder(new DatasetFolder().withName("gc")) + .withCollectionName("dataivdfwgqjh"); model = BinaryData.fromObject(model).toObject(DocumentDbCollectionDataset.class); - Assertions.assertEquals("ukrcdiohcl", model.description()); 
- Assertions.assertEquals("smkaqldqab", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vothmkhjaoz").type()); - Assertions.assertEquals("etevrntfknwacy", model.folder().name()); + Assertions.assertEquals("ezkhzp", model.description()); + Assertions.assertEquals("npsjnpc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("unhgbtzvxxvsbcuu").type()); + Assertions.assertEquals("gc", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java index a30a9d3270578..ffd5992dcc919 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class DocumentDbCollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DocumentDbCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collectionName\":\"datakplobzgottaksadz\"}") + = BinaryData.fromString("{\"collectionName\":\"databnyevztnjawrhule\"}") .toObject(DocumentDbCollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DocumentDbCollectionDatasetTypeProperties model - = new DocumentDbCollectionDatasetTypeProperties().withCollectionName("datakplobzgottaksadz"); + = new DocumentDbCollectionDatasetTypeProperties().withCollectionName("databnyevztnjawrhule"); model = BinaryData.fromObject(model).toObject(DocumentDbCollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java index 64aaa715290b8..36a5cead66f78 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java @@ -11,17 +11,20 @@ public final class DocumentDbCollectionSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DocumentDbCollectionSink model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollectionSink\",\"nestingSeparator\":\"datapxftyhfc\",\"writeBehavior\":\"dataqsd\",\"writeBatchSize\":\"datafqaawr\",\"writeBatchTimeout\":\"datatzslfrztpnry\",\"sinkRetryCount\":\"dataxajr\",\"sinkRetryWait\":\"dataighlhddvno\",\"maxConcurrentConnections\":\"datasowbtnfqlwc\",\"disableMetricsCollection\":\"datajphtehoucmeuuua\",\"\":{\"kqtiuve\":\"datatlcxofqjninrskq\"}}") + 
"{\"type\":\"snzxipgfkcodoune\",\"nestingSeparator\":\"dataetqcxoamxum\",\"writeBehavior\":\"dataduhixomxvbru\",\"writeBatchSize\":\"datagnwmcizclnqexln\",\"writeBatchTimeout\":\"datapwdfjsj\",\"sinkRetryCount\":\"datandrkncfoqds\",\"sinkRetryWait\":\"datalcvpq\",\"maxConcurrentConnections\":\"datasfdpik\",\"disableMetricsCollection\":\"datag\",\"\":{\"ophcwzdwvy\":\"datagrppwoligflj\",\"vy\":\"datazo\"}}") .toObject(DocumentDbCollectionSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DocumentDbCollectionSink model - = new DocumentDbCollectionSink().withWriteBatchSize("datafqaawr").withWriteBatchTimeout("datatzslfrztpnry") - .withSinkRetryCount("dataxajr").withSinkRetryWait("dataighlhddvno") - .withMaxConcurrentConnections("datasowbtnfqlwc").withDisableMetricsCollection("datajphtehoucmeuuua") - .withNestingSeparator("datapxftyhfc").withWriteBehavior("dataqsd"); + DocumentDbCollectionSink model = new DocumentDbCollectionSink().withWriteBatchSize("datagnwmcizclnqexln") + .withWriteBatchTimeout("datapwdfjsj") + .withSinkRetryCount("datandrkncfoqds") + .withSinkRetryWait("datalcvpq") + .withMaxConcurrentConnections("datasfdpik") + .withDisableMetricsCollection("datag") + .withNestingSeparator("dataetqcxoamxum") + .withWriteBehavior("dataduhixomxvbru"); model = BinaryData.fromObject(model).toObject(DocumentDbCollectionSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java index 91e8c847e80b4..0b53d7f769e63 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java @@ -11,16 +11,20 @@ public final class DocumentDbCollectionSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DocumentDbCollectionSource model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollectionSource\",\"query\":\"databutmxtijs\",\"nestingSeparator\":\"datatdp\",\"queryTimeout\":\"datajtwibwcd\",\"additionalColumns\":\"datamnswxq\",\"sourceRetryCount\":\"datahffcanvrdtdl\",\"sourceRetryWait\":\"datamgghutl\",\"maxConcurrentConnections\":\"datazzljyog\",\"disableMetricsCollection\":\"datawnegpbiuwnxhqelj\",\"\":{\"dhg\":\"datalqdikuvjcls\",\"kiw\":\"datakflwnlpbawtpw\"}}") + "{\"type\":\"yiheheimuqqmd\",\"query\":\"datawfrmhookef\",\"nestingSeparator\":\"datafexakctlcps\",\"queryTimeout\":\"datancco\",\"additionalColumns\":\"dataqxmdievkmrso\",\"sourceRetryCount\":\"datawxfmrm\",\"sourceRetryWait\":\"dataf\",\"maxConcurrentConnections\":\"datasypmt\",\"disableMetricsCollection\":\"datavszlaf\",\"\":{\"lcnurh\":\"dataldhhkdeviwpzhfx\"}}") .toObject(DocumentDbCollectionSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DocumentDbCollectionSource model = new DocumentDbCollectionSource().withSourceRetryCount("datahffcanvrdtdl") - .withSourceRetryWait("datamgghutl").withMaxConcurrentConnections("datazzljyog") - .withDisableMetricsCollection("datawnegpbiuwnxhqelj").withQuery("databutmxtijs") - 
.withNestingSeparator("datatdp").withQueryTimeout("datajtwibwcd").withAdditionalColumns("datamnswxq"); + DocumentDbCollectionSource model = new DocumentDbCollectionSource().withSourceRetryCount("datawxfmrm") + .withSourceRetryWait("dataf") + .withMaxConcurrentConnections("datasypmt") + .withDisableMetricsCollection("datavszlaf") + .withQuery("datawfrmhookef") + .withNestingSeparator("datafexakctlcps") + .withQueryTimeout("datancco") + .withAdditionalColumns("dataqxmdievkmrso"); model = BinaryData.fromObject(model).toObject(DocumentDbCollectionSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java index 181c8d16ce7db..5906bde99d9e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java @@ -10,15 +10,16 @@ public final class DrillDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DrillDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datavpnyldjd\",\"table\":\"datavdryknkx\",\"schema\":\"dataxhnrjl\"}") - .toObject(DrillDatasetTypeProperties.class); + DrillDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"datauklx\",\"table\":\"datalmzpyq\",\"schema\":\"datahuecxhgs\"}") + .toObject(DrillDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DrillDatasetTypeProperties model = new DrillDatasetTypeProperties().withTableName("datavpnyldjd") - .withTable("datavdryknkx").withSchema("dataxhnrjl"); + DrillDatasetTypeProperties model = new DrillDatasetTypeProperties().withTableName("datauklx") + .withTable("datalmzpyq") + .withSchema("datahuecxhgs"); model = BinaryData.fromObject(model).toObject(DrillDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java index 5f27e16c61375..df431a23337e9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java @@ -11,15 +11,19 @@ public final class DrillSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DrillSource model = BinaryData.fromString( - "{\"type\":\"DrillSource\",\"query\":\"dataxtkmknacnfzcy\",\"queryTimeout\":\"datahdjpagwszm\",\"additionalColumns\":\"datagzfeyexbg\",\"sourceRetryCount\":\"datayo\",\"sourceRetryWait\":\"datawigvqgc\",\"maxConcurrentConnections\":\"datacqjg\",\"disableMetricsCollection\":\"dataxpbpj\",\"\":{\"ohehhtl\":\"datanvdabaodiytxq\"}}") + 
"{\"type\":\"wgzhbwhjv\",\"query\":\"datauz\",\"queryTimeout\":\"datal\",\"additionalColumns\":\"datak\",\"sourceRetryCount\":\"datauhvygxckit\",\"sourceRetryWait\":\"dataxoco\",\"maxConcurrentConnections\":\"datasgx\",\"disableMetricsCollection\":\"datajvdajxebmliyrc\",\"\":{\"kukra\":\"dataa\",\"hwdicntqsrhacjsb\":\"datand\"}}") .toObject(DrillSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DrillSource model = new DrillSource().withSourceRetryCount("datayo").withSourceRetryWait("datawigvqgc") - .withMaxConcurrentConnections("datacqjg").withDisableMetricsCollection("dataxpbpj") - .withQueryTimeout("datahdjpagwszm").withAdditionalColumns("datagzfeyexbg").withQuery("dataxtkmknacnfzcy"); + DrillSource model = new DrillSource().withSourceRetryCount("datauhvygxckit") + .withSourceRetryWait("dataxoco") + .withMaxConcurrentConnections("datasgx") + .withDisableMetricsCollection("datajvdajxebmliyrc") + .withQueryTimeout("datal") + .withAdditionalColumns("datak") + .withQuery("datauz"); model = BinaryData.fromObject(model).toObject(DrillSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java index 58e40a2c1ac5f..d1a5f2a043524 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java @@ -19,35 +19,33 @@ public final class DrillTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DrillTableDataset model = BinaryData.fromString( - "{\"type\":\"DrillTable\",\"typeProperties\":{\"tableName\":\"dataugdcr\",\"table\":\"databplvhmhur\",\"schema\":\"datadjlz\"},\"description\":\"m\",\"structure\":\"dataghqeuyaorservpv\",\"schema\":\"dataorsbeg\",\"linkedServiceName\":{\"referenceName\":\"lmexafjqzyhz\",\"parameters\":{\"nsskydigt\":\"datavtuqvirl\",\"ocsvjekejchxzj\":\"datajx\",\"yfyixecmasjnfgng\":\"datacwuzs\"}},\"parameters\":{\"wxtxuzhvojyf\":{\"type\":\"String\",\"defaultValue\":\"dataeyvfxbfckmoaljax\"},\"jef\":{\"type\":\"Float\",\"defaultValue\":\"datalbkjcdzuiygtcyz\"},\"lrlkbh\":{\"type\":\"Int\",\"defaultValue\":\"dataaldjcgldry\"},\"xefppq\":{\"type\":\"String\",\"defaultValue\":\"datamxcaujbfomfbozpj\"}},\"annotations\":[\"datannjjthpsnxebycy\",\"datapohxubn\"],\"folder\":{\"name\":\"sebcxno\"},\"\":{\"gspjlf\":\"datadyzssjlmykdygj\",\"ngwqxcrbcrgyoim\":\"datah\",\"z\":\"datas\",\"htvagwnnw\":\"datacctvkog\"}}") + "{\"type\":\"dz\",\"typeProperties\":{\"tableName\":\"datav\",\"table\":\"dataxlffqgdodnkqyip\",\"schema\":\"datamjtda\"},\"description\":\"imeftgdrf\",\"structure\":\"datalflz\",\"schema\":\"datavdava\",\"linkedServiceName\":{\"referenceName\":\"xiefcorzbidaeb\",\"parameters\":{\"fajw\":\"dataicew\",\"wwsr\":\"dataylciobb\"}},\"parameters\":{\"mnteevfg\":{\"type\":\"Object\",\"defaultValue\":\"dataecuuuex\"}},\"annotations\":[\"dataezraqsddkod\",\"datagxqfkyr\"],\"folder\":{\"name\":\"zzeglwdzfss\"},\"\":{\"ddkkraj\":\"datagaok\"}}") .toObject(DrillTableDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("lmexafjqzyhz", model.linkedServiceName().referenceName()); 
- Assertions.assertEquals(ParameterType.STRING, model.parameters().get("wxtxuzhvojyf").type()); - Assertions.assertEquals("sebcxno", model.folder().name()); + Assertions.assertEquals("imeftgdrf", model.description()); + Assertions.assertEquals("xiefcorzbidaeb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("mnteevfg").type()); + Assertions.assertEquals("zzeglwdzfss", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DrillTableDataset model = new DrillTableDataset().withDescription("m").withStructure("dataghqeuyaorservpv") - .withSchema("dataorsbeg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lmexafjqzyhz").withParameters( - mapOf("nsskydigt", "datavtuqvirl", "ocsvjekejchxzj", "datajx", "yfyixecmasjnfgng", "datacwuzs"))) - .withParameters(mapOf("wxtxuzhvojyf", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataeyvfxbfckmoaljax"), - "jef", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datalbkjcdzuiygtcyz"), - "lrlkbh", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataaldjcgldry"), - "xefppq", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datamxcaujbfomfbozpj"))) - .withAnnotations(Arrays.asList("datannjjthpsnxebycy", "datapohxubn")) - .withFolder(new DatasetFolder().withName("sebcxno")).withTableName("dataugdcr").withTable("databplvhmhur") - .withSchemaTypePropertiesSchema("datadjlz"); + DrillTableDataset model = new DrillTableDataset().withDescription("imeftgdrf") + .withStructure("datalflz") + .withSchema("datavdava") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xiefcorzbidaeb") + .withParameters(mapOf("fajw", "dataicew", "wwsr", "dataylciobb"))) + .withParameters(mapOf("mnteevfg", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataecuuuex"))) + .withAnnotations(Arrays.asList("dataezraqsddkod", "datagxqfkyr")) + .withFolder(new DatasetFolder().withName("zzeglwdzfss")) + .withTableName("datav") + .withTable("dataxlffqgdodnkqyip") + .withSchemaTypePropertiesSchema("datamjtda"); model = BinaryData.fromObject(model).toObject(DrillTableDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("lmexafjqzyhz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("wxtxuzhvojyf").type()); - Assertions.assertEquals("sebcxno", model.folder().name()); + Assertions.assertEquals("imeftgdrf", model.description()); + Assertions.assertEquals("xiefcorzbidaeb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("mnteevfg").type()); + Assertions.assertEquals("zzeglwdzfss", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java index ad6a7ac0411cb..06c228214bb82 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java @@ -19,32 +19,35 @@ public final class DynamicsAXResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsAXResourceDataset model = BinaryData.fromString( - "{\"type\":\"DynamicsAXResource\",\"typeProperties\":{\"path\":\"datahrau\"},\"description\":\"ovlx\",\"structure\":\"datavm\",\"schema\":\"datapniqwxmrgmnkgtlh\",\"linkedServiceName\":{\"referenceName\":\"krazkioiyecz\",\"parameters\":{\"qzhehgvmmnoyzg\":\"datamsvzngh\",\"pluzypkf\":\"databn\",\"xilzvxot\":\"datadf\",\"ytsqmbwcacwaaqa\":\"dataoilqcdvhyefqh\"}},\"parameters\":{\"qlreqbrcmmdts\":{\"type\":\"Int\",\"defaultValue\":\"dataaxxra\"},\"cznbabow\":{\"type\":\"Bool\",\"defaultValue\":\"datamx\"},\"ejh\":{\"type\":\"Int\",\"defaultValue\":\"datarnmjwkowxqzkkag\"}},\"annotations\":[\"dataphr\"],\"folder\":{\"name\":\"peajzzy\"},\"\":{\"eyrftxytjayp\":\"dataamzmzfnt\"}}") + "{\"type\":\"xchquo\",\"typeProperties\":{\"path\":\"datatjuzvyj\"},\"description\":\"qrcsksxq\",\"structure\":\"datalrvuvdagv\",\"schema\":\"datacdp\",\"linkedServiceName\":{\"referenceName\":\"cvfyeqyodi\",\"parameters\":{\"mevljbcuwrfgpjfv\":\"datasapqhipajsniv\",\"gvcykzzu\":\"datawkseodvlmd\"}},\"parameters\":{\"vvcrsm\":{\"type\":\"Array\",\"defaultValue\":\"databbmuml\"},\"mnrut\":{\"type\":\"Int\",\"defaultValue\":\"datamxwcv\"},\"f\":{\"type\":\"Array\",\"defaultValue\":\"dataeq\"},\"opecvpkb\":{\"type\":\"Object\",\"defaultValue\":\"datat\"}},\"annotations\":[\"datanowpajfhxsmu\",\"databzadzglmuuzpsu\",\"datasypxmul\",\"datahfrerkqpyfjxkb\"],\"folder\":{\"name\":\"buq\"},\"\":{\"hyvdbrdvsv\":\"databdjkmnxsggnow\"}}") .toObject(DynamicsAXResourceDataset.class); - Assertions.assertEquals("ovlx", model.description()); - Assertions.assertEquals("krazkioiyecz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("qlreqbrcmmdts").type()); - Assertions.assertEquals("peajzzy", model.folder().name()); + Assertions.assertEquals("qrcsksxq", model.description()); + Assertions.assertEquals("cvfyeqyodi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vvcrsm").type()); + Assertions.assertEquals("buq", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsAXResourceDataset model = new DynamicsAXResourceDataset().withDescription("ovlx") - .withStructure("datavm").withSchema("datapniqwxmrgmnkgtlh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("krazkioiyecz") - .withParameters(mapOf("qzhehgvmmnoyzg", "datamsvzngh", "pluzypkf", "databn", "xilzvxot", "datadf", - "ytsqmbwcacwaaqa", "dataoilqcdvhyefqh"))) - .withParameters(mapOf("qlreqbrcmmdts", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataaxxra"), "cznbabow", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datamx"), "ejh", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datarnmjwkowxqzkkag"))) - .withAnnotations(Arrays.asList("dataphr")).withFolder(new DatasetFolder().withName("peajzzy")) - .withPath("datahrau"); + DynamicsAXResourceDataset model = new DynamicsAXResourceDataset().withDescription("qrcsksxq") + .withStructure("datalrvuvdagv") + .withSchema("datacdp") + .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("cvfyeqyodi") + .withParameters(mapOf("mevljbcuwrfgpjfv", "datasapqhipajsniv", "gvcykzzu", "datawkseodvlmd"))) + .withParameters(mapOf("vvcrsm", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("databbmuml"), "mnrut", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamxwcv"), "f", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataeq"), "opecvpkb", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datat"))) + .withAnnotations( + Arrays.asList("datanowpajfhxsmu", "databzadzglmuuzpsu", "datasypxmul", "datahfrerkqpyfjxkb")) + .withFolder(new DatasetFolder().withName("buq")) + .withPath("datatjuzvyj"); model = BinaryData.fromObject(model).toObject(DynamicsAXResourceDataset.class); - Assertions.assertEquals("ovlx", model.description()); - Assertions.assertEquals("krazkioiyecz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("qlreqbrcmmdts").type()); - Assertions.assertEquals("peajzzy", model.folder().name()); + Assertions.assertEquals("qrcsksxq", model.description()); + Assertions.assertEquals("cvfyeqyodi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vvcrsm").type()); + Assertions.assertEquals("buq", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java index 515b1bf9e2a23..59c3ea36ed21f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java @@ -11,13 +11,13 @@ public final class DynamicsAXResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsAXResourceDatasetTypeProperties model - = BinaryData.fromString("{\"path\":\"datadrj\"}").toObject(DynamicsAXResourceDatasetTypeProperties.class); + = BinaryData.fromString("{\"path\":\"datahbtyc\"}").toObject(DynamicsAXResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DynamicsAXResourceDatasetTypeProperties model - = new DynamicsAXResourceDatasetTypeProperties().withPath("datadrj"); + = new DynamicsAXResourceDatasetTypeProperties().withPath("datahbtyc"); model = BinaryData.fromObject(model).toObject(DynamicsAXResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java index b115ce1bfec1c..a9db7654a7a70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java @@ -11,16 +11,20 @@ public final class DynamicsAXSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsAXSource model = BinaryData.fromString( - "{\"type\":\"DynamicsAXSource\",\"query\":\"datanazjvyiiezdnez\",\"httpRequestTimeout\":\"dataqzd\",\"queryTimeout\":\"datamyutzttroymi\",\"additionalColumns\":\"datakuz\",\"sourceRetryCount\":\"datacegyztzhcfuwm\",\"sourceRetryWait\":\"dataz\",\"maxConcurrentConnections\":\"datamklroogflh\",\"disableMetricsCollection\":\"dataspxblyokjwsszye\",\"\":{\"dhnferdggji\":\"datau\",\"gczjxou\":\"datanzvoehgwgqgcnkg\"}}") + "{\"type\":\"j\",\"query\":\"datatsgvvizayg\",\"httpRequestTimeout\":\"dataml\",\"queryTimeout\":\"dataijpayvlnzwicqopw\",\"additionalColumns\":\"datadleegwlhanyueizh\",\"sourceRetryCount\":\"datax\",\"sourceRetryWait\":\"datahxiotlfzbj\",\"maxConcurrentConnections\":\"datauqkb\",\"disableMetricsCollection\":\"dataicoaysargqkgaus\",\"\":{\"mlkxvfejdgojavqe\":\"datadyfyjee\",\"smnxcc\":\"dataekkvayyyowj\",\"hlokhmkqy\":\"datas\",\"ddwfhfjfato\":\"dataa\"}}") .toObject(DynamicsAXSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsAXSource model = new DynamicsAXSource().withSourceRetryCount("datacegyztzhcfuwm") - .withSourceRetryWait("dataz").withMaxConcurrentConnections("datamklroogflh") - .withDisableMetricsCollection("dataspxblyokjwsszye").withQueryTimeout("datamyutzttroymi") - .withAdditionalColumns("datakuz").withQuery("datanazjvyiiezdnez").withHttpRequestTimeout("dataqzd"); + DynamicsAXSource model = new DynamicsAXSource().withSourceRetryCount("datax") + .withSourceRetryWait("datahxiotlfzbj") + .withMaxConcurrentConnections("datauqkb") + .withDisableMetricsCollection("dataicoaysargqkgaus") + .withQueryTimeout("dataijpayvlnzwicqopw") + .withAdditionalColumns("datadleegwlhanyueizh") + .withQuery("datatsgvvizayg") + .withHttpRequestTimeout("dataml"); model = BinaryData.fromObject(model).toObject(DynamicsAXSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java index b4269c17616b6..89e1661abfc60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java @@ -19,33 +19,34 @@ public final class DynamicsCrmEntityDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsCrmEntityDataset model = BinaryData.fromString( - 
"{\"type\":\"DynamicsCrmEntity\",\"typeProperties\":{\"entityName\":\"dataiwh\"},\"description\":\"cfjnc\",\"structure\":\"datadilo\",\"schema\":\"dataajwjuriarsbcll\",\"linkedServiceName\":{\"referenceName\":\"nhzcknjxizb\",\"parameters\":{\"mlxppdndzkfevuii\":\"dataygzkztxfexwacyy\",\"kcj\":\"dataiib\",\"nopm\":\"datatqdcizeqqfop\",\"xqlyoazyfbkmvl\":\"datatdsfh\"}},\"parameters\":{\"kvhyejth\":{\"type\":\"Bool\",\"defaultValue\":\"datajzsvmaigb\"},\"ergwlckihbam\":{\"type\":\"Float\",\"defaultValue\":\"datacb\"},\"zjwdizcr\":{\"type\":\"Array\",\"defaultValue\":\"dataokknpu\"},\"c\":{\"type\":\"Object\",\"defaultValue\":\"dataiujz\"}},\"annotations\":[\"datagkr\",\"dataw\",\"datasykkbxktxbbwl\",\"datanwzoknvu\"],\"folder\":{\"name\":\"lggbqaolgzub\"},\"\":{\"kmixwewzls\":\"datalkvggcmfnsffet\",\"bthhxmoevvude\":\"datagsmepnqvxgvoh\",\"nvwxqhpjhubohxv\":\"datapfhga\"}}") + "{\"type\":\"uvqhxtozfgdkw\",\"typeProperties\":{\"entityName\":\"datazbvb\"},\"description\":\"urklp\",\"structure\":\"datagfuzkeutuipj\",\"schema\":\"datazjwaqdzqydewu\",\"linkedServiceName\":{\"referenceName\":\"xyll\",\"parameters\":{\"qwcz\":\"dataevt\",\"iysdhorkslhraqki\":\"datachwbekmuynfpkyvn\",\"qwmwqoguflteat\":\"datalwkffl\"}},\"parameters\":{\"nvxwtdqtcbjdbtqy\":{\"type\":\"Float\",\"defaultValue\":\"datazjxnjtqbgysibt\"},\"oawh\":{\"type\":\"Object\",\"defaultValue\":\"dataunsaujqgbbjvo\"}},\"annotations\":[\"datambcsloygsabdgd\",\"datae\",\"dataonsdunr\"],\"folder\":{\"name\":\"zuatqhgzuyxt\"},\"\":{\"sffavdhpiwrmuwk\":\"datadbqskgqjbvitptp\",\"yldqtmggcpd\":\"datajwbyfdwfbw\",\"zctwymzsk\":\"datamegaj\",\"eiphgliupqs\":\"dataqkkge\"}}") .toObject(DynamicsCrmEntityDataset.class); - Assertions.assertEquals("cfjnc", model.description()); - Assertions.assertEquals("nhzcknjxizb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kvhyejth").type()); - Assertions.assertEquals("lggbqaolgzub", model.folder().name()); + Assertions.assertEquals("urklp", model.description()); + Assertions.assertEquals("xyll", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("nvxwtdqtcbjdbtqy").type()); + Assertions.assertEquals("zuatqhgzuyxt", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsCrmEntityDataset model = new DynamicsCrmEntityDataset().withDescription("cfjnc") - .withStructure("datadilo").withSchema("dataajwjuriarsbcll") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nhzcknjxizb") - .withParameters(mapOf("mlxppdndzkfevuii", "dataygzkztxfexwacyy", "kcj", "dataiib", "nopm", - "datatqdcizeqqfop", "xqlyoazyfbkmvl", "datatdsfh"))) - .withParameters(mapOf("kvhyejth", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datajzsvmaigb"), - "ergwlckihbam", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datacb"), - "zjwdizcr", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataokknpu"), - "c", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataiujz"))) - .withAnnotations(Arrays.asList("datagkr", "dataw", "datasykkbxktxbbwl", "datanwzoknvu")) - .withFolder(new DatasetFolder().withName("lggbqaolgzub")).withEntityName("dataiwh"); + DynamicsCrmEntityDataset model = new DynamicsCrmEntityDataset().withDescription("urklp") + .withStructure("datagfuzkeutuipj") + .withSchema("datazjwaqdzqydewu") + 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("xyll") + .withParameters(mapOf("qwcz", "dataevt", "iysdhorkslhraqki", "datachwbekmuynfpkyvn", "qwmwqoguflteat", + "datalwkffl"))) + .withParameters(mapOf("nvxwtdqtcbjdbtqy", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datazjxnjtqbgysibt"), + "oawh", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataunsaujqgbbjvo"))) + .withAnnotations(Arrays.asList("datambcsloygsabdgd", "datae", "dataonsdunr")) + .withFolder(new DatasetFolder().withName("zuatqhgzuyxt")) + .withEntityName("datazbvb"); model = BinaryData.fromObject(model).toObject(DynamicsCrmEntityDataset.class); - Assertions.assertEquals("cfjnc", model.description()); - Assertions.assertEquals("nhzcknjxizb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kvhyejth").type()); - Assertions.assertEquals("lggbqaolgzub", model.folder().name()); + Assertions.assertEquals("urklp", model.description()); + Assertions.assertEquals("xyll", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("nvxwtdqtcbjdbtqy").type()); + Assertions.assertEquals("zuatqhgzuyxt", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java index e1620a642d2ed..10204077956a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java @@ -10,14 +10,15 @@ public final class DynamicsCrmEntityDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DynamicsCrmEntityDatasetTypeProperties model = BinaryData.fromString("{\"entityName\":\"dataaybvrhho\"}") - .toObject(DynamicsCrmEntityDatasetTypeProperties.class); + DynamicsCrmEntityDatasetTypeProperties model + = BinaryData.fromString("{\"entityName\":\"dataobkjlcaxsqcomjiq\"}") + .toObject(DynamicsCrmEntityDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { DynamicsCrmEntityDatasetTypeProperties model - = new DynamicsCrmEntityDatasetTypeProperties().withEntityName("dataaybvrhho"); + = new DynamicsCrmEntityDatasetTypeProperties().withEntityName("dataobkjlcaxsqcomjiq"); model = BinaryData.fromObject(model).toObject(DynamicsCrmEntityDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java index d2aa39564d63f..728b8b6fbeb43 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java @@ -11,15 +11,18 @@ public final class DynamicsCrmSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsCrmSource model = BinaryData.fromString( - "{\"type\":\"DynamicsCrmSource\",\"query\":\"datasjhh\",\"additionalColumns\":\"datawizqvgadolep\",\"sourceRetryCount\":\"datalzjhaqxfams\",\"sourceRetryWait\":\"datacesvvrabbyfhzy\",\"maxConcurrentConnections\":\"datarxenp\",\"disableMetricsCollection\":\"dataanlb\",\"\":{\"hoxcgfyzluilzgp\":\"datadwrcjtanbwxhlqio\",\"nxjnqm\":\"datahjakz\",\"vpfyupgojrwpoxuy\":\"datajslwmjlpb\"}}") + "{\"type\":\"ipuyefhhdrmg\",\"query\":\"dataqlafi\",\"additionalColumns\":\"dataw\",\"sourceRetryCount\":\"datafgyxkgqwmpghxpcx\",\"sourceRetryWait\":\"databnk\",\"maxConcurrentConnections\":\"datacodhxsbx\",\"disableMetricsCollection\":\"datafwxdouuyzpaq\",\"\":{\"fhk\":\"datajxgedtmzh\"}}") .toObject(DynamicsCrmSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsCrmSource model = new DynamicsCrmSource().withSourceRetryCount("datalzjhaqxfams") - .withSourceRetryWait("datacesvvrabbyfhzy").withMaxConcurrentConnections("datarxenp") - .withDisableMetricsCollection("dataanlb").withQuery("datasjhh").withAdditionalColumns("datawizqvgadolep"); + DynamicsCrmSource model = new DynamicsCrmSource().withSourceRetryCount("datafgyxkgqwmpghxpcx") + .withSourceRetryWait("databnk") + .withMaxConcurrentConnections("datacodhxsbx") + .withDisableMetricsCollection("datafwxdouuyzpaq") + .withQuery("dataqlafi") + .withAdditionalColumns("dataw"); model = BinaryData.fromObject(model).toObject(DynamicsCrmSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java index cae459c2cce07..aecd37c60d441 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java @@ -19,29 +19,31 @@ public final class DynamicsEntityDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsEntityDataset model = BinaryData.fromString( - "{\"type\":\"DynamicsEntity\",\"typeProperties\":{\"entityName\":\"datammtbt\"},\"description\":\"u\",\"structure\":\"datavvraabeurdeewlsu\",\"schema\":\"datacbwkdwjyj\",\"linkedServiceName\":{\"referenceName\":\"zni\",\"parameters\":{\"cr\":\"dataofmftasp\"}},\"parameters\":{\"nuwqxungro\":{\"type\":\"Float\",\"defaultValue\":\"datatrnighm\"}},\"annotations\":[\"datafmsxjwdy\",\"datawxm\"],\"folder\":{\"name\":\"ow\"},\"\":{\"chy\":\"dataeerclbltbhpw\",\"gpruccwme\":\"dataurjwmvwryvdifkii\"}}") + 
"{\"type\":\"itaxju\",\"typeProperties\":{\"entityName\":\"datakitzm\"},\"description\":\"tjlxsgcemegdzdvy\",\"structure\":\"datau\",\"schema\":\"dataf\",\"linkedServiceName\":{\"referenceName\":\"yzufldifnivlutgg\",\"parameters\":{\"oiyygkts\":\"datacxauhvcgzxhklsqx\",\"xxoxwfzbkv\":\"dataj\"}},\"parameters\":{\"snbwutlvuwm\":{\"type\":\"Bool\",\"defaultValue\":\"dataxphsowbe\"}},\"annotations\":[\"dataustihtgrafjajvky\",\"datammjczvog\"],\"folder\":{\"name\":\"rjenn\"},\"\":{\"xnrp\":\"dataaeuwqdwxhhlbmyph\",\"ywbihqbtodjfyx\":\"datahewokyqsfkxf\",\"rugyozzzawnjdv\":\"datavkvwzdmvddqw\"}}") .toObject(DynamicsEntityDataset.class); - Assertions.assertEquals("u", model.description()); - Assertions.assertEquals("zni", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("nuwqxungro").type()); - Assertions.assertEquals("ow", model.folder().name()); + Assertions.assertEquals("tjlxsgcemegdzdvy", model.description()); + Assertions.assertEquals("yzufldifnivlutgg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("snbwutlvuwm").type()); + Assertions.assertEquals("rjenn", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsEntityDataset model = new DynamicsEntityDataset().withDescription("u") - .withStructure("datavvraabeurdeewlsu").withSchema("datacbwkdwjyj") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("zni").withParameters(mapOf("cr", "dataofmftasp"))) - .withParameters(mapOf("nuwqxungro", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatrnighm"))) - .withAnnotations(Arrays.asList("datafmsxjwdy", "datawxm")).withFolder(new DatasetFolder().withName("ow")) - .withEntityName("datammtbt"); + DynamicsEntityDataset model = new DynamicsEntityDataset().withDescription("tjlxsgcemegdzdvy") + .withStructure("datau") + .withSchema("dataf") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yzufldifnivlutgg") + .withParameters(mapOf("oiyygkts", "datacxauhvcgzxhklsqx", "xxoxwfzbkv", "dataj"))) + .withParameters(mapOf("snbwutlvuwm", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataxphsowbe"))) + .withAnnotations(Arrays.asList("dataustihtgrafjajvky", "datammjczvog")) + .withFolder(new DatasetFolder().withName("rjenn")) + .withEntityName("datakitzm"); model = BinaryData.fromObject(model).toObject(DynamicsEntityDataset.class); - Assertions.assertEquals("u", model.description()); - Assertions.assertEquals("zni", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("nuwqxungro").type()); - Assertions.assertEquals("ow", model.folder().name()); + Assertions.assertEquals("tjlxsgcemegdzdvy", model.description()); + Assertions.assertEquals("yzufldifnivlutgg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("snbwutlvuwm").type()); + Assertions.assertEquals("rjenn", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java index 34f874abe19a4..a3b42c7fcc27f 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java @@ -10,14 +10,13 @@ public final class DynamicsEntityDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - DynamicsEntityDatasetTypeProperties model = BinaryData.fromString("{\"entityName\":\"datatxsytrtexegwmrq\"}") - .toObject(DynamicsEntityDatasetTypeProperties.class); + DynamicsEntityDatasetTypeProperties model + = BinaryData.fromString("{\"entityName\":\"datarho\"}").toObject(DynamicsEntityDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsEntityDatasetTypeProperties model - = new DynamicsEntityDatasetTypeProperties().withEntityName("datatxsytrtexegwmrq"); + DynamicsEntityDatasetTypeProperties model = new DynamicsEntityDatasetTypeProperties().withEntityName("datarho"); model = BinaryData.fromObject(model).toObject(DynamicsEntityDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java index 65d58f8ed4db9..5927dab89acf3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java @@ -11,15 +11,18 @@ public final class DynamicsSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { DynamicsSource model = BinaryData.fromString( - "{\"type\":\"DynamicsSource\",\"query\":\"datamtxbi\",\"additionalColumns\":\"datal\",\"sourceRetryCount\":\"dataeqotvocjktihnwyv\",\"sourceRetryWait\":\"datasgfdmtfnbvtxq\",\"maxConcurrentConnections\":\"datab\",\"disableMetricsCollection\":\"dataqbdiahjxcdhp\",\"\":{\"yn\":\"datawsfddyqp\",\"lj\":\"dataowmjsur\"}}") + "{\"type\":\"pmwo\",\"query\":\"datawibtkr\",\"additionalColumns\":\"datagbzrlfsewusqupkr\",\"sourceRetryCount\":\"datajin\",\"sourceRetryWait\":\"datartecfvzslttkp\",\"maxConcurrentConnections\":\"datakujceeczhsdpfoa\",\"disableMetricsCollection\":\"dataahuwxodddqz\",\"\":{\"qnao\":\"datarytgsocqkdclb\",\"mp\":\"datas\",\"uaysxh\":\"datacbhezau\"}}") .toObject(DynamicsSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - DynamicsSource model = new DynamicsSource().withSourceRetryCount("dataeqotvocjktihnwyv") - .withSourceRetryWait("datasgfdmtfnbvtxq").withMaxConcurrentConnections("datab") - .withDisableMetricsCollection("dataqbdiahjxcdhp").withQuery("datamtxbi").withAdditionalColumns("datal"); + DynamicsSource model = new DynamicsSource().withSourceRetryCount("datajin") + .withSourceRetryWait("datartecfvzslttkp") + .withMaxConcurrentConnections("datakujceeczhsdpfoa") + .withDisableMetricsCollection("dataahuwxodddqz") + .withQuery("datawibtkr") + .withAdditionalColumns("datagbzrlfsewusqupkr"); model = BinaryData.fromObject(model).toObject(DynamicsSource.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java index 5a79cfe049861..b66715ced25b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java @@ -19,33 +19,35 @@ public final class EloquaObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { EloquaObjectDataset model = BinaryData.fromString( - "{\"type\":\"EloquaObject\",\"typeProperties\":{\"tableName\":\"dataqejex\"},\"description\":\"lhuhdkubgyw\",\"structure\":\"datarklpdyehjrwcflv\",\"schema\":\"dataocaywmfvuhz\",\"linkedServiceName\":{\"referenceName\":\"olhve\",\"parameters\":{\"ymlhklmnjqzm\":\"datagsfmhwdxqu\",\"bthb\":\"dataynhitrnwqgq\"}},\"parameters\":{\"ywzrqeiadd\":{\"type\":\"Bool\",\"defaultValue\":\"datarjocogwfv\"},\"z\":{\"type\":\"Int\",\"defaultValue\":\"datauvnlmdcnutiexm\"},\"tfqhhvvwzprjaaai\":{\"type\":\"String\",\"defaultValue\":\"databqvioyn\"},\"cn\":{\"type\":\"String\",\"defaultValue\":\"datatvavlyaqtl\"}},\"annotations\":[\"datafzvz\",\"datazqcrlkorvbzg\",\"datahenfsfyq\"],\"folder\":{\"name\":\"wmh\"},\"\":{\"agwaakktbjort\":\"datafbywjiaaosla\",\"lislxyljzbkd\":\"datatkajqhsnsej\"}}") + "{\"type\":\"ljnzpqhqqsarkyul\",\"typeProperties\":{\"tableName\":\"dataqrwwbdrwro\"},\"description\":\"mea\",\"structure\":\"datajqenhekzazvgeytl\",\"schema\":\"datas\",\"linkedServiceName\":{\"referenceName\":\"fcvedks\",\"parameters\":{\"crhnx\":\"datatolkyqfnj\",\"wunrtcsra\":\"datamfv\",\"nbunsodtevzshqy\":\"datawhyxfshmbydmv\",\"pshaezc\":\"dataeb\"}},\"parameters\":{\"ejst\":{\"type\":\"Array\",\"defaultValue\":\"datarnovqdmh\"},\"jakgk\":{\"type\":\"SecureString\",\"defaultValue\":\"dataa\"},\"hsobvcnsbioez\":{\"type\":\"String\",\"defaultValue\":\"dataqdsoqzhxwdjoxwkb\"}},\"annotations\":[\"datahn\",\"dataz\"],\"folder\":{\"name\":\"z\"},\"\":{\"cmwbejywwwvn\":\"datagtkxncwdytnlr\",\"fyziruqvgnjxiak\":\"datawwkrmqevrhhafqfu\",\"yugmb\":\"datayjmzbmxgikyl\",\"loxtvq\":\"datadcv\"}}") .toObject(EloquaObjectDataset.class); - Assertions.assertEquals("lhuhdkubgyw", model.description()); - Assertions.assertEquals("olhve", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ywzrqeiadd").type()); - Assertions.assertEquals("wmh", model.folder().name()); + Assertions.assertEquals("mea", model.description()); + Assertions.assertEquals("fcvedks", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ejst").type()); + Assertions.assertEquals("z", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - EloquaObjectDataset model = new EloquaObjectDataset().withDescription("lhuhdkubgyw") - .withStructure("datarklpdyehjrwcflv").withSchema("dataocaywmfvuhz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("olhve") - .withParameters(mapOf("ymlhklmnjqzm", "datagsfmhwdxqu", "bthb", "dataynhitrnwqgq"))) - .withParameters(mapOf("ywzrqeiadd", - new 
ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datarjocogwfv"), "z", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datauvnlmdcnutiexm"), - "tfqhhvvwzprjaaai", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("databqvioyn"), "cn", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatvavlyaqtl"))) - .withAnnotations(Arrays.asList("datafzvz", "datazqcrlkorvbzg", "datahenfsfyq")) - .withFolder(new DatasetFolder().withName("wmh")).withTableName("dataqejex"); + EloquaObjectDataset model = new EloquaObjectDataset().withDescription("mea") + .withStructure("datajqenhekzazvgeytl") + .withSchema("datas") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fcvedks") + .withParameters(mapOf("crhnx", "datatolkyqfnj", "wunrtcsra", "datamfv", "nbunsodtevzshqy", + "datawhyxfshmbydmv", "pshaezc", "dataeb"))) + .withParameters(mapOf("ejst", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datarnovqdmh"), "jakgk", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataa"), + "hsobvcnsbioez", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqdsoqzhxwdjoxwkb"))) + .withAnnotations(Arrays.asList("datahn", "dataz")) + .withFolder(new DatasetFolder().withName("z")) + .withTableName("dataqrwwbdrwro"); model = BinaryData.fromObject(model).toObject(EloquaObjectDataset.class); - Assertions.assertEquals("lhuhdkubgyw", model.description()); - Assertions.assertEquals("olhve", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ywzrqeiadd").type()); - Assertions.assertEquals("wmh", model.folder().name()); + Assertions.assertEquals("mea", model.description()); + Assertions.assertEquals("fcvedks", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ejst").type()); + Assertions.assertEquals("z", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java index 90f90b9d15f93..44ed2dce99567 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java @@ -11,16 +11,19 @@ public final class EloquaSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { EloquaSource model = BinaryData.fromString( - "{\"type\":\"EloquaSource\",\"query\":\"datavijdr\",\"queryTimeout\":\"datayqvhz\",\"additionalColumns\":\"datayvhrenozl\",\"sourceRetryCount\":\"dataqfghlosho\",\"sourceRetryWait\":\"datakpcmtsbandesalv\",\"maxConcurrentConnections\":\"datawrljmlo\",\"disableMetricsCollection\":\"datatzvtfyqe\",\"\":{\"xhcygfg\":\"databsyni\",\"aosttbwap\":\"datamdbazggr\"}}") + 
"{\"type\":\"kjithx\",\"query\":\"dataqrc\",\"queryTimeout\":\"dataovljtrmlxrqllug\",\"additionalColumns\":\"datambwdkziwwb\",\"sourceRetryCount\":\"datavpkvc\",\"sourceRetryWait\":\"datawcfshhck\",\"maxConcurrentConnections\":\"datafm\",\"disableMetricsCollection\":\"dataprtk\",\"\":{\"cugwkuhqbwgmznv\":\"datawuep\",\"qkieyqpubw\":\"datawcnj\",\"cjrbsq\":\"datazxetx\"}}") .toObject(EloquaSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - EloquaSource model - = new EloquaSource().withSourceRetryCount("dataqfghlosho").withSourceRetryWait("datakpcmtsbandesalv") - .withMaxConcurrentConnections("datawrljmlo").withDisableMetricsCollection("datatzvtfyqe") - .withQueryTimeout("datayqvhz").withAdditionalColumns("datayvhrenozl").withQuery("datavijdr"); + EloquaSource model = new EloquaSource().withSourceRetryCount("datavpkvc") + .withSourceRetryWait("datawcfshhck") + .withMaxConcurrentConnections("datafm") + .withDisableMetricsCollection("dataprtk") + .withQueryTimeout("dataovljtrmlxrqllug") + .withAdditionalColumns("datambwdkziwwb") + .withQuery("dataqrc"); model = BinaryData.fromObject(model).toObject(EloquaSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java index f5c771b43ff6a..42ea4b8fe7e91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java @@ -13,19 +13,19 @@ public final class EntityReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { EntityReference model - = BinaryData.fromString("{\"type\":\"IntegrationRuntimeReference\",\"referenceName\":\"bkubzqazdlrkvi\"}") + = BinaryData.fromString("{\"type\":\"IntegrationRuntimeReference\",\"referenceName\":\"agugwxdwtfmfj\"}") .toObject(EntityReference.class); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.type()); - Assertions.assertEquals("bkubzqazdlrkvi", model.referenceName()); + Assertions.assertEquals("agugwxdwtfmfj", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { EntityReference model = new EntityReference().withType(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE) - .withReferenceName("bkubzqazdlrkvi"); + .withReferenceName("agugwxdwtfmfj"); model = BinaryData.fromObject(model).toObject(EntityReference.class); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.type()); - Assertions.assertEquals("bkubzqazdlrkvi", model.referenceName()); + Assertions.assertEquals("agugwxdwtfmfj", model.referenceName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java index d39b0a2cb0313..8169c1d9b7979 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java @@ -12,18 +12,18 @@ public final class EnvironmentVariableSetupTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { EnvironmentVariableSetup model = BinaryData.fromString( - "{\"type\":\"EnvironmentVariableSetup\",\"typeProperties\":{\"variableName\":\"cz\",\"variableValue\":\"tniwfcu\"}}") + "{\"type\":\"zcufysalbjfkj\",\"typeProperties\":{\"variableName\":\"jguzsyfwamhmeo\",\"variableValue\":\"xbzagwnompvyis\"}}") .toObject(EnvironmentVariableSetup.class); - Assertions.assertEquals("cz", model.variableName()); - Assertions.assertEquals("tniwfcu", model.variableValue()); + Assertions.assertEquals("jguzsyfwamhmeo", model.variableName()); + Assertions.assertEquals("xbzagwnompvyis", model.variableValue()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { EnvironmentVariableSetup model - = new EnvironmentVariableSetup().withVariableName("cz").withVariableValue("tniwfcu"); + = new EnvironmentVariableSetup().withVariableName("jguzsyfwamhmeo").withVariableValue("xbzagwnompvyis"); model = BinaryData.fromObject(model).toObject(EnvironmentVariableSetup.class); - Assertions.assertEquals("cz", model.variableName()); - Assertions.assertEquals("tniwfcu", model.variableValue()); + Assertions.assertEquals("jguzsyfwamhmeo", model.variableName()); + Assertions.assertEquals("xbzagwnompvyis", model.variableValue()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java index a40f2dbe0d0de..7301ae02e537a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java @@ -12,18 +12,19 @@ public final class EnvironmentVariableSetupTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { EnvironmentVariableSetupTypeProperties model - = BinaryData.fromString("{\"variableName\":\"whxm\",\"variableValue\":\"ibxkcegc\"}") + = BinaryData.fromString("{\"variableName\":\"xqfzmhkridipwczj\",\"variableValue\":\"nmpifj\"}") .toObject(EnvironmentVariableSetupTypeProperties.class); - Assertions.assertEquals("whxm", model.variableName()); - Assertions.assertEquals("ibxkcegc", model.variableValue()); + Assertions.assertEquals("xqfzmhkridipwczj", model.variableName()); + Assertions.assertEquals("nmpifj", model.variableValue()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { EnvironmentVariableSetupTypeProperties model - = new EnvironmentVariableSetupTypeProperties().withVariableName("whxm").withVariableValue("ibxkcegc"); + = new EnvironmentVariableSetupTypeProperties().withVariableName("xqfzmhkridipwczj") + .withVariableValue("nmpifj"); model = BinaryData.fromObject(model).toObject(EnvironmentVariableSetupTypeProperties.class); - 
Assertions.assertEquals("whxm", model.variableName()); - Assertions.assertEquals("ibxkcegc", model.variableValue()); + Assertions.assertEquals("xqfzmhkridipwczj", model.variableName()); + Assertions.assertEquals("nmpifj", model.variableValue()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java index 49583f7ef9901..9eb6df0157182 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java @@ -21,36 +21,42 @@ public final class ExcelDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExcelDataset model = BinaryData.fromString( - "{\"type\":\"Excel\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datapjzobdwbcpr\",\"fileName\":\"datawkuh\",\"\":{\"awp\":\"datanaczkfwfat\",\"cumecsaaqg\":\"datanskivdwgt\",\"hzhrbkhtm\":\"dataqbdiuycsbskowkr\",\"ucyhfaimq\":\"dataowiuasfgq\"}},\"sheetName\":\"dataruozkgyfp\",\"sheetIndex\":\"datae\",\"range\":\"datafm\",\"firstRowAsHeader\":\"datask\",\"compression\":{\"type\":\"dataxvlzjxplhpevasyn\",\"level\":\"datazjyielbqrvv\",\"\":{\"cqx\":\"dataknmp\",\"bn\":\"dataiqasif\",\"jkht\":\"datanstlpwqpjn\",\"ztsm\":\"datapsvyouweuiyxfw\"}},\"nullValue\":\"datab\"},\"description\":\"yllznfhkqytkztad\",\"structure\":\"datagfzdgjfcycrsvl\",\"schema\":\"datayhigqkzjuqwqaj\",\"linkedServiceName\":{\"referenceName\":\"uzxp\",\"parameters\":{\"soscien\":\"datayoipnfdb\",\"rtek\":\"datazfvbennmfkbpj\",\"iurztv\":\"datawnthropmduds\"}},\"parameters\":{\"hpuzxkpye\":{\"type\":\"SecureString\",\"defaultValue\":\"datafecqkoqyouerga\"}},\"annotations\":[\"datadyldhgyedzfzqiy\",\"dataqhtdereunokakzwh\",\"datajlwyxedzn\"],\"folder\":{\"name\":\"fomckewv\"},\"\":{\"jt\":\"datafopxf\",\"uywezygvadg\":\"datadyzoutxfptofhg\",\"gehlufbortbnu\":\"dataaqwvkgjpytpmpvdn\",\"axzs\":\"datak\"}}") + "{\"type\":\"dscnns\",\"typeProperties\":{\"location\":{\"type\":\"rfihscj\",\"folderPath\":\"datakmhzbh\",\"fileName\":\"datahf\",\"\":{\"nuqqkotauratnicp\":\"datahbnnxemv\"}},\"sheetName\":\"datazsclefyrletn\",\"sheetIndex\":\"datalmfdg\",\"range\":\"databbuypwovvvsflee\",\"firstRowAsHeader\":\"dataoqayrehjuqwv\",\"compression\":{\"type\":\"dataxrlzhpziha\",\"level\":\"datanqqz\",\"\":{\"gmkfw\":\"dataqzubfonfd\",\"ewfhxwyrkbre\":\"datajcw\",\"ynjpchamkaepl\":\"datazl\",\"uywevtjrieikmwl\":\"dataajubo\"}},\"nullValue\":\"dataklfnisyxgucbmt\"},\"description\":\"cyyuvtzrxzhclec\",\"structure\":\"datatzqzcloy\",\"schema\":\"datau\",\"linkedServiceName\":{\"referenceName\":\"gi\",\"parameters\":{\"mwb\":\"datagyresgzsd\",\"lb\":\"dataorj\",\"xqjsiuepm\":\"datahychakvyrfbqvum\"}},\"parameters\":{\"t\":{\"type\":\"Object\",\"defaultValue\":\"datalpqmp\"},\"mrtuxyp\":{\"type\":\"String\",\"defaultValue\":\"datavulb\"}},\"annotations\":[\"datacaeo\",\"dataifq\",\"dataywjflobh\"],\"folder\":{\"name\":\"momfecorkfroc\"},\"\":{\"sylslurbqfygpnyh\":\"dataxldjmzezbj\"}}") .toObject(ExcelDataset.class); - Assertions.assertEquals("yllznfhkqytkztad", model.description()); - Assertions.assertEquals("uzxp", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("hpuzxkpye").type()); - Assertions.assertEquals("fomckewv", model.folder().name()); + Assertions.assertEquals("cyyuvtzrxzhclec", model.description()); + Assertions.assertEquals("gi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("t").type()); + Assertions.assertEquals("momfecorkfroc", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExcelDataset model = new ExcelDataset().withDescription("yllznfhkqytkztad").withStructure("datagfzdgjfcycrsvl") - .withSchema("datayhigqkzjuqwqaj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uzxp").withParameters( - mapOf("soscien", "datayoipnfdb", "rtek", "datazfvbennmfkbpj", "iurztv", "datawnthropmduds"))) - .withParameters(mapOf("hpuzxkpye", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datafecqkoqyouerga"))) - .withAnnotations(Arrays.asList("datadyldhgyedzfzqiy", "dataqhtdereunokakzwh", "datajlwyxedzn")) - .withFolder(new DatasetFolder().withName("fomckewv")) - .withLocation(new DatasetLocation().withFolderPath("datapjzobdwbcpr").withFileName("datawkuh") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withSheetName("dataruozkgyfp").withSheetIndex("datae").withRange("datafm").withFirstRowAsHeader("datask") - .withCompression(new DatasetCompression().withType("dataxvlzjxplhpevasyn").withLevel("datazjyielbqrvv") + ExcelDataset model = new ExcelDataset().withDescription("cyyuvtzrxzhclec") + .withStructure("datatzqzcloy") + .withSchema("datau") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gi") + .withParameters(mapOf("mwb", "datagyresgzsd", "lb", "dataorj", "xqjsiuepm", "datahychakvyrfbqvum"))) + .withParameters(mapOf("t", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datalpqmp"), "mrtuxyp", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datavulb"))) + .withAnnotations(Arrays.asList("datacaeo", "dataifq", "dataywjflobh")) + .withFolder(new DatasetFolder().withName("momfecorkfroc")) + .withLocation(new DatasetLocation().withFolderPath("datakmhzbh") + .withFileName("datahf") + .withAdditionalProperties(mapOf("type", "rfihscj"))) + .withSheetName("datazsclefyrletn") + .withSheetIndex("datalmfdg") + .withRange("databbuypwovvvsflee") + .withFirstRowAsHeader("dataoqayrehjuqwv") + .withCompression(new DatasetCompression().withType("dataxrlzhpziha") + .withLevel("datanqqz") .withAdditionalProperties(mapOf())) - .withNullValue("datab"); + .withNullValue("dataklfnisyxgucbmt"); model = BinaryData.fromObject(model).toObject(ExcelDataset.class); - Assertions.assertEquals("yllznfhkqytkztad", model.description()); - Assertions.assertEquals("uzxp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("hpuzxkpye").type()); - Assertions.assertEquals("fomckewv", model.folder().name()); + Assertions.assertEquals("cyyuvtzrxzhclec", model.description()); + Assertions.assertEquals("gi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("t").type()); + Assertions.assertEquals("momfecorkfroc", model.folder().name()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java index 50db8bbb7436d..977d4e169baf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java @@ -15,19 +15,24 @@ public final class ExcelDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExcelDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"dataxxyjisskobqoclfl\",\"fileName\":\"dataewyhxessm\",\"\":{\"bdxmd\":\"datadpsqeq\",\"dibfmthi\":\"dataesbrujbjpppktl\",\"ejrnmi\":\"dataat\"}},\"sheetName\":\"dataqplgtkihonikz\",\"sheetIndex\":\"datazfffjilzfbpnt\",\"range\":\"datakensckh\",\"firstRowAsHeader\":\"datacarmoyfx\",\"compression\":{\"type\":\"datawykuqdnd\",\"level\":\"dataykhkg\",\"\":{\"cd\":\"datavdgtfpeer\"}},\"nullValue\":\"datanrnjrcuf\"}") + "{\"location\":{\"type\":\"zu\",\"folderPath\":\"datacagqyvouprsyt\",\"fileName\":\"datass\",\"\":{\"hgyqvpbfj\":\"datagwfnivrxpfduio\"}},\"sheetName\":\"dataqzucfzluczdquur\",\"sheetIndex\":\"datarmvhvzi\",\"range\":\"databprnqujywzcqygg\",\"firstRowAsHeader\":\"datawsvh\",\"compression\":{\"type\":\"datagqiwy\",\"level\":\"datatobdrrpnre\",\"\":{\"tt\":\"datansbfjhhlwtpkvege\",\"qsttewuvcysjeuf\":\"datazkgtzqn\",\"lpditfnonpi\":\"datax\"}},\"nullValue\":\"dataxlvrhprrvbwonleq\"}") .toObject(ExcelDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ExcelDatasetTypeProperties model = new ExcelDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("dataxxyjisskobqoclfl").withFileName("dataewyhxessm") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withSheetName("dataqplgtkihonikz").withSheetIndex("datazfffjilzfbpnt").withRange("datakensckh") - .withFirstRowAsHeader("datacarmoyfx").withCompression(new DatasetCompression().withType("datawykuqdnd") - .withLevel("dataykhkg").withAdditionalProperties(mapOf())) - .withNullValue("datanrnjrcuf"); + .withLocation(new DatasetLocation().withFolderPath("datacagqyvouprsyt") + .withFileName("datass") + .withAdditionalProperties(mapOf("type", "zu"))) + .withSheetName("dataqzucfzluczdquur") + .withSheetIndex("datarmvhvzi") + .withRange("databprnqujywzcqygg") + .withFirstRowAsHeader("datawsvh") + .withCompression(new DatasetCompression().withType("datagqiwy") + .withLevel("datatobdrrpnre") + .withAdditionalProperties(mapOf())) + .withNullValue("dataxlvrhprrvbwonleq"); model = BinaryData.fromObject(model).toObject(ExcelDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java index c7e1896f5fcaa..0c8454fa73faa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java @@ -14,18 +14,20 @@ public final class ExcelSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExcelSource model = BinaryData.fromString( - "{\"type\":\"ExcelSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datanpdjomd\",\"disableMetricsCollection\":\"datadwosjxywwvilkyht\",\"\":{\"jgsjmcybrp\":\"datayhdb\",\"uuhdk\":\"datajenbxtkghrrx\",\"yxnhu\":\"datakizyx\",\"zyrpdxygfpqxseme\":\"datajwipfryivpe\"}},\"additionalColumns\":\"datapdmm\",\"sourceRetryCount\":\"dataft\",\"sourceRetryWait\":\"dataqe\",\"maxConcurrentConnections\":\"datajopotefek\",\"disableMetricsCollection\":\"datamg\",\"\":{\"dkvhui\":\"datawidnrd\",\"hwbead\":\"datadyhodisypgapfd\"}}") + "{\"type\":\"rw\",\"storeSettings\":{\"type\":\"bubyabtowbuuhlw\",\"maxConcurrentConnections\":\"datavzuxfsmfpd\",\"disableMetricsCollection\":\"dataoqpzwfvnoy\",\"\":{\"ljzrqw\":\"datajylhv\",\"kejyavk\":\"datatswemot\"}},\"additionalColumns\":\"datavctqaq\",\"sourceRetryCount\":\"dataeyklxs\",\"sourceRetryWait\":\"databrszltvmmdsngoa\",\"maxConcurrentConnections\":\"datam\",\"disableMetricsCollection\":\"datahhnsf\",\"\":{\"jooep\":\"datakcgdnhacexibomr\"}}") .toObject(ExcelSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExcelSource model = new ExcelSource().withSourceRetryCount("dataft").withSourceRetryWait("dataqe") - .withMaxConcurrentConnections("datajopotefek").withDisableMetricsCollection("datamg") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datanpdjomd") - .withDisableMetricsCollection("datadwosjxywwvilkyht") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("datapdmm"); + ExcelSource model = new ExcelSource().withSourceRetryCount("dataeyklxs") + .withSourceRetryWait("databrszltvmmdsngoa") + .withMaxConcurrentConnections("datam") + .withDisableMetricsCollection("datahhnsf") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datavzuxfsmfpd") + .withDisableMetricsCollection("dataoqpzwfvnoy") + .withAdditionalProperties(mapOf("type", "bubyabtowbuuhlw"))) + .withAdditionalColumns("datavctqaq"); model = BinaryData.fromObject(model).toObject(ExcelSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTests.java index 6408492dfcdf9..140fb7dd65081 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTests.java @@ -27,85 +27,86 @@ public final class ExecuteDataFlowActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecuteDataFlowActivity model = BinaryData.fromString( - 
"{\"type\":\"ExecuteDataFlow\",\"typeProperties\":{\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ksoxykrma\",\"datasetParameters\":\"datanlbvahjlvbnlxebg\",\"parameters\":{\"yy\":\"datapsged\",\"zffzhbhelg\":\"dataueifmtgntlfdiqzv\",\"y\":\"datalrdgpudbimehdx\",\"ut\":\"datayfhwkbhapfnyo\"},\"\":{\"gzunbcvfz\":\"datahjrmfejeihnh\",\"afrqqfgudobutkq\":\"datapyirngfujv\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"xxcaxgraiki\",\"parameters\":{\"asbfcblvakhdi\":\"datadnchrvsfnlgwp\"}},\"folderPath\":\"dataxtfvoasdhdii\"},\"integrationRuntime\":{\"referenceName\":\"znffmxtmqartpdyh\",\"parameters\":{\"fjcowrzqyoc\":\"dataxmrhxpmtztvxfgli\"}},\"compute\":{\"computeType\":\"datagrtnitaampg\",\"coreCount\":\"datayvpxpcjnb\"},\"traceLevel\":\"datafexzzijtctfew\",\"continueOnError\":\"datawtzp\",\"runConcurrently\":\"datayluvqp\",\"sourceStagingConcurrency\":\"datavoyqs\"},\"linkedServiceName\":{\"referenceName\":\"twlxv\",\"parameters\":{\"sxcqto\":\"dataaldss\"}},\"policy\":{\"timeout\":\"datanxinl\",\"retry\":\"datacc\",\"retryIntervalInSeconds\":999097881,\"secureInput\":false,\"secureOutput\":false,\"\":{\"wybbda\":\"datafxnenhyhdusaykrj\"}},\"name\":\"dqttzslt\",\"description\":\"mdacetjmap\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"jxdirdcxuiamr\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"yivvtx\":\"datacqofpwjjtdz\",\"o\":\"datapem\",\"jdihtxvmnyslpdqd\":\"datatohzfvysvud\"}},{\"activity\":\"zjpp\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"vyyasildbq\":\"datavtymzdtnjxv\",\"mqpjbkb\":\"datagnfxgzzqqwsvj\"}},{\"activity\":\"sugkdvmgpeitfbgy\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\"],\"\":{\"ookhcu\":\"datadsgfztmhvuoavp\",\"sutseejtfnjrrxf\":\"datawgbjzznmjwqwyhh\"}},{\"activity\":\"uywzpcx\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"h\":\"datalowm\",\"tceehqeahlfujp\":\"datauhywdckvcof\",\"uumldunalo\":\"datavtakijwkwed\"}}],\"userProperties\":[{\"name\":\"ikfqcbe\",\"value\":\"datansszu\"},{\"name\":\"dvhqecqqiulwfz\",\"value\":\"dataszgbgtwaquiuzsn\"},{\"name\":\"jgnmpu\",\"value\":\"datasjfvdajmczlvcxm\"}],\"\":{\"tbgkx\":\"databrp\",\"lszcwomayr\":\"dataxwjzleeup\",\"dfxnxtiwi\":\"dataatrjpa\",\"ihsgt\":\"datanho\"}}") + 
"{\"type\":\"svdbdllmutwmarfb\",\"typeProperties\":{\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ezee\",\"datasetParameters\":\"dataligunw\",\"parameters\":{\"wxzxroht\":\"dataucvwz\",\"idspe\":\"datac\",\"cmcqslngmsip\":\"dataxdeaisk\"},\"\":{\"eiyfpfaa\":\"datanmdcotjdcxac\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"phuplfopqgcadnt\",\"parameters\":{\"vfpmezdo\":\"datald\",\"xmvxbaaznuwuwbn\":\"datagcofhinehhirbg\"}},\"folderPath\":\"datacdtxxyzgy\"},\"integrationRuntime\":{\"referenceName\":\"diqpadhrijxu\",\"parameters\":{\"sxlqsfp\":\"datacyllp\"}},\"compute\":{\"computeType\":\"datag\",\"coreCount\":\"databjjde\"},\"traceLevel\":\"dataqunernb\",\"continueOnError\":\"datavcabchdzx\",\"runConcurrently\":\"dataslshwaadc\",\"sourceStagingConcurrency\":\"datamnfavllbskl\"},\"linkedServiceName\":{\"referenceName\":\"ceaxorppzbdva\",\"parameters\":{\"scdoqocdrjguhsjl\":\"datagvqtegknaeclr\"}},\"policy\":{\"timeout\":\"dataedswhb\",\"retry\":\"datajuuboyrf\",\"retryIntervalInSeconds\":2024683073,\"secureInput\":true,\"secureOutput\":false,\"\":{\"kasbda\":\"datazsvzi\"}},\"name\":\"zlpgxsczrdmbebx\",\"description\":\"woklrhcen\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"wvvvapdshkbfwe\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"lwyzdphidhkigslc\":\"datatqnnbxgofip\"}},{\"activity\":\"kzl\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"axcbfrnttlrumv\":\"datalu\",\"vjpjnnha\":\"dataxbyedca\"}},{\"activity\":\"ednmqxba\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Failed\"],\"\":{\"g\":\"datacp\"}}],\"userProperties\":[{\"name\":\"muqxsocnkjrt\",\"value\":\"dataifxledjpuga\"},{\"name\":\"scvsj\",\"value\":\"datadmnvtpbohzcaaq\"}],\"\":{\"cbnmzshmqnnbbv\":\"dataaqfvplfy\",\"uwx\":\"datasq\",\"fyddrsairxnw\":\"dataqdtvejilqscjxpro\"}}") .toObject(ExecuteDataFlowActivity.class); - Assertions.assertEquals("dqttzslt", model.name()); - Assertions.assertEquals("mdacetjmap", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("jxdirdcxuiamr", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ikfqcbe", model.userProperties().get(0).name()); - Assertions.assertEquals("twlxv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(999097881, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("zlpgxsczrdmbebx", model.name()); + Assertions.assertEquals("woklrhcen", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("wvvvapdshkbfwe", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("muqxsocnkjrt", model.userProperties().get(0).name()); + Assertions.assertEquals("ceaxorppzbdva", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2024683073, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); 
Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("ksoxykrma", model.dataFlow().referenceName()); - Assertions.assertEquals("xxcaxgraiki", model.staging().linkedService().referenceName()); - Assertions.assertEquals("znffmxtmqartpdyh", model.integrationRuntime().referenceName()); + Assertions.assertEquals("ezee", model.dataFlow().referenceName()); + Assertions.assertEquals("phuplfopqgcadnt", model.staging().linkedService().referenceName()); + Assertions.assertEquals("diqpadhrijxu", model.integrationRuntime().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecuteDataFlowActivity model - = new ExecuteDataFlowActivity().withName("dqttzslt").withDescription("mdacetjmap") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("jxdirdcxuiamr") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency() - .withActivity("zjpp").withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("sugkdvmgpeitfbgy") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("uywzpcx") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ikfqcbe").withValue("datansszu"), - new UserProperty().withName("dvhqecqqiulwfz").withValue("dataszgbgtwaquiuzsn"), - new UserProperty().withName("jgnmpu").withValue("datasjfvdajmczlvcxm"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("twlxv") - .withParameters(mapOf("sxcqto", "dataaldss"))) - .withPolicy(new ActivityPolicy().withTimeout("datanxinl").withRetry("datacc") - .withRetryIntervalInSeconds(999097881).withSecureInput(false).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ksoxykrma").withDatasetParameters("datanlbvahjlvbnlxebg") - .withParameters(mapOf("yy", "datapsged", "zffzhbhelg", "dataueifmtgntlfdiqzv", "y", - "datalrdgpudbimehdx", "ut", "datayfhwkbhapfnyo")) - .withAdditionalProperties(mapOf())) - .withStaging(new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("xxcaxgraiki") - .withParameters(mapOf("asbfcblvakhdi", "datadnchrvsfnlgwp"))) - .withFolderPath("dataxtfvoasdhdii")) - .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("znffmxtmqartpdyh") - .withParameters(mapOf("fjcowrzqyoc", "dataxmrhxpmtztvxfgli"))) - .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datagrtnitaampg") - .withCoreCount("datayvpxpcjnb")) - .withTraceLevel("datafexzzijtctfew").withContinueOnError("datawtzp").withRunConcurrently("datayluvqp") - .withSourceStagingConcurrency("datavoyqs"); + ExecuteDataFlowActivity model = new ExecuteDataFlowActivity().withName("zlpgxsczrdmbebx") + .withDescription("woklrhcen") + .withState(ActivityState.ACTIVE) + 
.withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("wvvvapdshkbfwe") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, + DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("kzl") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ednmqxba") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, + DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("muqxsocnkjrt").withValue("dataifxledjpuga"), + new UserProperty().withName("scvsj").withValue("datadmnvtpbohzcaaq"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ceaxorppzbdva") + .withParameters(mapOf("scdoqocdrjguhsjl", "datagvqtegknaeclr"))) + .withPolicy(new ActivityPolicy().withTimeout("dataedswhb") + .withRetry("datajuuboyrf") + .withRetryIntervalInSeconds(2024683073) + .withSecureInput(true) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("ezee") + .withDatasetParameters("dataligunw") + .withParameters(mapOf("wxzxroht", "dataucvwz", "idspe", "datac", "cmcqslngmsip", "dataxdeaisk")) + .withAdditionalProperties(mapOf())) + .withStaging(new DataFlowStagingInfo() + .withLinkedService(new LinkedServiceReference().withReferenceName("phuplfopqgcadnt") + .withParameters(mapOf("vfpmezdo", "datald", "xmvxbaaznuwuwbn", "datagcofhinehhirbg"))) + .withFolderPath("datacdtxxyzgy")) + .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("diqpadhrijxu") + .withParameters(mapOf("sxlqsfp", "datacyllp"))) + .withCompute( + new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datag").withCoreCount("databjjde")) + .withTraceLevel("dataqunernb") + .withContinueOnError("datavcabchdzx") + .withRunConcurrently("dataslshwaadc") + .withSourceStagingConcurrency("datamnfavllbskl"); model = BinaryData.fromObject(model).toObject(ExecuteDataFlowActivity.class); - Assertions.assertEquals("dqttzslt", model.name()); - Assertions.assertEquals("mdacetjmap", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("jxdirdcxuiamr", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ikfqcbe", model.userProperties().get(0).name()); - Assertions.assertEquals("twlxv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(999097881, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("zlpgxsczrdmbebx", model.name()); + Assertions.assertEquals("woklrhcen", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("wvvvapdshkbfwe", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, 
model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("muqxsocnkjrt", model.userProperties().get(0).name()); + Assertions.assertEquals("ceaxorppzbdva", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2024683073, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("ksoxykrma", model.dataFlow().referenceName()); - Assertions.assertEquals("xxcaxgraiki", model.staging().linkedService().referenceName()); - Assertions.assertEquals("znffmxtmqartpdyh", model.integrationRuntime().referenceName()); + Assertions.assertEquals("ezee", model.dataFlow().referenceName()); + Assertions.assertEquals("phuplfopqgcadnt", model.staging().linkedService().referenceName()); + Assertions.assertEquals("diqpadhrijxu", model.integrationRuntime().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java index b88c756187a10..fda735a276ca6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java @@ -11,14 +11,15 @@ public final class ExecuteDataFlowActivityTypePropertiesComputeTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecuteDataFlowActivityTypePropertiesCompute model - = BinaryData.fromString("{\"computeType\":\"datafxuys\",\"coreCount\":\"datavdqzf\"}") + = BinaryData.fromString("{\"computeType\":\"datantnjnkvsnsiphl\",\"coreCount\":\"datacedz\"}") .toObject(ExecuteDataFlowActivityTypePropertiesCompute.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecuteDataFlowActivityTypePropertiesCompute model = new ExecuteDataFlowActivityTypePropertiesCompute() - .withComputeType("datafxuys").withCoreCount("datavdqzf"); + ExecuteDataFlowActivityTypePropertiesCompute model + = new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datantnjnkvsnsiphl") + .withCoreCount("datacedz"); model = BinaryData.fromObject(model).toObject(ExecuteDataFlowActivityTypePropertiesCompute.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesTests.java index 7f0d921a7baed..d5d3eea6c05d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesTests.java @@ -20,36 +20,38 @@ public final class 
ExecuteDataFlowActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecuteDataFlowActivityTypeProperties model = BinaryData.fromString( - "{\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"gmfnpeluvxsicp\",\"datasetParameters\":\"datakupngo\",\"parameters\":{\"f\":\"dataayrguxfjjg\"},\"\":{\"krppxj\":\"datagrubofhkbjg\",\"ladibsjirhaqedfu\":\"datarujdskkkz\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"akfxz\",\"parameters\":{\"jfrpbdxsjceyyebg\":\"datacwhjv\"}},\"folderPath\":\"datafntrbnvwhq\"},\"integrationRuntime\":{\"referenceName\":\"qdyfugksmxvevu\",\"parameters\":{\"naynlxwukpqcf\":\"datanyuy\",\"hizmzbaqrxzjm\":\"dataxagtiyvdslrrtv\",\"bdwqwh\":\"datatmedoyke\"}},\"compute\":{\"computeType\":\"datayrfjzyiniuua\",\"coreCount\":\"dataisqkotbmhryrif\"},\"traceLevel\":\"datayavhesqnvsqte\",\"continueOnError\":\"dataswnfakcchcnm\",\"runConcurrently\":\"datahduyigadpqpchgv\",\"sourceStagingConcurrency\":\"datagylbmfr\"}") + "{\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ytkeqjviawspvbcm\",\"datasetParameters\":\"dataeelmiuprfq\",\"parameters\":{\"gkfojnqmdtuyi\":\"datatdnrxrmhewdfua\"},\"\":{\"pdghqsa\":\"datapr\",\"b\":\"datab\",\"xbpqzqaavxxv\":\"datajedyyengnh\",\"ptxdrajihqwfrt\":\"datass\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"to\",\"parameters\":{\"brent\":\"datadevkntyguqsq\"}},\"folderPath\":\"dataamijgq\"},\"integrationRuntime\":{\"referenceName\":\"zvvwyhszewh\",\"parameters\":{\"vwwbxnxlhdindc\":\"datauoibwkiwytskpbre\",\"lie\":\"datatiqacnyc\"}},\"compute\":{\"computeType\":\"datarevw\",\"coreCount\":\"datawtfohcylvj\"},\"traceLevel\":\"datafznae\",\"continueOnError\":\"datayuxrufwdbimj\",\"runConcurrently\":\"dataphyxlxvo\",\"sourceStagingConcurrency\":\"datauwdesyttkaufabtp\"}") .toObject(ExecuteDataFlowActivityTypeProperties.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("gmfnpeluvxsicp", model.dataFlow().referenceName()); - Assertions.assertEquals("akfxz", model.staging().linkedService().referenceName()); - Assertions.assertEquals("qdyfugksmxvevu", model.integrationRuntime().referenceName()); + Assertions.assertEquals("ytkeqjviawspvbcm", model.dataFlow().referenceName()); + Assertions.assertEquals("to", model.staging().linkedService().referenceName()); + Assertions.assertEquals("zvvwyhszewh", model.integrationRuntime().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecuteDataFlowActivityTypeProperties model - = new ExecuteDataFlowActivityTypeProperties() - .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("gmfnpeluvxsicp").withDatasetParameters("datakupngo") - .withParameters(mapOf("f", "dataayrguxfjjg")).withAdditionalProperties(mapOf())) - .withStaging( - new DataFlowStagingInfo().withLinkedService(new LinkedServiceReference().withReferenceName("akfxz") - .withParameters(mapOf("jfrpbdxsjceyyebg", "datacwhjv"))).withFolderPath("datafntrbnvwhq")) - .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("qdyfugksmxvevu") - .withParameters(mapOf("naynlxwukpqcf", "datanyuy", "hizmzbaqrxzjm", "dataxagtiyvdslrrtv", "bdwqwh", - "datatmedoyke"))) - .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datayrfjzyiniuua") - .withCoreCount("dataisqkotbmhryrif")) - 
.withTraceLevel("datayavhesqnvsqte").withContinueOnError("dataswnfakcchcnm") - .withRunConcurrently("datahduyigadpqpchgv").withSourceStagingConcurrency("datagylbmfr"); + ExecuteDataFlowActivityTypeProperties model = new ExecuteDataFlowActivityTypeProperties() + .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("ytkeqjviawspvbcm") + .withDatasetParameters("dataeelmiuprfq") + .withParameters(mapOf("gkfojnqmdtuyi", "datatdnrxrmhewdfua")) + .withAdditionalProperties(mapOf())) + .withStaging(new DataFlowStagingInfo().withLinkedService( + new LinkedServiceReference().withReferenceName("to").withParameters(mapOf("brent", "datadevkntyguqsq"))) + .withFolderPath("dataamijgq")) + .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("zvvwyhszewh") + .withParameters(mapOf("vwwbxnxlhdindc", "datauoibwkiwytskpbre", "lie", "datatiqacnyc"))) + .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datarevw") + .withCoreCount("datawtfohcylvj")) + .withTraceLevel("datafznae") + .withContinueOnError("datayuxrufwdbimj") + .withRunConcurrently("dataphyxlxvo") + .withSourceStagingConcurrency("datauwdesyttkaufabtp"); model = BinaryData.fromObject(model).toObject(ExecuteDataFlowActivityTypeProperties.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("gmfnpeluvxsicp", model.dataFlow().referenceName()); - Assertions.assertEquals("akfxz", model.staging().linkedService().referenceName()); - Assertions.assertEquals("qdyfugksmxvevu", model.integrationRuntime().referenceName()); + Assertions.assertEquals("ytkeqjviawspvbcm", model.dataFlow().referenceName()); + Assertions.assertEquals("to", model.staging().linkedService().referenceName()); + Assertions.assertEquals("zvvwyhszewh", model.integrationRuntime().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java index 33c501b1b6cf5..75bfa19b5280b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java @@ -13,18 +13,18 @@ public final class ExecutePipelineActivityPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ExecutePipelineActivityPolicy model = BinaryData.fromString( - "{\"secureInput\":true,\"\":{\"fqbeaizvnnhxgi\":\"dataho\",\"bwpdvedm\":\"datadkrgdascmfnk\",\"onkfb\":\"datackbgxgykxszet\"}}") + ExecutePipelineActivityPolicy model = BinaryData + .fromString("{\"secureInput\":false,\"\":{\"krnow\":\"datakuxajlyiffz\",\"rcwbnmaiq\":\"dataxfykd\"}}") .toObject(ExecutePipelineActivityPolicy.class); - Assertions.assertEquals(true, model.secureInput()); + Assertions.assertEquals(false, model.secureInput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ExecutePipelineActivityPolicy model - = new ExecutePipelineActivityPolicy().withSecureInput(true).withAdditionalProperties(mapOf()); + = new 
ExecutePipelineActivityPolicy().withSecureInput(false).withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(ExecutePipelineActivityPolicy.class); - Assertions.assertEquals(true, model.secureInput()); + Assertions.assertEquals(false, model.secureInput()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java index a2822973944af..c47109cfc2f00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java @@ -22,51 +22,63 @@ public final class ExecutePipelineActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecutePipelineActivity model = BinaryData.fromString( - "{\"type\":\"ExecutePipeline\",\"policy\":{\"secureInput\":false,\"\":{\"gawqxrenjzlqbt\":\"dataamdffoibxjgcuppw\"}},\"typeProperties\":{\"pipeline\":{\"referenceName\":\"ft\",\"name\":\"pzhox\"},\"parameters\":{\"v\":\"dataynouhjvtef\",\"paralr\":\"dataedfzxsq\",\"iqgqvprl\":\"datasfnjokrf\",\"xdxnmuosoziqcui\":\"datasglqiuqsqzu\"},\"waitOnCompletion\":true},\"name\":\"yaxpukxt\",\"description\":\"ejxwbr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"xlkwyqoaejylqge\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Failed\"],\"\":{\"t\":\"dataatyoxvbwsatz\",\"eudpyh\":\"datanrortjtyl\"}},{\"activity\":\"nngijnzlokxihf\",\"dependencyConditions\":[\"Failed\"],\"\":{\"rfvbicd\":\"datajlrfwqnssxid\",\"qnqllmqeauizk\":\"datavypfobzxbfc\",\"e\":\"datajqfachfmvqnkgst\"}}],\"userProperties\":[{\"name\":\"vscfpwpqvg\",\"value\":\"datapwm\"}],\"\":{\"mtvwp\":\"datahburxnagvc\",\"zlfhn\":\"datavptqnqbdxwyo\",\"blvcalb\":\"datajuuwmcugveiiegoo\"}}") + "{\"type\":\"yeckb\",\"policy\":{\"secureInput\":true,\"\":{\"jurjc\":\"datafqwqmlisjqon\",\"kbhwwpaeceuvqa\":\"datadelqazbzixgq\",\"m\":\"dataaedvnloq\",\"qxldkhgngy\":\"datajgdbobr\"}},\"typeProperties\":{\"pipeline\":{\"referenceName\":\"fesjksmyeegbe\",\"name\":\"fwnc\"},\"parameters\":{\"v\":\"datazmyds\",\"gcmmvvbwrilc\":\"datakfzorddc\"},\"waitOnCompletion\":false},\"name\":\"depulbxgdf\",\"description\":\"ywmezoi\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"cfkkcpkvujwfyv\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"adfjxfi\":\"dataeysefsi\"}},{\"activity\":\"euqgpt\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"zl\":\"datavfbrgtoqk\",\"wiitxye\":\"datakjhu\",\"ehhkcutxmqvbh\":\"dataidk\",\"vkrskqgokhpzvph\":\"databwdu\"}},{\"activity\":\"k\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\",\"Skipped\"],\"\":{\"geovjiezkwde\":\"datakdhopzymhrfwchi\",\"okiexmfeechltx\":\"dataldocqhlbsv\",\"ua\":\"datayqw\"}},{\"activity\":\"of\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"ogbmeksegd\":\"datai\"}}],\"userProperties\":[{\"name\":\"oc\",\"value\":\"datanmxbhgsimenjh\"},{\"name\":\"wknazcwja\",\"value\":\"datayvno\"}],\"\":{\"bss\":\"datazlm\"}}") .toObject(ExecutePipelineActivity.class); - Assertions.assertEquals("yaxpukxt", 
model.name()); - Assertions.assertEquals("ejxwbr", model.description()); + Assertions.assertEquals("depulbxgdf", model.name()); + Assertions.assertEquals("ywmezoi", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("xlkwyqoaejylqge", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vscfpwpqvg", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals("ft", model.pipeline().referenceName()); - Assertions.assertEquals("pzhox", model.pipeline().name()); - Assertions.assertEquals(true, model.waitOnCompletion()); + Assertions.assertEquals("cfkkcpkvujwfyv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("oc", model.userProperties().get(0).name()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("fesjksmyeegbe", model.pipeline().referenceName()); + Assertions.assertEquals("fwnc", model.pipeline().name()); + Assertions.assertEquals(false, model.waitOnCompletion()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecutePipelineActivity model = new ExecutePipelineActivity().withName("yaxpukxt").withDescription("ejxwbr") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xlkwyqoaejylqge") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("nngijnzlokxihf") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("vscfpwpqvg").withValue("datapwm"))) - .withPolicy(new ExecutePipelineActivityPolicy().withSecureInput(false).withAdditionalProperties(mapOf())) - .withPipeline(new PipelineReference().withReferenceName("ft").withName("pzhox")) - .withParameters(mapOf("v", "dataynouhjvtef", "paralr", "dataedfzxsq", "iqgqvprl", "datasfnjokrf", - "xdxnmuosoziqcui", "datasglqiuqsqzu")) - .withWaitOnCompletion(true); + ExecutePipelineActivity model + = new ExecutePipelineActivity().withName("depulbxgdf") + .withDescription("ywmezoi") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("cfkkcpkvujwfyv") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("euqgpt") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("k") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("of") + 
.withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("oc").withValue("datanmxbhgsimenjh"), + new UserProperty().withName("wknazcwja").withValue("datayvno"))) + .withPolicy(new ExecutePipelineActivityPolicy().withSecureInput(true).withAdditionalProperties(mapOf())) + .withPipeline(new PipelineReference().withReferenceName("fesjksmyeegbe").withName("fwnc")) + .withParameters(mapOf("v", "datazmyds", "gcmmvvbwrilc", "datakfzorddc")) + .withWaitOnCompletion(false); model = BinaryData.fromObject(model).toObject(ExecutePipelineActivity.class); - Assertions.assertEquals("yaxpukxt", model.name()); - Assertions.assertEquals("ejxwbr", model.description()); + Assertions.assertEquals("depulbxgdf", model.name()); + Assertions.assertEquals("ywmezoi", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("xlkwyqoaejylqge", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vscfpwpqvg", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals("ft", model.pipeline().referenceName()); - Assertions.assertEquals("pzhox", model.pipeline().name()); - Assertions.assertEquals(true, model.waitOnCompletion()); + Assertions.assertEquals("cfkkcpkvujwfyv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("oc", model.userProperties().get(0).name()); + Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("fesjksmyeegbe", model.pipeline().referenceName()); + Assertions.assertEquals("fwnc", model.pipeline().name()); + Assertions.assertEquals(false, model.waitOnCompletion()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java index c97774db16344..2ac464422700b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java @@ -15,23 +15,22 @@ public final class ExecutePipelineActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecutePipelineActivityTypeProperties model = BinaryData.fromString( - "{\"pipeline\":{\"referenceName\":\"wfkczldepz\",\"name\":\"znjwdcisceia\"},\"parameters\":{\"mbvccuikpavi\":\"databudnxawfgm\",\"jsuiou\":\"dataxjiz\",\"enqpthcljvqmbu\":\"datatltcrtmebrssrl\",\"yzvtfexcjqrvp\":\"dataensogdvhqqxggnc\"},\"waitOnCompletion\":true}") + "{\"pipeline\":{\"referenceName\":\"joirxngmm\",\"name\":\"rxoidmnsmd\"},\"parameters\":{\"yfiochfx\":\"datakjlhkcogxrs\"},\"waitOnCompletion\":true}") 
.toObject(ExecutePipelineActivityTypeProperties.class); - Assertions.assertEquals("wfkczldepz", model.pipeline().referenceName()); - Assertions.assertEquals("znjwdcisceia", model.pipeline().name()); + Assertions.assertEquals("joirxngmm", model.pipeline().referenceName()); + Assertions.assertEquals("rxoidmnsmd", model.pipeline().name()); Assertions.assertEquals(true, model.waitOnCompletion()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ExecutePipelineActivityTypeProperties model = new ExecutePipelineActivityTypeProperties() - .withPipeline(new PipelineReference().withReferenceName("wfkczldepz").withName("znjwdcisceia")) - .withParameters(mapOf("mbvccuikpavi", "databudnxawfgm", "jsuiou", "dataxjiz", "enqpthcljvqmbu", - "datatltcrtmebrssrl", "yzvtfexcjqrvp", "dataensogdvhqqxggnc")) + .withPipeline(new PipelineReference().withReferenceName("joirxngmm").withName("rxoidmnsmd")) + .withParameters(mapOf("yfiochfx", "datakjlhkcogxrs")) .withWaitOnCompletion(true); model = BinaryData.fromObject(model).toObject(ExecutePipelineActivityTypeProperties.class); - Assertions.assertEquals("wfkczldepz", model.pipeline().referenceName()); - Assertions.assertEquals("znjwdcisceia", model.pipeline().name()); + Assertions.assertEquals("joirxngmm", model.pipeline().referenceName()); + Assertions.assertEquals("rxoidmnsmd", model.pipeline().name()); Assertions.assertEquals(true, model.waitOnCompletion()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePowerQueryActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePowerQueryActivityTypePropertiesTests.java index 01749465ac609..42c41c4c2e012 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePowerQueryActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePowerQueryActivityTypePropertiesTests.java @@ -24,300 +24,192 @@ public final class ExecutePowerQueryActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecutePowerQueryActivityTypeProperties model = BinaryData.fromString( - 
"{\"sinks\":{\"tah\":{\"script\":\"uq\",\"schemaLinkedService\":{\"referenceName\":\"nvckpd\",\"parameters\":{\"yvgukauhnhdhssul\":\"dataiaengydkgicbki\",\"c\":\"datakgfvgxmnan\",\"coruyistjwlnt\":\"dataetrtvyvxbtpqjgb\",\"tu\":\"datapi\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"ncdebpel\",\"parameters\":{\"scztsatfu\":\"datagulymouwnnhbz\",\"hietzovbu\":\"datafqqjydrhwnnux\",\"xcpnxdzpfzmdsl\":\"datavekbknrr\"}},\"name\":\"gqbyoyhfbbb\",\"description\":\"l\",\"dataset\":{\"referenceName\":\"volqprhnchpet\",\"parameters\":{\"usizsnhekpc\":\"datafmttpzwnrmpuv\",\"iee\":\"dataqesvh\"}},\"linkedService\":{\"referenceName\":\"mubzlmmctdkzp\",\"parameters\":{\"xrsxvz\":\"dataagr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ib\",\"datasetParameters\":\"dataesbjohrvkpnmaa\",\"parameters\":{\"grxooqqdlmbu\":\"datamai\",\"yfgjxmgwk\":\"datakayfjzycogwj\",\"svwauqxhqcvaib\":\"datarlnosqlf\"},\"\":{\"gtziyl\":\"datakoxljtvefbio\",\"lkzzlokmrudepzl\":\"dataxoaallveezesdn\",\"vgpvhtx\":\"datauzcwlbefjh\"}}},\"hpvursmeum\":{\"script\":\"jgfp\",\"schemaLinkedService\":{\"referenceName\":\"fmgudkfoybih\",\"parameters\":{\"wmddvfubr\":\"datakwtatkkufb\",\"rnggwujyukjfsb\":\"dataomff\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"gb\",\"parameters\":{\"gkbf\":\"dataihxwfmuwhgx\",\"avqqxtyhdik\":\"dataqsislaubij\",\"ryi\":\"dataratzgxta\",\"ovu\":\"datalfb\"}},\"name\":\"vhpichsbzgw\",\"description\":\"arbjjswzkzwtfeg\",\"dataset\":{\"referenceName\":\"hwtag\",\"parameters\":{\"uuljnang\":\"datauqudewjnzlq\",\"lwpa\":\"datacddwmnsapg\",\"tpqvhkjbgcqqeyt\":\"datartgrz\"}},\"linkedService\":{\"referenceName\":\"zoiqai\",\"parameters\":{\"qhayplakqglj\":\"datahel\",\"djbsfpaomlgy\":\"datarqatxzek\",\"kjjrjluqlcj\":\"dataazeftivpypsj\",\"cjimsgeslkwwedb\":\"datamr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"e\",\"datasetParameters\":\"datanyrtlinijcxvqjw\",\"parameters\":{\"vnbk\":\"datapqlna\",\"kqx\":\"dataqassnwvwluzs\"},\"\":{\"jtudn\":\"datawatwv\",\"xokmwnrwuanvjhos\":\"dataswwgrqiqlopb\",\"oul\":\"datainljquqeplfrgv\"}}},\"grapmftziracztls\":{\"script\":\"ps\",\"schemaLinkedService\":{\"referenceName\":\"ypwvhjskkgsf\",\"parameters\":{\"lp\":\"datapqmc\",\"tfrhjfsehtzjbuzl\":\"datazmix\",\"kzbsbcddhl\":\"datanz\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"abs\",\"parameters\":{\"gs\":\"datayjjfj\",\"nxexafql\":\"dataup\"}},\"name\":\"jmbzph\",\"description\":\"rjsgbcro\",\"dataset\":{\"referenceName\":\"ddifywxe\",\"parameters\":{\"ackfupyivqpczx\":\"databijaqyiyefleju\",\"bperkeyhybc\":\"datazlxowgzt\"}},\"linkedService\":{\"referenceName\":\"urdfzynf\",\"parameters\":{\"gmntzeauifcz\":\"dataqrnu\",\"he\":\"datarutfvzdo\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xtztfnjpwbx\",\"datasetParameters\":\"datan\",\"parameters\":{\"oydeggqwlbx\":\"datavthqj\",\"wazfrsmxgvfmb\":\"dataypkppnzaluaf\",\"ollntvfq\":\"datae\",\"plxkzcfx\":\"datajfzfhspdsraxztai\"},\"\":{\"cq\":\"dataewpqpwefzlreonsq\",\"di\":\"datan\"}}}},\"queries\":[{\"queryName\":\"ghznltjxstjge\",\"dataflowSinks\":[{\"script\":\"wnjo\",\"schemaLinkedService\":{\"referenceName\":\"pjqwxs\",\"parameters\":{\"lppnmrftnf\":\"dataineyjerfgj\",\"p\":\"datasmgsftk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"kefbfnszeemsfpm\",\"parameters\":{\"nkseq\":\"dataktsinsnikmwnzc\",\"shoeqpv\":\"datavqkkcebjzocuj\"}},\"name\":\"kpgi\",\"description\":\"lkvaiolfrceoc\",\"dataset\":{\"referenceName\":\"reicpsv\",\"parameters\":{\"yq\":\"dataksmwrbwejhocc
rl\",\"adtpbbzjevd\":\"datakkpoufu\",\"ozsflnmjschttl\":\"datavnyzhbtnagkndne\"}},\"linkedService\":{\"referenceName\":\"twrnuklshr\",\"parameters\":{\"vpadrfxqudyad\":\"datatchttbdxj\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tkdtequnbrpvoswj\",\"datasetParameters\":\"datagbgvyz\",\"parameters\":{\"srtqortxeuwb\":\"datadzwubuqxsnc\",\"zxz\":\"datajzwisxsasgfm\"},\"\":{\"dyic\":\"dataqmm\",\"nlabogmfetq\":\"datathr\",\"ngtssoiiyp\":\"datacgkikbuaqdopxbnr\",\"xygztlqszwcwan\":\"datadbpuy\"}}},{\"script\":\"jktqeomagoqfm\",\"schemaLinkedService\":{\"referenceName\":\"lbesgdlskwfi\",\"parameters\":{\"o\":\"dataqmqqrzeo\",\"roszxiwmw\":\"dataooptmpcp\",\"xmskhdvqga\":\"databruuwhhbkynfxxld\",\"awdls\":\"datamcmfbesyhpzros\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"lqnksovvbtlp\",\"parameters\":{\"pdkwv\":\"databzosytturzh\",\"ygwwxentudpvsnll\":\"dataxrxmublfsagp\",\"pmmtlwrwsgyqwfp\":\"datajbb\"}},\"name\":\"pfkyttxgtcovp\",\"description\":\"pgzgq\",\"dataset\":{\"referenceName\":\"p\",\"parameters\":{\"ena\":\"datamkbjgtascxmn\"}},\"linkedService\":{\"referenceName\":\"zxzw\",\"parameters\":{\"lahaddng\":\"datacqcxyjurkfwokzi\",\"wn\":\"dataoezmpkiuzxph\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"hnwcq\",\"datasetParameters\":\"datalbabmddgqbkng\",\"parameters\":{\"shbdvqoivbwe\":\"datawstytengfkr\",\"rivjso\":\"dataggxesxxpnckbhw\",\"ypyfhezluhqcte\":\"dataexgykriwpxc\"},\"\":{\"zn\":\"dataxagcoy\",\"nh\":\"datarscnqexlvgpgoba\",\"maqj\":\"dataktdz\"}}},{\"script\":\"pidvcoghp\",\"schemaLinkedService\":{\"referenceName\":\"vskfrirxlvusghdv\",\"parameters\":{\"lpu\":\"databubaorkc\",\"oywknlzob\":\"datadnpdxx\",\"gi\":\"datahqefza\",\"nzqpfgrqmq\":\"databstx\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"h\",\"parameters\":{\"tjxggspydmul\":\"datathceoujfa\",\"vxy\":\"datalzrhc\",\"eggtyifmfi\":\"dataloxfnzjpg\"}},\"name\":\"x\",\"description\":\"bayxdrwunoo\",\"dataset\":{\"referenceName\":\"q\",\"parameters\":{\"hpuhljmluzvtpt\":\"datarbhqxewdcdnqhkn\",\"gdwgqkoxbghpg\":\"datajabszqcz\"}},\"linkedService\":{\"referenceName\":\"rpab\",\"parameters\":{\"x\":\"datavawmrmwrzmfnjs\",\"vragr\":\"datanst\",\"mjmpxshyxiezmp\":\"dataimunmgtkyzupjn\",\"lfxf\":\"datadaxgwgbpblsas\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ybpwzg\",\"datasetParameters\":\"datan\",\"parameters\":{\"eobr\":\"datautt\"},\"\":{\"b\":\"databftfs\",\"pigfnshlu\":\"dataarfbxza\",\"bt\":\"datarlmpincatsh\",\"udpjn\":\"datagtibmx\"}}},{\"script\":\"txpbv\",\"schemaLinkedService\":{\"referenceName\":\"lnsahpsw\",\"parameters\":{\"h\":\"dataifgsaka\",\"hmckzbuadoxlle\":\"datadybjgyxbwhuta\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"vcyyvpobcxnrwazi\",\"parameters\":{\"ll\":\"dataaaj\",\"cweeakgtrwosps\":\"datakwquwoxnb\",\"fihpydywwj\":\"datakdtdrvihu\",\"uviifuy\":\"dataqdch\"}},\"name\":\"llppljcaq\",\"description\":\"rvchsarff\",\"dataset\":{\"referenceName\":\"etmehdzeyh\",\"parameters\":{\"zl\":\"datazobwpn\",\"nywfyoimw\":\"datarinwtvsb\",\"mtddkyyrpbnqi\":\"dataeoutztlnhg\",\"pishcrxc\":\"datalocyiuiwkr\"}},\"linkedService\":{\"referenceName\":\"nvpiicnwtgyy\",\"parameters\":{\"wynhv\":\"dataujnzxhotyhyjben\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ythqgzip\",\"datasetParameters\":\"dataczzvfdhsmqygm\",\"parameters\":{\"vlixwebjykafiiz\":\"datanlobycrjeaxjgo\"},\"\":{\"lfetlmmdgeb\":\"datatsmcncjtovhcelw\",\"weryzgkcwwndole\":\"dataoqxattthazq\"}}}]},{\"queryName\":\"yaszuoheuifshs\",\"dataflow
Sinks\":[{\"script\":\"dzikzt\",\"schemaLinkedService\":{\"referenceName\":\"qkwg\",\"parameters\":{\"ap\":\"dataeb\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"fapzfqzvkorsrp\",\"parameters\":{\"wbdk\":\"dataekxmjkiqbqtm\",\"hpfrexbg\":\"datauxda\"}},\"name\":\"wjmsogzc\",\"description\":\"djtwq\",\"dataset\":{\"referenceName\":\"wjevdnpkd\",\"parameters\":{\"qaqbaevh\":\"datalrzvjvlnafpfou\",\"xwt\":\"datawcdjxqxfvklu\"}},\"linkedService\":{\"referenceName\":\"euftpjldljfo\",\"parameters\":{\"jyt\":\"datay\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"mmbxkbtberyql\",\"datasetParameters\":\"dataebmiko\",\"parameters\":{\"waegmlgmgcnll\":\"datagw\",\"qezpsydkgtdwlv\":\"datafbeuugir\",\"thhecmjgbzhd\":\"datafgqhxdyhoozd\"},\"\":{\"f\":\"dataleisawvdwmuytk\",\"opot\":\"datatonyrfmozu\",\"ai\":\"datadkyzyiyvhgdkbfqk\"}}}]},{\"queryName\":\"kenqcvdrp\",\"dataflowSinks\":[{\"script\":\"vtknu\",\"schemaLinkedService\":{\"referenceName\":\"lbax\",\"parameters\":{\"dbix\":\"datarpoasyzzebbrqnn\",\"giydgee\":\"dataudmaniwkwtmqy\",\"llepppdfrgobr\":\"datapivsowcwehjqy\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"xcayyvriuvmme\",\"parameters\":{\"czxkxvrig\":\"dataimmwiri\",\"afpiejbpbdu\":\"datautxzascalwfefyg\",\"o\":\"dataaypsvedxphf\",\"mmitvviqs\":\"dataqqwxjnkbes\"}},\"name\":\"qoujhmdpe\",\"description\":\"sqwjqevwt\",\"dataset\":{\"referenceName\":\"jqnciwy\",\"parameters\":{\"dtin\":\"datangrr\",\"blucxyhtkyq\":\"datactkgllmpku\",\"wkvojr\":\"dataynvtimpgusroqk\"}},\"linkedService\":{\"referenceName\":\"c\",\"parameters\":{\"k\":\"dataqhfkwsmgkomqfv\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bpxzd\",\"datasetParameters\":\"datafzd\",\"parameters\":{\"utc\":\"databytibpgkidiujf\"},\"\":{\"ywcrouakmwvqtmfq\":\"dataxapksjwaglhwnnf\",\"uugoujsvhezhe\":\"datazvfeljytshjjbo\",\"ayyshf\":\"datayhwo\"}}},{\"script\":\"nfyzjzey\",\"schemaLinkedService\":{\"referenceName\":\"hdrnzeidblredxf\",\"parameters\":{\"gcebuts\":\"dataticwgdivqyb\",\"om\":\"datadgsuhtl\",\"osnlaxeozg\":\"dataqebmfopelyfuliat\",\"ptoentuve\":\"datatuhdgmshuyqehbpr\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"tlfbzlzi\",\"parameters\":{\"ets\":\"datalxw\",\"tjjiearyzzxk\":\"datahetrqudxzrbg\",\"hphurzaz\":\"dataiwpaeumely\",\"rditghbaqumqlfno\":\"dataukgmtrnwwwwlv\"}},\"name\":\"ize\",\"description\":\"yg\",\"dataset\":{\"referenceName\":\"ldy\",\"parameters\":{\"kjvoeuiwyptzefeo\":\"dataryaahlttomlpisj\",\"fsfgabdumhpbcix\":\"datasgikvsnfn\",\"n\":\"dataaypbvgwylta\"}},\"linkedService\":{\"referenceName\":\"elxmulyal\",\"parameters\":{\"yawetkrmqitmcx\":\"datajqbrf\",\"nlor\":\"dataahx\",\"qxjxqqbkfdnski\":\"datadceimlu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rjmgmscicqau\",\"datasetParameters\":\"dataxe\",\"parameters\":{\"ji\":\"datacxeecgfsl\",\"kukmdeqrpu\":\"dataayybwxqryyltnfwl\"},\"\":{\"nvuokwjmtehpfn\":\"dataijevf\",\"kwfalgzsgk\":\"dataxtkvpejtdlqorcyp\",\"ducvhhayqx\":\"datauclzmjhiqgi\",\"ujenobf\":\"datacrsho\"}}},{\"script\":\"scauudxfcvtzr\",\"schemaLinkedService\":{\"referenceName\":\"mxbrfqiek\",\"parameters\":{\"otdgvsoypgqoqv\":\"datam\",\"pbgmjodf\":\"datazdbypzcqlauhbk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"hlipxkxhj\",\"parameters\":{\"homdplvglwx\":\"datasjuvjmnsgvf\",\"ydxmplxzrofscib\":\"datapiwpi\",\"ri\":\"datatxyjq\"}},\"name\":\"zxzkpumzda\",\"description\":\"oqeteavphup\",\"dataset\":{\"referenceName\":\"rp\",\"parameters\":{\"wofhjonq\":\"datagihknnvjgccqef\",\"ufjfuvry\":\"data
bnrbtattzxvfs\",\"dngtylvdumpmx\":\"databzjvzgyzenveiy\",\"kmrvgdjbl\":\"datafkbbchdypc\"}},\"linkedService\":{\"referenceName\":\"t\",\"parameters\":{\"tgnv\":\"datafjauetzppccfg\",\"gv\":\"datarmmizhdxsybnw\",\"plvrqkmpqs\":\"datac\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"inxwtxtetwqklz\",\"datasetParameters\":\"dataumzw\",\"parameters\":{\"plng\":\"datarvkneo\",\"yfeqajtzquhqrj\":\"datazvugqwxslisgfx\",\"izjwugr\":\"datallgrckoxkpjzyc\",\"ftvylfkecsdfq\":\"dataiopecuxgim\"},\"\":{\"wuldyjmjv\":\"dataqdj\",\"hs\":\"datapldhbapfrriwrmdm\",\"mipkatjyxhvjjvsv\":\"databvnqa\",\"loamgnpf\":\"datamdlysfroyn\"}}}]}],\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"v\",\"datasetParameters\":\"datasnrknikpgjuk\",\"parameters\":{\"ycl\":\"datayl\"},\"\":{\"shmfbzkfeh\":\"datap\",\"fnsuw\":\"dataslv\",\"xahsqorsa\":\"datauroddohngb\",\"lrnd\":\"datad\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"o\",\"parameters\":{\"hxujgyzfsswe\":\"datadmnvai\",\"rnhpxzjk\":\"datanzfdextd\",\"snhtdskenigo\":\"databvzpcec\"}},\"folderPath\":\"datafud\"},\"integrationRuntime\":{\"referenceName\":\"o\",\"parameters\":{\"cqg\":\"dataueqgrcnf\",\"zlmugxpuget\":\"datavlnv\"}},\"compute\":{\"computeType\":\"datalxdddvf\",\"coreCount\":\"dataazvavspjdxay\"},\"traceLevel\":\"datakdqimumaij\",\"continueOnError\":\"datallkyrs\",\"runConcurrently\":\"datayredzhnylir\",\"sourceStagingConcurrency\":\"dataxykplvjs\"}") + "{\"sinks\":{\"aynofwyzpntapg\":{\"script\":\"po\",\"schemaLinkedService\":{\"referenceName\":\"n\",\"parameters\":{\"jhaquxquypcnn\":\"dataxikvjevjapso\",\"ilqtqrtkdeyuo\":\"datacusyq\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"pnaohherlyr\",\"parameters\":{\"ureuhssrdugaxk\":\"datarpruoyjbzylsu\",\"biupjzkyghsjcqq\":\"datagel\"}},\"name\":\"l\",\"description\":\"yaec\",\"dataset\":{\"referenceName\":\"cb\",\"parameters\":{\"eeqywngcvqhvb\":\"dataqodesddj\",\"mvzfp\":\"datazcsspnfxwkjhz\",\"diqsuokyvrzlupy\":\"dataieofvzeihlubd\"}},\"linkedService\":{\"referenceName\":\"lfyddy\",\"parameters\":{\"gjzrd\":\"datadlhytckp\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"snmzlsgalavx\",\"datasetParameters\":\"datavwuhvpipaafvtk\",\"parameters\":{\"hox\":\"datardplhwip\",\"wcdczml\":\"dataeyixbrsjgrjt\"},\"\":{\"tcgybuuubtfxjpg\":\"dataikhcpdohvwy\"}}},\"wdaocwqkxwoq\":{\"script\":\"iyxyelzmu\",\"schemaLinkedService\":{\"referenceName\":\"arucpk\",\"parameters\":{\"tohkrqbgxh\":\"datajnnwobkf\",\"luy\":\"databapf\",\"ipobtbtlmp\":\"dataivls\",\"ogtwxgswu\":\"datarkgtlrmmmsaujx\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"xmwfaehryordin\",\"parameters\":{\"bzekfbuftyxm\":\"datalifstxjufuddt\",\"beibgyquzofy\":\"datalzlsskphwwnnj\"}},\"name\":\"qruoanhjslkoawo\",\"description\":\"etkrtexj\",\"dataset\":{\"referenceName\":\"qsxecejlyhuyhqqq\",\"parameters\":{\"em\":\"datazmhydm\"}},\"linkedService\":{\"referenceName\":\"ijvvbenchtklz\",\"parameters\":{\"nbvpados\":\"datattvykzdlkt\",\"znzrsbs\":\"dataqcvikhbkcvpubvm\",\"fqibwesrgqctrv\":\"datancy\",\"klcgwba\":\"datapg\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ovgthppoddnwhao\",\"datasetParameters\":\"datawphvimst\",\"parameters\":{\"imnfvbfj\":\"dataklfvcirqcgapkyof\"},\"\":{\"iryj\":\"datapxxbfqlfkwji\"}}},\"l\":{\"script\":\"fnojiqtpbfc\",\"schemaLinkedService\":{\"referenceName\":\"hrencxo\",\"parameters\":{\"lpqcctuxxyt\":\"datadgn\",\"afgbfkmqh\":\"dataxjpkuz\",\"nksleurj\":\"datajshvyjnrjr\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"e
zpewjxcdvwnptmr\",\"parameters\":{\"zgst\":\"dataaugid\",\"whjfuoips\":\"datasrtzg\"}},\"name\":\"vcqhzejbro\",\"description\":\"lopygrsvyjrqhp\",\"dataset\":{\"referenceName\":\"vmxibpcnmps\",\"parameters\":{\"ri\":\"dataaentip\",\"grcjoycqndgbxtz\":\"datawftrjdyi\",\"gubsidwgyaz\":\"datateszohntch\"}},\"linkedService\":{\"referenceName\":\"efsdoodcmj\",\"parameters\":{\"ckecumevgoaxt\":\"dataydtnpqtwohfh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"kmuirskoaxstqq\",\"datasetParameters\":\"dataliyxzenii\",\"parameters\":{\"iaot\":\"datamojjmimym\",\"wltblmumbafcmso\":\"dataaxluovzmijir\"},\"\":{\"feelymavin\":\"datankrwwchyqeiguxi\",\"qyvzzrnikan\":\"datamdn\",\"zq\":\"databobagaigtpjj\"}}},\"vxucqxj\":{\"script\":\"ifxlibwd\",\"schemaLinkedService\":{\"referenceName\":\"qxqjvjubjqjxobm\",\"parameters\":{\"trtarneug\":\"datatz\",\"dkkf\":\"dataupkjyosqwcxedk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"srehgyfif\",\"parameters\":{\"boghj\":\"dataqeis\",\"ae\":\"dataihtcaddfvdkt\",\"dad\":\"databvyuarbycuux\",\"ugmfersbktrei\":\"datalilkmptvmtn\"}},\"name\":\"lszpusb\",\"description\":\"jrkueprpnzbfoldb\",\"dataset\":{\"referenceName\":\"iljgyrpvmaywpr\",\"parameters\":{\"kmanrowdqoj\":\"dataqbnzr\",\"ahskesea\":\"datayabvvbsi\",\"dyessiielbtg\":\"datag\"}},\"linkedService\":{\"referenceName\":\"iwcqeihuyrz\",\"parameters\":{\"ftotpvoehsfwra\":\"datayvquufplmpbvzbt\",\"yuillrrqw\":\"datalqrxewdgzfqsr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"iqjegeafg\",\"datasetParameters\":\"dataglljcblppnqosnv\",\"parameters\":{\"zmwbxautspnyutf\":\"dataiwgakghvaqbk\"},\"\":{\"nptjmzbirjnddao\":\"dataghn\",\"wmz\":\"datagi\",\"nncvj\":\"datar\"}}}},\"queries\":[{\"queryName\":\"ibliegzjk\",\"dataflowSinks\":[{\"script\":\"i\",\"schemaLinkedService\":{\"referenceName\":\"clvaivsagrfjhcrq\",\"parameters\":{\"aqlcqjnw\":\"dataah\",\"fptvfsvrjd\":\"dataq\",\"vuyxsnm\":\"datazvhxssnqqivv\",\"xmvkcu\":\"datainnisuuakaadbwhs\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"seoqkalekneah\",\"parameters\":{\"njzbv\":\"datajupxibupgt\",\"faxepuvwahfnlk\":\"datawabilyb\"}},\"name\":\"yqpkskbid\",\"description\":\"zjpbhcge\",\"dataset\":{\"referenceName\":\"teqfenhlitcydge\",\"parameters\":{\"ibguwrd\":\"datan\",\"irjkinofw\":\"dataxamjhpqfjpe\"}},\"linkedService\":{\"referenceName\":\"il\",\"parameters\":{\"sbjsekqqtsrupog\":\"dataesyifdrbkprbl\",\"nabfjeoq\":\"datarwku\",\"i\":\"datang\",\"govnrkyb\":\"datazzifuovgipqgt\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rxcnuyfvr\",\"datasetParameters\":\"datazqoi\",\"parameters\":{\"gzlvqmy\":\"datanvayboub\",\"dqop\":\"datapojbifixdgkvlze\",\"bolczhyqdvxqo\":\"dataabrzrhdezlhsdcp\"},\"\":{\"demfatftzxtrjru\":\"dataos\",\"qpqqzf\":\"dataljfdc\",\"a\":\"dataydywbnerygsif\"}}}]}],\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ccsvajn\",\"datasetParameters\":\"datauxbyrvgu\",\"parameters\":{\"nk\":\"dataysolmzrfhl\",\"ysbjtsqfhnqxqte\":\"datausb\"},\"\":{\"tuiqcjylkdb\":\"dataxxuws\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"xjk\",\"parameters\":{\"bjzhhjgvuvjsnb\":\"datapposgimtoucls\",\"g\":\"datanuujkjkqyewtlom\",\"izzhln\":\"dataaqvrazthduac\",\"e\":\"datagngqciiopoamg\"}},\"folderPath\":\"dataxidjdptr\"},\"integrationRuntime\":{\"referenceName\":\"egrauyphu\",\"parameters\":{\"mcjrfjxisypkifp\":\"datauipatodfyrfs\",\"lefltubwpeb\":\"dataynh\",\"rdxytkehfoeph\":\"datalndla\",\"mcuqjoukimvhqi\":\"dataphoy\"}},\"compute\":{\"computeType\":\"dataologfxbvl
lc\",\"coreCount\":\"datanajnc\"},\"traceLevel\":\"dataqnh\",\"continueOnError\":\"databmuimde\",\"runConcurrently\":\"datauohtnjtahdtdc\",\"sourceStagingConcurrency\":\"datahjxv\"}") .toObject(ExecutePowerQueryActivityTypeProperties.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("v", model.dataFlow().referenceName()); - Assertions.assertEquals("o", model.staging().linkedService().referenceName()); - Assertions.assertEquals("o", model.integrationRuntime().referenceName()); - Assertions.assertEquals("gqbyoyhfbbb", model.sinks().get("tah").name()); - Assertions.assertEquals("l", model.sinks().get("tah").description()); - Assertions.assertEquals("volqprhnchpet", model.sinks().get("tah").dataset().referenceName()); - Assertions.assertEquals("mubzlmmctdkzp", model.sinks().get("tah").linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get("tah").flowlet().type()); - Assertions.assertEquals("ib", model.sinks().get("tah").flowlet().referenceName()); - Assertions.assertEquals("nvckpd", model.sinks().get("tah").schemaLinkedService().referenceName()); - Assertions.assertEquals("ncdebpel", model.sinks().get("tah").rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("uq", model.sinks().get("tah").script()); - Assertions.assertEquals("ghznltjxstjge", model.queries().get(0).queryName()); - Assertions.assertEquals("kpgi", model.queries().get(0).dataflowSinks().get(0).name()); - Assertions.assertEquals("lkvaiolfrceoc", model.queries().get(0).dataflowSinks().get(0).description()); - Assertions.assertEquals("reicpsv", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("twrnuklshr", - model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("ccsvajn", model.dataFlow().referenceName()); + Assertions.assertEquals("xjk", model.staging().linkedService().referenceName()); + Assertions.assertEquals("egrauyphu", model.integrationRuntime().referenceName()); + Assertions.assertEquals("l", model.sinks().get("aynofwyzpntapg").name()); + Assertions.assertEquals("yaec", model.sinks().get("aynofwyzpntapg").description()); + Assertions.assertEquals("cb", model.sinks().get("aynofwyzpntapg").dataset().referenceName()); + Assertions.assertEquals("lfyddy", model.sinks().get("aynofwyzpntapg").linkedService().referenceName()); + Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, + model.sinks().get("aynofwyzpntapg").flowlet().type()); + Assertions.assertEquals("snmzlsgalavx", model.sinks().get("aynofwyzpntapg").flowlet().referenceName()); + Assertions.assertEquals("n", model.sinks().get("aynofwyzpntapg").schemaLinkedService().referenceName()); + Assertions.assertEquals("pnaohherlyr", + model.sinks().get("aynofwyzpntapg").rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("po", model.sinks().get("aynofwyzpntapg").script()); + Assertions.assertEquals("ibliegzjk", model.queries().get(0).queryName()); + Assertions.assertEquals("yqpkskbid", model.queries().get(0).dataflowSinks().get(0).name()); + Assertions.assertEquals("zjpbhcge", model.queries().get(0).dataflowSinks().get(0).description()); + Assertions.assertEquals("teqfenhlitcydge", + model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("il", model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); 
Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.queries().get(0).dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("tkdtequnbrpvoswj", - model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("pjqwxs", + Assertions.assertEquals("rxcnuyfvr", model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("clvaivsagrfjhcrq", model.queries().get(0).dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kefbfnszeemsfpm", + Assertions.assertEquals("seoqkalekneah", model.queries().get(0).dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("wnjo", model.queries().get(0).dataflowSinks().get(0).script()); + Assertions.assertEquals("i", model.queries().get(0).dataflowSinks().get(0).script()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecutePowerQueryActivityTypeProperties model = new ExecutePowerQueryActivityTypeProperties() - .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("v").withDatasetParameters("datasnrknikpgjuk").withParameters(mapOf("ycl", "datayl")) - .withAdditionalProperties(mapOf())) - .withStaging(new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("o").withParameters( - mapOf("hxujgyzfsswe", "datadmnvai", "rnhpxzjk", "datanzfdextd", "snhtdskenigo", "databvzpcec"))) - .withFolderPath("datafud")) - .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("o") - .withParameters(mapOf("cqg", "dataueqgrcnf", "zlmugxpuget", "datavlnv"))) - .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datalxdddvf") - .withCoreCount("dataazvavspjdxay")) - .withTraceLevel("datakdqimumaij").withContinueOnError("datallkyrs").withRunConcurrently("datayredzhnylir") - .withSourceStagingConcurrency("dataxykplvjs") - .withSinks(mapOf("tah", - new PowerQuerySink().withName("gqbyoyhfbbb").withDescription("l") - .withDataset(new DatasetReference().withReferenceName("volqprhnchpet") - .withParameters(mapOf("usizsnhekpc", "datafmttpzwnrmpuv", "iee", "dataqesvh"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("mubzlmmctdkzp") - .withParameters(mapOf("xrsxvz", "dataagr"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ib").withDatasetParameters("dataesbjohrvkpnmaa") - .withParameters(mapOf("grxooqqdlmbu", "datamai", "yfgjxmgwk", "datakayfjzycogwj", - "svwauqxhqcvaib", "datarlnosqlf")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("nvckpd") - .withParameters(mapOf("yvgukauhnhdhssul", "dataiaengydkgicbki", "c", "datakgfvgxmnan", - "coruyistjwlnt", "dataetrtvyvxbtpqjgb", "tu", "datapi"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("ncdebpel") - .withParameters(mapOf("scztsatfu", "datagulymouwnnhbz", "hietzovbu", "datafqqjydrhwnnux", - "xcpnxdzpfzmdsl", "datavekbknrr"))) - .withScript("uq"), - "hpvursmeum", - new PowerQuerySink().withName("vhpichsbzgw").withDescription("arbjjswzkzwtfeg") - .withDataset(new DatasetReference().withReferenceName("hwtag").withParameters( - mapOf("uuljnang", "datauqudewjnzlq", "lwpa", "datacddwmnsapg", "tpqvhkjbgcqqeyt", "datartgrz"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("zoiqai") - 
.withParameters(mapOf("qhayplakqglj", "datahel", "djbsfpaomlgy", "datarqatxzek", "kjjrjluqlcj", - "dataazeftivpypsj", "cjimsgeslkwwedb", "datamr"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("e").withDatasetParameters("datanyrtlinijcxvqjw") - .withParameters(mapOf("vnbk", "datapqlna", "kqx", "dataqassnwvwluzs")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("fmgudkfoybih") - .withParameters(mapOf("wmddvfubr", "datakwtatkkufb", "rnggwujyukjfsb", "dataomff"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("gb") - .withParameters(mapOf("gkbf", "dataihxwfmuwhgx", "avqqxtyhdik", "dataqsislaubij", "ryi", - "dataratzgxta", "ovu", "datalfb"))) - .withScript("jgfp"), - "grapmftziracztls", - new PowerQuerySink().withName("jmbzph").withDescription("rjsgbcro") - .withDataset(new DatasetReference().withReferenceName("ddifywxe") - .withParameters(mapOf("ackfupyivqpczx", "databijaqyiyefleju", "bperkeyhybc", "datazlxowgzt"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("urdfzynf") - .withParameters(mapOf("gmntzeauifcz", "dataqrnu", "he", "datarutfvzdo"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("xtztfnjpwbx").withDatasetParameters("datan") - .withParameters(mapOf("oydeggqwlbx", "datavthqj", "wazfrsmxgvfmb", "dataypkppnzaluaf", - "ollntvfq", "datae", "plxkzcfx", "datajfzfhspdsraxztai")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("ypwvhjskkgsf").withParameters( - mapOf("lp", "datapqmc", "tfrhjfsehtzjbuzl", "datazmix", "kzbsbcddhl", "datanz"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName( - "abs").withParameters( - mapOf("gs", "datayjjfj", "nxexafql", "dataup"))) - .withScript("ps"))) - .withQueries( - Arrays - .asList( - new PowerQuerySinkMapping().withQueryName("ghznltjxstjge") - .withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("kpgi").withDescription("lkvaiolfrceoc") - .withDataset(new DatasetReference().withReferenceName("reicpsv") - .withParameters(mapOf("yq", "dataksmwrbwejhoccrl", "adtpbbzjevd", "datakkpoufu", - "ozsflnmjschttl", "datavnyzhbtnagkndne"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("twrnuklshr") - .withParameters(mapOf("vpadrfxqudyad", "datatchttbdxj"))) - .withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("tkdtequnbrpvoswj").withDatasetParameters("datagbgvyz") - .withParameters( - mapOf("srtqortxeuwb", "datadzwubuqxsnc", "zxz", "datajzwisxsasgfm")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("pjqwxs") - .withParameters(mapOf("lppnmrftnf", "dataineyjerfgj", "p", "datasmgsftk"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("kefbfnszeemsfpm").withParameters( - mapOf("nkseq", "dataktsinsnikmwnzc", "shoeqpv", "datavqkkcebjzocuj"))) - .withScript("wnjo"), - new PowerQuerySink().withName("pfkyttxgtcovp").withDescription("pgzgq") - .withDataset(new DatasetReference().withReferenceName("p") - .withParameters(mapOf("ena", "datamkbjgtascxmn"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("zxzw").withParameters( - mapOf("lahaddng", "datacqcxyjurkfwokzi", "wn", "dataoezmpkiuzxph"))) - 
.withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("hnwcq").withDatasetParameters("datalbabmddgqbkng") - .withParameters(mapOf("shbdvqoivbwe", "datawstytengfkr", "rivjso", - "dataggxesxxpnckbhw", "ypyfhezluhqcte", "dataexgykriwpxc")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("lbesgdlskwfi") - .withParameters(mapOf("o", "dataqmqqrzeo", "roszxiwmw", "dataooptmpcp", - "xmskhdvqga", "databruuwhhbkynfxxld", "awdls", "datamcmfbesyhpzros"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("lqnksovvbtlp") - .withParameters(mapOf("pdkwv", "databzosytturzh", "ygwwxentudpvsnll", - "dataxrxmublfsagp", "pmmtlwrwsgyqwfp", "datajbb"))) - .withScript("jktqeomagoqfm"), - new PowerQuerySink().withName("x").withDescription("bayxdrwunoo") - .withDataset(new DatasetReference().withReferenceName("q") - .withParameters(mapOf("hpuhljmluzvtpt", "datarbhqxewdcdnqhkn", "gdwgqkoxbghpg", - "datajabszqcz"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("rpab") - .withParameters(mapOf("x", "datavawmrmwrzmfnjs", "vragr", "datanst", - "mjmpxshyxiezmp", "dataimunmgtkyzupjn", "lfxf", "datadaxgwgbpblsas"))) - .withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ybpwzg").withDatasetParameters("datan") - .withParameters(mapOf("eobr", "datautt")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("vskfrirxlvusghdv") - .withParameters(mapOf("lpu", "databubaorkc", "oywknlzob", "datadnpdxx", - "gi", "datahqefza", "nzqpfgrqmq", "databstx"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("h") - .withParameters(mapOf("tjxggspydmul", "datathceoujfa", "vxy", "datalzrhc", - "eggtyifmfi", "dataloxfnzjpg"))) - .withScript("pidvcoghp"), - new PowerQuerySink().withName("llppljcaq").withDescription("rvchsarff") - .withDataset(new DatasetReference().withReferenceName("etmehdzeyh") - .withParameters(mapOf("zl", "datazobwpn", "nywfyoimw", "datarinwtvsb", - "mtddkyyrpbnqi", "dataeoutztlnhg", "pishcrxc", "datalocyiuiwkr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("nvpiicnwtgyy") - .withParameters(mapOf("wynhv", "dataujnzxhotyhyjben"))) - .withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ythqgzip").withDatasetParameters("dataczzvfdhsmqygm") - .withParameters(mapOf("vlixwebjykafiiz", "datanlobycrjeaxjgo")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("lnsahpsw").withParameters( - mapOf("h", "dataifgsaka", "hmckzbuadoxlle", "datadybjgyxbwhuta"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("vcyyvpobcxnrwazi") - .withParameters( - mapOf("ll", "dataaaj", "cweeakgtrwosps", "datakwquwoxnb", "fihpydywwj", - "datakdtdrvihu", "uviifuy", "dataqdch"))) - .withScript("txpbv"))), - new PowerQuerySinkMapping().withQueryName("yaszuoheuifshs") - .withDataflowSinks(Arrays.asList(new PowerQuerySink().withName("wjmsogzc") - .withDescription("djtwq") - .withDataset(new DatasetReference().withReferenceName("wjevdnpkd") - .withParameters(mapOf("qaqbaevh", "datalrzvjvlnafpfou", "xwt", "datawcdjxqxfvklu"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("euftpjldljfo") - .withParameters(mapOf("jyt", 
"datay"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("mmbxkbtberyql").withDatasetParameters("dataebmiko") - .withParameters(mapOf("waegmlgmgcnll", "datagw", "qezpsydkgtdwlv", "datafbeuugir", - "thhecmjgbzhd", "datafgqhxdyhoozd")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("qkwg") - .withParameters(mapOf("ap", "dataeb"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("fapzfqzvkorsrp") - .withParameters(mapOf("wbdk", "dataekxmjkiqbqtm", "hpfrexbg", "datauxda"))) - .withScript("dzikzt"))), - new PowerQuerySinkMapping().withQueryName("kenqcvdrp").withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("qoujhmdpe").withDescription("sqwjqevwt") - .withDataset(new DatasetReference().withReferenceName("jqnciwy") - .withParameters(mapOf("dtin", "datangrr", "blucxyhtkyq", "datactkgllmpku", "wkvojr", - "dataynvtimpgusroqk"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("c") - .withParameters(mapOf("k", "dataqhfkwsmgkomqfv"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("bpxzd").withDatasetParameters("datafzd") - .withParameters(mapOf("utc", "databytibpgkidiujf")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference() - .withReferenceName("lbax") - .withParameters(mapOf("dbix", "datarpoasyzzebbrqnn", "giydgee", "dataudmaniwkwtmqy", - "llepppdfrgobr", "datapivsowcwehjqy"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("xcayyvriuvmme").withParameters( - mapOf( - "czxkxvrig", "dataimmwiri", "afpiejbpbdu", "datautxzascalwfefyg", "o", - "dataaypsvedxphf", "mmitvviqs", "dataqqwxjnkbes"))) - .withScript("vtknu"), - new PowerQuerySink().withName("ize").withDescription("yg") - .withDataset(new DatasetReference().withReferenceName("ldy") - .withParameters(mapOf("kjvoeuiwyptzefeo", "dataryaahlttomlpisj", "fsfgabdumhpbcix", - "datasgikvsnfn", "n", "dataaypbvgwylta"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("elxmulyal") - .withParameters(mapOf("yawetkrmqitmcx", "datajqbrf", "nlor", "dataahx", - "qxjxqqbkfdnski", "datadceimlu"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rjmgmscicqau").withDatasetParameters("dataxe") - .withParameters(mapOf("ji", "datacxeecgfsl", "kukmdeqrpu", "dataayybwxqryyltnfwl")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("hdrnzeidblredxf") - .withParameters(mapOf("gcebuts", "dataticwgdivqyb", "om", "datadgsuhtl", - "osnlaxeozg", "dataqebmfopelyfuliat", "ptoentuve", "datatuhdgmshuyqehbpr"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("tlfbzlzi") - .withParameters( - mapOf( - "ets", "datalxw", "tjjiearyzzxk", "datahetrqudxzrbg", "hphurzaz", - "dataiwpaeumely", "rditghbaqumqlfno", "dataukgmtrnwwwwlv"))) - .withScript("nfyzjzey"), - new PowerQuerySink().withName("zxzkpumzda").withDescription("oqeteavphup") - .withDataset(new DatasetReference().withReferenceName("rp").withParameters( - mapOf("wofhjonq", "datagihknnvjgccqef", "ufjfuvry", "databnrbtattzxvfs", - "dngtylvdumpmx", "databzjvzgyzenveiy", "kmrvgdjbl", "datafkbbchdypc"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("t") - .withParameters(mapOf("tgnv", 
"datafjauetzppccfg", "gv", "datarmmizhdxsybnw", - "plvrqkmpqs", "datac"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("inxwtxtetwqklz").withDatasetParameters("dataumzw") - .withParameters(mapOf("plng", "datarvkneo", "yfeqajtzquhqrj", "datazvugqwxslisgfx", - "izjwugr", "datallgrckoxkpjzyc", "ftvylfkecsdfq", "dataiopecuxgim")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("mxbrfqiek") - .withParameters(mapOf("otdgvsoypgqoqv", "datam", "pbgmjodf", "datazdbypzcqlauhbk"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("hlipxkxhj").withParameters(mapOf("homdplvglwx", - "datasjuvjmnsgvf", "ydxmplxzrofscib", "datapiwpi", "ri", "datatxyjq"))) - .withScript("scauudxfcvtzr"))))); + ExecutePowerQueryActivityTypeProperties model + = new ExecutePowerQueryActivityTypeProperties() + .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("ccsvajn") + .withDatasetParameters("datauxbyrvgu") + .withParameters(mapOf("nk", "dataysolmzrfhl", "ysbjtsqfhnqxqte", "datausb")) + .withAdditionalProperties(mapOf())) + .withStaging( + new DataFlowStagingInfo() + .withLinkedService(new LinkedServiceReference().withReferenceName("xjk") + .withParameters(mapOf("bjzhhjgvuvjsnb", "datapposgimtoucls", "g", "datanuujkjkqyewtlom", + "izzhln", "dataaqvrazthduac", "e", "datagngqciiopoamg"))) + .withFolderPath("dataxidjdptr")) + .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("egrauyphu") + .withParameters(mapOf("mcjrfjxisypkifp", "datauipatodfyrfs", "lefltubwpeb", "dataynh", + "rdxytkehfoeph", "datalndla", "mcuqjoukimvhqi", "dataphoy"))) + .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("dataologfxbvllc") + .withCoreCount("datanajnc")) + .withTraceLevel("dataqnh") + .withContinueOnError("databmuimde") + .withRunConcurrently("datauohtnjtahdtdc") + .withSourceStagingConcurrency("datahjxv") + .withSinks( + mapOf("aynofwyzpntapg", new PowerQuerySink().withName("l") + .withDescription("yaec") + .withDataset(new DatasetReference().withReferenceName("cb") + .withParameters(mapOf("eeqywngcvqhvb", "dataqodesddj", "mvzfp", "datazcsspnfxwkjhz", + "diqsuokyvrzlupy", "dataieofvzeihlubd"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("lfyddy") + .withParameters(mapOf("gjzrd", "datadlhytckp"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("snmzlsgalavx") + .withDatasetParameters("datavwuhvpipaafvtk") + .withParameters(mapOf("hox", "datardplhwip", "wcdczml", "dataeyixbrsjgrjt")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("n") + .withParameters(mapOf("jhaquxquypcnn", "dataxikvjevjapso", "ilqtqrtkdeyuo", "datacusyq"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("pnaohherlyr") + .withParameters(mapOf("ureuhssrdugaxk", "datarpruoyjbzylsu", "biupjzkyghsjcqq", "datagel"))) + .withScript("po"), "wdaocwqkxwoq", + new PowerQuerySink().withName("qruoanhjslkoawo") + .withDescription("etkrtexj") + .withDataset(new DatasetReference().withReferenceName("qsxecejlyhuyhqqq") + .withParameters(mapOf("em", "datazmhydm"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("ijvvbenchtklz") + .withParameters(mapOf("nbvpados", "datattvykzdlkt", "znzrsbs", 
"dataqcvikhbkcvpubvm", + "fqibwesrgqctrv", "datancy", "klcgwba", "datapg"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("ovgthppoddnwhao") + .withDatasetParameters("datawphvimst") + .withParameters(mapOf("imnfvbfj", "dataklfvcirqcgapkyof")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("arucpk") + .withParameters(mapOf("tohkrqbgxh", "datajnnwobkf", "luy", "databapf", "ipobtbtlmp", + "dataivls", "ogtwxgswu", "datarkgtlrmmmsaujx"))) + .withRejectedDataLinkedService( + new LinkedServiceReference().withReferenceName("xmwfaehryordin") + .withParameters(mapOf("bzekfbuftyxm", "datalifstxjufuddt", "beibgyquzofy", + "datalzlsskphwwnnj"))) + .withScript("iyxyelzmu"), + "l", + new PowerQuerySink().withName("vcqhzejbro") + .withDescription("lopygrsvyjrqhp") + .withDataset(new DatasetReference().withReferenceName("vmxibpcnmps") + .withParameters(mapOf("ri", "dataaentip", "grcjoycqndgbxtz", "datawftrjdyi", + "gubsidwgyaz", "datateszohntch"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("efsdoodcmj") + .withParameters(mapOf("ckecumevgoaxt", "dataydtnpqtwohfh"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("kmuirskoaxstqq") + .withDatasetParameters("dataliyxzenii") + .withParameters(mapOf("iaot", "datamojjmimym", "wltblmumbafcmso", "dataaxluovzmijir")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("hrencxo") + .withParameters(mapOf("lpqcctuxxyt", "datadgn", "afgbfkmqh", "dataxjpkuz", "nksleurj", + "datajshvyjnrjr"))) + .withRejectedDataLinkedService( + new LinkedServiceReference().withReferenceName("ezpewjxcdvwnptmr") + .withParameters(mapOf("zgst", "dataaugid", "whjfuoips", "datasrtzg"))) + .withScript("fnojiqtpbfc"), + "vxucqxj", + new PowerQuerySink().withName("lszpusb") + .withDescription("jrkueprpnzbfoldb") + .withDataset(new DatasetReference().withReferenceName("iljgyrpvmaywpr") + .withParameters(mapOf("kmanrowdqoj", "dataqbnzr", "ahskesea", "datayabvvbsi", + "dyessiielbtg", "datag"))) + .withLinkedService( + new LinkedServiceReference().withReferenceName("iwcqeihuyrz") + .withParameters(mapOf("ftotpvoehsfwra", "datayvquufplmpbvzbt", "yuillrrqw", + "datalqrxewdgzfqsr"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("iqjegeafg") + .withDatasetParameters("dataglljcblppnqosnv") + .withParameters(mapOf("zmwbxautspnyutf", "dataiwgakghvaqbk")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("qxqjvjubjqjxobm") + .withParameters(mapOf("trtarneug", "datatz", "dkkf", "dataupkjyosqwcxedk"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("srehgyfif") + .withParameters(mapOf("boghj", "dataqeis", "ae", "dataihtcaddfvdkt", "dad", + "databvyuarbycuux", "ugmfersbktrei", "datalilkmptvmtn"))) + .withScript("ifxlibwd"))) + .withQueries(Arrays.asList(new PowerQuerySinkMapping().withQueryName("ibliegzjk") + .withDataflowSinks(Arrays.asList(new PowerQuerySink().withName("yqpkskbid") + .withDescription("zjpbhcge") + .withDataset(new DatasetReference().withReferenceName("teqfenhlitcydge") + .withParameters(mapOf("ibguwrd", "datan", "irjkinofw", "dataxamjhpqfjpe"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("il") + 
.withParameters(mapOf("sbjsekqqtsrupog", "dataesyifdrbkprbl", "nabfjeoq", "datarwku", "i", + "datang", "govnrkyb", "datazzifuovgipqgt"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("rxcnuyfvr") + .withDatasetParameters("datazqoi") + .withParameters(mapOf("gzlvqmy", "datanvayboub", "dqop", "datapojbifixdgkvlze", + "bolczhyqdvxqo", "dataabrzrhdezlhsdcp")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("clvaivsagrfjhcrq") + .withParameters(mapOf("aqlcqjnw", "dataah", "fptvfsvrjd", "dataq", "vuyxsnm", + "datazvhxssnqqivv", "xmvkcu", "datainnisuuakaadbwhs"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("seoqkalekneah") + .withParameters(mapOf("njzbv", "datajupxibupgt", "faxepuvwahfnlk", "datawabilyb"))) + .withScript("i"))))); model = BinaryData.fromObject(model).toObject(ExecutePowerQueryActivityTypeProperties.class); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("v", model.dataFlow().referenceName()); - Assertions.assertEquals("o", model.staging().linkedService().referenceName()); - Assertions.assertEquals("o", model.integrationRuntime().referenceName()); - Assertions.assertEquals("gqbyoyhfbbb", model.sinks().get("tah").name()); - Assertions.assertEquals("l", model.sinks().get("tah").description()); - Assertions.assertEquals("volqprhnchpet", model.sinks().get("tah").dataset().referenceName()); - Assertions.assertEquals("mubzlmmctdkzp", model.sinks().get("tah").linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get("tah").flowlet().type()); - Assertions.assertEquals("ib", model.sinks().get("tah").flowlet().referenceName()); - Assertions.assertEquals("nvckpd", model.sinks().get("tah").schemaLinkedService().referenceName()); - Assertions.assertEquals("ncdebpel", model.sinks().get("tah").rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("uq", model.sinks().get("tah").script()); - Assertions.assertEquals("ghznltjxstjge", model.queries().get(0).queryName()); - Assertions.assertEquals("kpgi", model.queries().get(0).dataflowSinks().get(0).name()); - Assertions.assertEquals("lkvaiolfrceoc", model.queries().get(0).dataflowSinks().get(0).description()); - Assertions.assertEquals("reicpsv", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("twrnuklshr", - model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("ccsvajn", model.dataFlow().referenceName()); + Assertions.assertEquals("xjk", model.staging().linkedService().referenceName()); + Assertions.assertEquals("egrauyphu", model.integrationRuntime().referenceName()); + Assertions.assertEquals("l", model.sinks().get("aynofwyzpntapg").name()); + Assertions.assertEquals("yaec", model.sinks().get("aynofwyzpntapg").description()); + Assertions.assertEquals("cb", model.sinks().get("aynofwyzpntapg").dataset().referenceName()); + Assertions.assertEquals("lfyddy", model.sinks().get("aynofwyzpntapg").linkedService().referenceName()); + Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, + model.sinks().get("aynofwyzpntapg").flowlet().type()); + Assertions.assertEquals("snmzlsgalavx", model.sinks().get("aynofwyzpntapg").flowlet().referenceName()); + Assertions.assertEquals("n", 
model.sinks().get("aynofwyzpntapg").schemaLinkedService().referenceName()); + Assertions.assertEquals("pnaohherlyr", + model.sinks().get("aynofwyzpntapg").rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("po", model.sinks().get("aynofwyzpntapg").script()); + Assertions.assertEquals("ibliegzjk", model.queries().get(0).queryName()); + Assertions.assertEquals("yqpkskbid", model.queries().get(0).dataflowSinks().get(0).name()); + Assertions.assertEquals("zjpbhcge", model.queries().get(0).dataflowSinks().get(0).description()); + Assertions.assertEquals("teqfenhlitcydge", + model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("il", model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.queries().get(0).dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("tkdtequnbrpvoswj", - model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("pjqwxs", + Assertions.assertEquals("rxcnuyfvr", model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("clvaivsagrfjhcrq", model.queries().get(0).dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kefbfnszeemsfpm", + Assertions.assertEquals("seoqkalekneah", model.queries().get(0).dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("wnjo", model.queries().get(0).dataflowSinks().get(0).script()); + Assertions.assertEquals("i", model.queries().get(0).dataflowSinks().get(0).script()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteWranglingDataflowActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteWranglingDataflowActivityTests.java index 7eb0265fe0e3f..e25a4239632c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteWranglingDataflowActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteWranglingDataflowActivityTests.java @@ -30,284 +30,250 @@ public final class ExecuteWranglingDataflowActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecuteWranglingDataflowActivity model = BinaryData.fromString( - 
"{\"type\":\"ExecuteWranglingDataflow\",\"typeProperties\":{\"sinks\":{\"tfdcwekbbvtcox\":{\"script\":\"rpps\",\"schemaLinkedService\":{\"referenceName\":\"dohiotgf\",\"parameters\":{\"mzpitziej\":\"datapaircnu\",\"h\":\"dataebzofmmcejvs\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"sxrmlxszxokpqn\",\"parameters\":{\"b\":\"datacyjsekdfhnhbkt\"}},\"name\":\"on\",\"description\":\"umlfdxetqknzev\",\"dataset\":{\"referenceName\":\"pqnqneo\",\"parameters\":{\"qlinlwcxrxd\":\"datarmng\",\"u\":\"dataxctojxtkmdegmiv\"}},\"linkedService\":{\"referenceName\":\"pctlbu\",\"parameters\":{\"kofzzsohc\":\"datapabturkmktcsqkt\",\"vepmhohqxl\":\"datae\",\"tliwoodndu\":\"datay\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"kyzirgiyqz\",\"datasetParameters\":\"datanbaz\",\"parameters\":{\"daruwvrvx\":\"datagb\",\"adeqslhz\":\"dataozyhu\",\"mqazolroqusrlkp\":\"datay\"},\"\":{\"mkbpdpk\":\"dataqydrnwsfa\",\"slqikocgzjmjdoq\":\"datalh\"}}},\"aaq\":{\"script\":\"gjdpy\",\"schemaLinkedService\":{\"referenceName\":\"m\",\"parameters\":{\"poynbsttureqvxzl\":\"datarwpjtj\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"aiusglgfecsr\",\"parameters\":{\"xbmgheyamoety\":\"dataswmkxbbziffpvvg\",\"xseyjqklaihqrbrm\":\"dataevyitidi\",\"pydjsubt\":\"datahljqqbue\",\"df\":\"datafbvcveomdlrsjgu\"}},\"name\":\"dqvuqufaowu\",\"description\":\"ujjvojmynlv\",\"dataset\":{\"referenceName\":\"jslxewfqvlhj\",\"parameters\":{\"fgzlrnfmmefppjxt\":\"datarh\",\"dvdrn\":\"dataffwqbdvgfgirrzyn\"}},\"linkedService\":{\"referenceName\":\"qfrxggvstyxv\",\"parameters\":{\"imfpnp\":\"dataaqf\",\"tdorvxdwgpu\":\"datakdgjnd\",\"dzjmjkg\":\"datajeffpidwqr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"plcoqbouetfx\",\"datasetParameters\":\"dataj\",\"parameters\":{\"dgq\":\"datadlokhimzfltxqpoz\",\"jwjnvhu\":\"datakfevhgjk\"},\"\":{\"hsvthkg\":\"datawvqbpazjmfqus\"}}},\"i\":{\"script\":\"yifjv\",\"schemaLinkedService\":{\"referenceName\":\"gjnqujtcwp\",\"parameters\":{\"afhvuy\":\"dataaft\",\"bdv\":\"databtlmnrdkiqs\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"qsmk\",\"parameters\":{\"mexrofqh\":\"dataljxnkpd\",\"gwov\":\"dataptsdlcsrhttmh\",\"krcwnlyqq\":\"dataduzqu\",\"q\":\"dataknul\"}},\"name\":\"acusmosjawbnxciz\",\"description\":\"ifndgrjnzjygh\",\"dataset\":{\"referenceName\":\"fs\",\"parameters\":{\"ems\":\"datapvgec\",\"vxlaywkbuve\":\"datacgrkgt\"}},\"linkedService\":{\"referenceName\":\"tezeyfdgnaoi\",\"parameters\":{\"fzdgsmeeqelmrpvg\":\"datafdgtwxiesrhvgp\",\"gqsk\":\"datax\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"wobeg\",\"datasetParameters\":\"datajxkxvg\",\"parameters\":{\"enulrfe\":\"datakfef\"},\"\":{\"llhzlicvrdwjght\":\"dataqdvooqjraknngad\",\"doieo\":\"datapvomxtosdbv\",\"adhdlr\":\"dataeebzewbifcyptl\"}}},\"dspykcreuopigsu\":{\"script\":\"uewrhkjmphfhmua\",\"schemaLinkedService\":{\"referenceName\":\"uulhfdggsr\",\"parameters\":{\"tgelfkhmgs\":\"datahhlggobjcf\",\"aqk\":\"datahocrphzdkikjy\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"upm\",\"parameters\":{\"are\":\"datatsa\",\"mjwmld\":\"datav\",\"ryykon\":\"datarwglmmcat\",\"ltcois\":\"dataenmvhbgpgvliinu\"}},\"name\":\"qfzbiy\",\"description\":\"wyyvsbjpyxlzxjir\",\"dataset\":{\"referenceName\":\"prsh\",\"parameters\":{\"duwqovlqfz\":\"datagoqxfbscitizroru\",\"kovubfugdgpmtzqp\":\"dataehagorbspotq\"}},\"linkedService\":{\"referenceName\":\"ochmeximhmisvetu\",\"parameters\":{\"lgypnaqwjsdwna\":\"datakjamihnr\",\"iiqixfygntrynfoa\":\"datauqntxbee\"}},\"flowlet\":{\"type\"
:\"DataFlowReference\",\"referenceName\":\"ztpss\",\"datasetParameters\":\"datadqcrigygtod\",\"parameters\":{\"pdzbybrvkxrcfzsz\":\"datai\"},\"\":{\"tmsphcrnhvxt\":\"datamkeucvqtuuzftds\"}}}},\"queries\":[{\"queryName\":\"k\",\"dataflowSinks\":[{\"script\":\"crdrdkexcyw\",\"schemaLinkedService\":{\"referenceName\":\"iddcnljlly\"},\"rejectedDataLinkedService\":{\"referenceName\":\"hqhcirsajtdz\"},\"name\":\"eliktk\",\"description\":\"mp\",\"dataset\":{\"referenceName\":\"zxicq\"},\"linkedService\":{\"referenceName\":\"wzxqmve\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"roymrqdgyttfzoz\"}},{\"script\":\"vnj\",\"schemaLinkedService\":{\"referenceName\":\"mfcgb\"},\"rejectedDataLinkedService\":{\"referenceName\":\"pslwlh\"},\"name\":\"uezxcpxwqgmnqueq\",\"description\":\"bedfoosiplhygpsa\",\"dataset\":{\"referenceName\":\"pmmshfhr\"},\"linkedService\":{\"referenceName\":\"y\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vga\"}},{\"script\":\"rulpnrjswrp\",\"schemaLinkedService\":{\"referenceName\":\"recwdle\"},\"rejectedDataLinkedService\":{\"referenceName\":\"muqmzxrjvh\"},\"name\":\"pnyehhqytjrmxaz\",\"description\":\"iqzaeadkahpokf\",\"dataset\":{\"referenceName\":\"ivhozhrwbvfljxl\"},\"linkedService\":{\"referenceName\":\"tirnpazrbkhyzufk\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vbyf\"}},{\"script\":\"ihuupeflkwbvx\",\"schemaLinkedService\":{\"referenceName\":\"ahdkeayu\"},\"rejectedDataLinkedService\":{\"referenceName\":\"ivp\"},\"name\":\"emydn\",\"description\":\"pbr\",\"dataset\":{\"referenceName\":\"gtblxamd\"},\"linkedService\":{\"referenceName\":\"pifygxuaidrbz\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"nkwullvuk\"}}]},{\"queryName\":\"mosjzmandjjqh\",\"dataflowSinks\":[{\"script\":\"q\",\"schemaLinkedService\":{\"referenceName\":\"djr\"},\"rejectedDataLinkedService\":{\"referenceName\":\"qdiaxf\"},\"name\":\"iwrfocbetlljqkgl\",\"description\":\"jawaxvlc\",\"dataset\":{\"referenceName\":\"c\"},\"linkedService\":{\"referenceName\":\"seqmejerjyz\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"zbjieeivdrqtlcx\"}}]},{\"queryName\":\"gykrmfxlturxyvg\",\"dataflowSinks\":[{\"script\":\"hsnpc\",\"schemaLinkedService\":{\"referenceName\":\"mm\"},\"rejectedDataLinkedService\":{\"referenceName\":\"erxnljt\"},\"name\":\"ujsnzue\",\"description\":\"znkdbhzcd\",\"dataset\":{\"referenceName\":\"nztzhqsbgksfjq\"},\"linkedService\":{\"referenceName\":\"eqhj\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rneorbdtli\"}},{\"script\":\"aumr\",\"schemaLinkedService\":{\"referenceName\":\"onugcomqlbjxps\"},\"rejectedDataLinkedService\":{\"referenceName\":\"vggv\"},\"name\":\"qparqzygdko\",\"description\":\"tjsr\",\"dataset\":{\"referenceName\":\"qamqqqvuk\"},\"linkedService\":{\"referenceName\":\"cdppdmmfdu\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"cltygxziti\"}},{\"script\":\"b\",\"schemaLinkedService\":{\"referenceName\":\"y\"},\"rejectedDataLinkedService\":{\"referenceName\":\"djcgdharlrfa\"},\"name\":\"dyswlmxe\",\"description\":\"gkf\",\"dataset\":{\"referenceName\":\"gkbujqtklzwmqzk\"},\"linkedService\":{\"referenceName\":\"czpzwfewbj\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"mhpue\"}},{\"script\":\"hhne\",\"schemaLinkedService\":{\"referenceName\":\"xx\"},\"rejectedDataLinkedService\":{\"referenceName\":\"fiali\"},\"name\":\"ymnukvfjbxvhui\",\"description\":\"ody\",\"dataset\":{\"referenceName\":\"otqp\"},\"linkedService
\":{\"referenceName\":\"wrahqqumozulefp\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"hx\"}}]},{\"queryName\":\"bgw\",\"dataflowSinks\":[{\"script\":\"mmztvkg\",\"schemaLinkedService\":{\"referenceName\":\"irhnkmjgnnpl\"},\"rejectedDataLinkedService\":{\"referenceName\":\"xopi\"},\"name\":\"x\",\"description\":\"ypsmpgop\",\"dataset\":{\"referenceName\":\"stysirhnwse\"},\"linkedService\":{\"referenceName\":\"we\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rnjiyddc\"}},{\"script\":\"eozayji\",\"schemaLinkedService\":{\"referenceName\":\"cb\"},\"rejectedDataLinkedService\":{\"referenceName\":\"tm\"},\"name\":\"wqocjkqohcfnomw\",\"description\":\"ebvjmmsgukoql\",\"dataset\":{\"referenceName\":\"kerztenzkbppgc\"},\"linkedService\":{\"referenceName\":\"tdzmei\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ybcorsmdza\"}}]}],\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"q\",\"datasetParameters\":\"datadstztzhwbw\",\"parameters\":{\"agegzrt\":\"datauvqp\",\"je\":\"datausxh\",\"j\":\"datapeafzvxbvk\"},\"\":{\"trad\":\"dataoekohxkgxydimopz\",\"tdhoxuxwvbsa\":\"dataihnpwsagebosbzyd\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"szeoxzrrha\",\"parameters\":{\"pq\":\"dataechrj\"}},\"folderPath\":\"dataylkygcgqjdvab\"},\"integrationRuntime\":{\"referenceName\":\"izmtmctehx\",\"parameters\":{\"vbohpcw\":\"datapyerhdablqoll\",\"tjjqcfzdfmqoe\":\"datasqavpu\",\"zymoqatga\":\"datafypuypztn\"}},\"compute\":{\"computeType\":\"datahih\",\"coreCount\":\"dataebaw\"},\"traceLevel\":\"dataajdkjqznmzr\",\"continueOnError\":\"datagyvxln\",\"runConcurrently\":\"datapcrcpishjkov\",\"sourceStagingConcurrency\":\"datax\"},\"policy\":{\"timeout\":\"datamfcfivr\",\"retry\":\"datapnkjuaxyyvxetgsd\",\"retryIntervalInSeconds\":328007856,\"secureInput\":false,\"secureOutput\":false,\"\":{\"rkqdbqhz\":\"dataorkchbnatrdr\",\"gut\":\"datavatypjk\",\"vrlbezhwsvoi\":\"datadlehcqbjjphuakpk\"}},\"name\":\"dcxjcjiqxybbbytm\",\"description\":\"zrjcbadnwp\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"spdmeeabc\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"ctlixnuz\":\"dataydwdbcur\",\"fzigfdhpwecrvi\":\"datam\",\"wfxgu\":\"databfui\"}}],\"userProperties\":[{\"name\":\"fimpxyurnmanb\",\"value\":\"datauq\"},{\"name\":\"rm\",\"value\":\"datajuldojor\"},{\"name\":\"rgbugprfiympy\",\"value\":\"databcpieiqolym\"}],\"\":{\"hhbnkhgpesyyluk\":\"databcyed\",\"gdfispwpfjxljr\":\"datajdmdykjhuk\"}}") + 
"{\"type\":\"gtqqrmibmnmxspzt\",\"typeProperties\":{\"sinks\":{\"uqi\":{\"script\":\"gxdxqefuzubn\",\"schemaLinkedService\":{\"referenceName\":\"iminccnubynrh\",\"parameters\":{\"hxtbcqjvyzotxkhy\":\"datagfzhbtzuddqt\",\"mqrioa\":\"dataj\",\"rglmjrufwqpnmcw\":\"datazmrwlsrjjaj\",\"dzkbky\":\"datasfpyt\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"ztwwkvwpbdo\",\"parameters\":{\"jzrzv\":\"datarmp\",\"igzwhfeq\":\"dataupnfrlygyjrlu\"}},\"name\":\"l\",\"description\":\"l\",\"dataset\":{\"referenceName\":\"zzkgebey\",\"parameters\":{\"azfjbxhnahgbloea\":\"datagyksgntgiw\",\"gbyxpma\":\"datawidumilxi\"}},\"linkedService\":{\"referenceName\":\"rn\",\"parameters\":{\"faf\":\"datahviqwfctiy\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"nsktdgbombnc\",\"datasetParameters\":\"dataxkcpqw\",\"parameters\":{\"ijobcpruommtuca\":\"dataqvlcunnb\",\"a\":\"datagrlvkdaphzemn\",\"qbwim\":\"datatkbzz\"},\"\":{\"oukfimonreu\":\"datamgocalgrakmw\",\"hpov\":\"datacrcsdaip\",\"cxtqqpfgjny\":\"datatvotbybcxgrrlzdn\"}}},\"nhlsf\":{\"script\":\"iujsjngs\",\"schemaLinkedService\":{\"referenceName\":\"yvnpuclqtdca\",\"parameters\":{\"cl\":\"datazeckpgpjkczkcdlz\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"anabzycxvifkzs\",\"parameters\":{\"inlqkzee\":\"datal\",\"kwxb\":\"datazelmmwmdhmd\"}},\"name\":\"vmc\",\"description\":\"nmnojfmztpwu\",\"dataset\":{\"referenceName\":\"untvyeyebw\",\"parameters\":{\"eyuirrrxrftfamo\":\"dataonqjnpkofjfus\"}},\"linkedService\":{\"referenceName\":\"vjm\",\"parameters\":{\"cowmukzcrp\":\"datalphqtq\",\"zbeutqfx\":\"datalgzctfnlakl\",\"wticu\":\"dataxzwiehqvvbgwxp\",\"lzhujcx\":\"datakmzubdmcdfvw\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"aqolnfeywbpenq\",\"datasetParameters\":\"datapzwwsfrpbwvfjdg\",\"parameters\":{\"jyulo\":\"dataycxmct\",\"lkmjeekbmwizis\":\"datapulwcxmxf\",\"leaotaakcy\":\"datatmxyrsnmwiy\"},\"\":{\"jnd\":\"dataosnbwbcnfo\",\"oubtehdccghdzq\":\"datawwnlg\",\"otqsp\":\"datawlixh\"}}},\"xlkloqpwsaqcr\":{\"script\":\"jwajsbqyt\",\"schemaLinkedService\":{\"referenceName\":\"jvmjhuvuadpd\",\"parameters\":{\"uwaqiomdlp\":\"datawbheijmwaj\",\"wowmwrn\":\"datakfslm\",\"obg\":\"datauwgrtvyw\",\"evyyppaycasch\":\"datavhdb\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"midkd\",\"parameters\":{\"xxcvugarykbyuuk\":\"dataptessvmd\",\"csevqtd\":\"datassretugorcz\"}},\"name\":\"ktogmcblw\",\"description\":\"vnisin\",\"dataset\":{\"referenceName\":\"cwwpuka\",\"parameters\":{\"hngaczgg\":\"dataj\",\"tok\":\"dataiaqmuptnhuybt\",\"ljzlnre\":\"datahyozxotwral\",\"mjschcxud\":\"datalwfgyabglsar\"}},\"linkedService\":{\"referenceName\":\"mhhsumzfvrakpql\",\"parameters\":{\"holjjxilbsbh\":\"dataiudveoibeh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ucwdc\",\"datasetParameters\":\"datakwwuljveu\",\"parameters\":{\"fmvauhnh\":\"dataxrsxb\",\"iwvejwtzkiid\":\"datanswlfuukildlayt\",\"ytaeallsxfza\":\"datasskl\"},\"\":{\"gxrfrmdpwpzuxoy\":\"datasbzmoqxjynhmx\"}}},\"vfrtmwyezr\":{\"script\":\"kgjdn\",\"schemaLinkedService\":{\"referenceName\":\"wpvrwec\",\"parameters\":{\"jwpejtszj\":\"dataiaognmanrzjprlq\",\"vwvycvnow\":\"datavj\",\"xwwhusrodrom\":\"dataclijmdl\",\"fw\":\"dataz\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"ocwkwmqroqldacx\",\"parameters\":{\"ssukvsgkzxznctx\":\"dataq\",\"jfcaqpkpvdiir\":\"datacznszmjz\"}},\"name\":\"xahxysl\",\"description\":\"okfomakmi\",\"dataset\":{\"referenceName\":\"hduflajsgut\",\"parameters\":{\"firaoytkkq\":\"databvxyqprchk\",\"dzfypdsrfpihvij\":\"d
ataaazvmnv\"}},\"linkedService\":{\"referenceName\":\"tkpo\",\"parameters\":{\"njduyotqb\":\"dataoyjjfx\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xbtuxm\",\"datasetParameters\":\"datarixolbzjlqrps\",\"parameters\":{\"d\":\"datastcoibi\"},\"\":{\"oemcajjazo\":\"dataafenwvvxcah\",\"couilgtbslagtmki\":\"datakwhgnjhxydx\",\"zlmztkzgr\":\"datalcgejmjpvgv\",\"zdptoytcnxgq\":\"dataaqytqmc\"}}}},\"queries\":[{\"queryName\":\"awpbif\",\"dataflowSinks\":[{\"script\":\"iocvjmyinpl\",\"schemaLinkedService\":{\"referenceName\":\"ejhxfjlecbbabi\"},\"rejectedDataLinkedService\":{\"referenceName\":\"alsrxzatlzwr\"},\"name\":\"joqcyowzwshsgrea\",\"description\":\"wsdirxpr\",\"dataset\":{\"referenceName\":\"gzpnrmmjyv\"},\"linkedService\":{\"referenceName\":\"tjuwobwskyjlte\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vrpvhivvlmzcvpo\"}},{\"script\":\"vfcwedyz\",\"schemaLinkedService\":{\"referenceName\":\"oezgibfisfmc\"},\"rejectedDataLinkedService\":{\"referenceName\":\"rhgcuejtxxlkokt\"},\"name\":\"clpvwtwboxgrv\",\"description\":\"voq\",\"dataset\":{\"referenceName\":\"quwkuszllognl\"},\"linkedService\":{\"referenceName\":\"hvllenygimnfvq\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"zo\"}},{\"script\":\"lwdh\",\"schemaLinkedService\":{\"referenceName\":\"vhflbchzoboee\"},\"rejectedDataLinkedService\":{\"referenceName\":\"kwdtuwbrw\"},\"name\":\"yqtyuywzccumk\",\"description\":\"ygrkcolvitb\",\"dataset\":{\"referenceName\":\"oxrbotzvrgo\"},\"linkedService\":{\"referenceName\":\"ayjselrfqstbfuqm\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"fvbeyugggfshn\"}}]},{\"queryName\":\"ken\",\"dataflowSinks\":[{\"script\":\"jqrxyaa\",\"schemaLinkedService\":{\"referenceName\":\"rkxyjsuappd\"},\"rejectedDataLinkedService\":{\"referenceName\":\"jiguusbw\"},\"name\":\"syoybjtzdgzt\",\"description\":\"qiilfovmcjchbof\",\"dataset\":{\"referenceName\":\"vqvjfszvece\"},\"linkedService\":{\"referenceName\":\"ptez\"},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"erurcjgkauyzbrdi\"}}]}],\"dataFlow\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bkgxqsbwepduyqx\",\"datasetParameters\":\"dataxarddbnqyayl\",\"parameters\":{\"rtpxwgt\":\"datatwd\"},\"\":{\"xndnbzhs\":\"databjihz\",\"stytexu\":\"datadnljpouz\"}},\"staging\":{\"linkedService\":{\"referenceName\":\"qllqn\",\"parameters\":{\"bytshsathk\":\"datafufleioywlcl\"}},\"folderPath\":\"datawqljnuayp\"},\"integrationRuntime\":{\"referenceName\":\"z\",\"parameters\":{\"gytquktcqggxdnpp\":\"dataeqajipnpwomjlps\"}},\"compute\":{\"computeType\":\"dataqag\",\"coreCount\":\"datawoozlfliir\"},\"traceLevel\":\"datanglfcrtkpfsjwtq\",\"continueOnError\":\"dataqeofjoqjmlz\",\"runConcurrently\":\"dataizjssfwoj\",\"sourceStagingConcurrency\":\"datagmhzrjsb\"},\"policy\":{\"timeout\":\"dataitypashvjriniz\",\"retry\":\"dataadus\",\"retryIntervalInSeconds\":818396335,\"secureInput\":true,\"secureOutput\":false,\"\":{\"vgqtvg\":\"datawcxyagidyansnu\",\"vrdsv\":\"dataperbnbsd\"}},\"name\":\"rvzbmhmkoxsavzng\",\"description\":\"wymeb\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qtpwhicnnanqzrml\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Succeeded\"],\"\":{\"nwjuqzj\":\"datadwxlstpuzx\",\"peauhldqbwkxe\":\"datahzd\",\"ygoijhciynp\":\"datanroewwrhvdwrowe\"}},{\"activity\":\"viivczupcl\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"tsnnsxouz\":\"datafh\"}},{\"activity\":\"mudyuoholy\",\"dependencyConditions\":[\"Fai
led\"],\"\":{\"ttclnhoituk\":\"dataxlyfrrczhnvtih\"}}],\"userProperties\":[{\"name\":\"orepbqkmylj\",\"value\":\"dataicjlrl\"},{\"name\":\"qbavplqkcsr\",\"value\":\"datavvniwqpc\"},{\"name\":\"yo\",\"value\":\"datajikv\"},{\"name\":\"kpu\",\"value\":\"datatjcfyxyrkpclvp\"}],\"\":{\"gkgnuh\":\"datayckzshvcalzlk\"}}") .toObject(ExecuteWranglingDataflowActivity.class); - Assertions.assertEquals("dcxjcjiqxybbbytm", model.name()); - Assertions.assertEquals("zrjcbadnwp", model.description()); + Assertions.assertEquals("rvzbmhmkoxsavzng", model.name()); + Assertions.assertEquals("wymeb", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("spdmeeabc", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("fimpxyurnmanb", model.userProperties().get(0).name()); - Assertions.assertEquals(328007856, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("qtpwhicnnanqzrml", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("orepbqkmylj", model.userProperties().get(0).name()); + Assertions.assertEquals(818396335, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("on", model.sinks().get("tfdcwekbbvtcox").name()); - Assertions.assertEquals("umlfdxetqknzev", model.sinks().get("tfdcwekbbvtcox").description()); - Assertions.assertEquals("pqnqneo", model.sinks().get("tfdcwekbbvtcox").dataset().referenceName()); - Assertions.assertEquals("pctlbu", model.sinks().get("tfdcwekbbvtcox").linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.sinks().get("tfdcwekbbvtcox").flowlet().type()); - Assertions.assertEquals("kyzirgiyqz", model.sinks().get("tfdcwekbbvtcox").flowlet().referenceName()); - Assertions.assertEquals("dohiotgf", model.sinks().get("tfdcwekbbvtcox").schemaLinkedService().referenceName()); - Assertions.assertEquals("sxrmlxszxokpqn", - model.sinks().get("tfdcwekbbvtcox").rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("rpps", model.sinks().get("tfdcwekbbvtcox").script()); - Assertions.assertEquals("k", model.queries().get(0).queryName()); - Assertions.assertEquals("eliktk", model.queries().get(0).dataflowSinks().get(0).name()); - Assertions.assertEquals("mp", model.queries().get(0).dataflowSinks().get(0).description()); - Assertions.assertEquals("zxicq", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("wzxqmve", + Assertions.assertEquals("l", model.sinks().get("uqi").name()); + Assertions.assertEquals("l", model.sinks().get("uqi").description()); + Assertions.assertEquals("zzkgebey", model.sinks().get("uqi").dataset().referenceName()); + Assertions.assertEquals("rn", model.sinks().get("uqi").linkedService().referenceName()); + Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get("uqi").flowlet().type()); + Assertions.assertEquals("nsktdgbombnc", model.sinks().get("uqi").flowlet().referenceName()); + 
Assertions.assertEquals("iminccnubynrh", model.sinks().get("uqi").schemaLinkedService().referenceName()); + Assertions.assertEquals("ztwwkvwpbdo", model.sinks().get("uqi").rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("gxdxqefuzubn", model.sinks().get("uqi").script()); + Assertions.assertEquals("awpbif", model.queries().get(0).queryName()); + Assertions.assertEquals("joqcyowzwshsgrea", model.queries().get(0).dataflowSinks().get(0).name()); + Assertions.assertEquals("wsdirxpr", model.queries().get(0).dataflowSinks().get(0).description()); + Assertions.assertEquals("gzpnrmmjyv", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("tjuwobwskyjlte", model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.queries().get(0).dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("roymrqdgyttfzoz", + Assertions.assertEquals("vrpvhivvlmzcvpo", model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("iddcnljlly", + Assertions.assertEquals("ejhxfjlecbbabi", model.queries().get(0).dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("hqhcirsajtdz", + Assertions.assertEquals("alsrxzatlzwr", model.queries().get(0).dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("crdrdkexcyw", model.queries().get(0).dataflowSinks().get(0).script()); + Assertions.assertEquals("iocvjmyinpl", model.queries().get(0).dataflowSinks().get(0).script()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("q", model.dataFlow().referenceName()); - Assertions.assertEquals("szeoxzrrha", model.staging().linkedService().referenceName()); - Assertions.assertEquals("izmtmctehx", model.integrationRuntime().referenceName()); + Assertions.assertEquals("bkgxqsbwepduyqx", model.dataFlow().referenceName()); + Assertions.assertEquals("qllqn", model.staging().linkedService().referenceName()); + Assertions.assertEquals("z", model.integrationRuntime().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecuteWranglingDataflowActivity model - = new ExecuteWranglingDataflowActivity().withName("dcxjcjiqxybbbytm").withDescription("zrjcbadnwp") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("spdmeeabc") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + ExecuteWranglingDataflowActivity model = new ExecuteWranglingDataflowActivity().withName("rvzbmhmkoxsavzng") + .withDescription("wymeb") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("qtpwhicnnanqzrml") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("viivczupcl") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("mudyuoholy") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - 
.withUserProperties(Arrays.asList(new UserProperty().withName("fimpxyurnmanb").withValue("datauq"), - new UserProperty().withName("rm").withValue("datajuldojor"), - new UserProperty().withName("rgbugprfiympy").withValue("databcpieiqolym"))) - .withPolicy(new ActivityPolicy() - .withTimeout("datamfcfivr").withRetry("datapnkjuaxyyvxetgsd").withRetryIntervalInSeconds(328007856) - .withSecureInput(false).withSecureOutput(false).withAdditionalProperties(mapOf())) - .withSinks(mapOf("tfdcwekbbvtcox", - new PowerQuerySink().withName("on").withDescription("umlfdxetqknzev") - .withDataset(new DatasetReference().withReferenceName("pqnqneo") - .withParameters(mapOf("qlinlwcxrxd", "datarmng", "u", "dataxctojxtkmdegmiv"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pctlbu").withParameters( - mapOf("kofzzsohc", "datapabturkmktcsqkt", "vepmhohqxl", "datae", "tliwoodndu", "datay"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("kyzirgiyqz").withDatasetParameters("datanbaz") - .withParameters( - mapOf("daruwvrvx", "datagb", "adeqslhz", "dataozyhu", "mqazolroqusrlkp", "datay")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference() - .withReferenceName("dohiotgf") - .withParameters(mapOf("mzpitziej", "datapaircnu", "h", "dataebzofmmcejvs"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("sxrmlxszxokpqn") - .withParameters(mapOf("b", "datacyjsekdfhnhbkt"))) - .withScript("rpps"), - "aaq", - new PowerQuerySink().withName("dqvuqufaowu").withDescription("ujjvojmynlv") - .withDataset(new DatasetReference().withReferenceName("jslxewfqvlhj") - .withParameters(mapOf("fgzlrnfmmefppjxt", "datarh", "dvdrn", "dataffwqbdvgfgirrzyn"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("qfrxggvstyxv").withParameters( - mapOf("imfpnp", "dataaqf", "tdorvxdwgpu", "datakdgjnd", "dzjmjkg", "datajeffpidwqr"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("plcoqbouetfx").withDatasetParameters("dataj") - .withParameters(mapOf("dgq", "datadlokhimzfltxqpoz", "jwjnvhu", "datakfevhgjk")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("m") - .withParameters(mapOf("poynbsttureqvxzl", "datarwpjtj"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("aiusglgfecsr") - .withParameters(mapOf("xbmgheyamoety", "dataswmkxbbziffpvvg", "xseyjqklaihqrbrm", - "dataevyitidi", "pydjsubt", "datahljqqbue", "df", "datafbvcveomdlrsjgu"))) - .withScript("gjdpy"), - "i", - new PowerQuerySink().withName("acusmosjawbnxciz").withDescription("ifndgrjnzjygh") - .withDataset(new DatasetReference().withReferenceName("fs") - .withParameters(mapOf("ems", "datapvgec", "vxlaywkbuve", "datacgrkgt"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("tezeyfdgnaoi") - .withParameters(mapOf("fzdgsmeeqelmrpvg", "datafdgtwxiesrhvgp", "gqsk", "datax"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("wobeg").withDatasetParameters("datajxkxvg") - .withParameters(mapOf("enulrfe", "datakfef")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("gjnqujtcwp") - .withParameters(mapOf("afhvuy", "dataaft", "bdv", "databtlmnrdkiqs"))) - .withRejectedDataLinkedService(new LinkedServiceReference() 
- .withReferenceName("qsmk") - .withParameters(mapOf("mexrofqh", "dataljxnkpd", "gwov", "dataptsdlcsrhttmh", "krcwnlyqq", - "dataduzqu", "q", "dataknul"))) - .withScript("yifjv"), - "dspykcreuopigsu", - new PowerQuerySink().withName("qfzbiy").withDescription("wyyvsbjpyxlzxjir") - .withDataset(new DatasetReference().withReferenceName("prsh").withParameters( - mapOf("duwqovlqfz", "datagoqxfbscitizroru", "kovubfugdgpmtzqp", "dataehagorbspotq"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("ochmeximhmisvetu").withParameters( - mapOf("lgypnaqwjsdwna", "datakjamihnr", "iiqixfygntrynfoa", "datauqntxbee"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ztpss").withDatasetParameters("datadqcrigygtod") - .withParameters(mapOf("pdzbybrvkxrcfzsz", "datai")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("uulhfdggsr") - .withParameters(mapOf("tgelfkhmgs", "datahhlggobjcf", "aqk", "datahocrphzdkikjy"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("upm") - .withParameters(mapOf("are", "datatsa", "mjwmld", "datav", "ryykon", "datarwglmmcat", - "ltcois", "dataenmvhbgpgvliinu"))) - .withScript("uewrhkjmphfhmua"))) - .withQueries(Arrays.asList( - new PowerQuerySinkMapping().withQueryName("k") - .withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("eliktk").withDescription("mp") - .withDataset(new DatasetReference().withReferenceName("zxicq")) - .withLinkedService(new LinkedServiceReference().withReferenceName("wzxqmve")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("roymrqdgyttfzoz").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("iddcnljlly")) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("hqhcirsajtdz")) - .withScript("crdrdkexcyw"), - new PowerQuerySink().withName("uezxcpxwqgmnqueq").withDescription("bedfoosiplhygpsa") - .withDataset(new DatasetReference().withReferenceName("pmmshfhr")) - .withLinkedService(new LinkedServiceReference().withReferenceName("y")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vga").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("mfcgb")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("pslwlh")) - .withScript("vnj"), - new PowerQuerySink().withName("pnyehhqytjrmxaz").withDescription("iqzaeadkahpokf") - .withDataset(new DatasetReference().withReferenceName("ivhozhrwbvfljxl")) - .withLinkedService(new LinkedServiceReference().withReferenceName("tirnpazrbkhyzufk")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vbyf").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("recwdle")) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("muqmzxrjvh")) - .withScript("rulpnrjswrp"), - new PowerQuerySink().withName("emydn").withDescription("pbr") - .withDataset(new DatasetReference().withReferenceName("gtblxamd")) - .withLinkedService(new LinkedServiceReference().withReferenceName("pifygxuaidrbz")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - 
.withReferenceName("nkwullvuk").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ahdkeayu")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("ivp")) - .withScript("ihuupeflkwbvx"))), - new PowerQuerySinkMapping().withQueryName("mosjzmandjjqh") - .withDataflowSinks(Arrays.asList(new PowerQuerySink().withName("iwrfocbetlljqkgl") - .withDescription("jawaxvlc").withDataset(new DatasetReference().withReferenceName("c")) - .withLinkedService(new LinkedServiceReference().withReferenceName("seqmejerjyz")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("zbjieeivdrqtlcx").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("djr")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("qdiaxf")) - .withScript("q"))), - new PowerQuerySinkMapping().withQueryName("gykrmfxlturxyvg").withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("ujsnzue").withDescription("znkdbhzcd") - .withDataset(new DatasetReference().withReferenceName("nztzhqsbgksfjq")) - .withLinkedService(new LinkedServiceReference().withReferenceName("eqhj")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rneorbdtli").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("mm")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("erxnljt")) - .withScript("hsnpc"), - new PowerQuerySink().withName("qparqzygdko").withDescription("tjsr") - .withDataset(new DatasetReference().withReferenceName("qamqqqvuk")) - .withLinkedService(new LinkedServiceReference().withReferenceName("cdppdmmfdu")) + .withUserProperties(Arrays.asList(new UserProperty().withName("orepbqkmylj").withValue("dataicjlrl"), + new UserProperty().withName("qbavplqkcsr").withValue("datavvniwqpc"), + new UserProperty().withName("yo").withValue("datajikv"), + new UserProperty().withName("kpu").withValue("datatjcfyxyrkpclvp"))) + .withPolicy(new ActivityPolicy().withTimeout("dataitypashvjriniz") + .withRetry("dataadus") + .withRetryIntervalInSeconds(818396335) + .withSecureInput(true) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withSinks(mapOf("uqi", new PowerQuerySink().withName("l") + .withDescription("l") + .withDataset(new DatasetReference().withReferenceName("zzkgebey") + .withParameters(mapOf("azfjbxhnahgbloea", "datagyksgntgiw", "gbyxpma", "datawidumilxi"))) + .withLinkedService( + new LinkedServiceReference().withReferenceName("rn").withParameters(mapOf("faf", "datahviqwfctiy"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("nsktdgbombnc") + .withDatasetParameters("dataxkcpqw") + .withParameters( + mapOf("ijobcpruommtuca", "dataqvlcunnb", "a", "datagrlvkdaphzemn", "qbwim", "datatkbzz")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("iminccnubynrh") + .withParameters(mapOf("hxtbcqjvyzotxkhy", "datagfzhbtzuddqt", "mqrioa", "dataj", "rglmjrufwqpnmcw", + "datazmrwlsrjjaj", "dzkbky", "datasfpyt"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("ztwwkvwpbdo") + .withParameters(mapOf("jzrzv", "datarmp", "igzwhfeq", "dataupnfrlygyjrlu"))) + .withScript("gxdxqefuzubn"), "nhlsf", + new 
PowerQuerySink().withName("vmc") + .withDescription("nmnojfmztpwu") + .withDataset(new DatasetReference().withReferenceName("untvyeyebw") + .withParameters(mapOf("eyuirrrxrftfamo", "dataonqjnpkofjfus"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("vjm") + .withParameters(mapOf("cowmukzcrp", "datalphqtq", "zbeutqfx", "datalgzctfnlakl", "wticu", + "dataxzwiehqvvbgwxp", "lzhujcx", "datakmzubdmcdfvw"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("aqolnfeywbpenq") + .withDatasetParameters("datapzwwsfrpbwvfjdg") + .withParameters(mapOf("jyulo", "dataycxmct", "lkmjeekbmwizis", "datapulwcxmxf", "leaotaakcy", + "datatmxyrsnmwiy")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("yvnpuclqtdca") + .withParameters(mapOf("cl", "datazeckpgpjkczkcdlz"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("anabzycxvifkzs") + .withParameters(mapOf("inlqkzee", "datal", "kwxb", "datazelmmwmdhmd"))) + .withScript("iujsjngs"), + "xlkloqpwsaqcr", + new PowerQuerySink().withName("ktogmcblw") + .withDescription("vnisin") + .withDataset(new DatasetReference().withReferenceName("cwwpuka") + .withParameters(mapOf("hngaczgg", "dataj", "tok", "dataiaqmuptnhuybt", "ljzlnre", + "datahyozxotwral", "mjschcxud", "datalwfgyabglsar"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("mhhsumzfvrakpql") + .withParameters(mapOf("holjjxilbsbh", "dataiudveoibeh"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("ucwdc") + .withDatasetParameters("datakwwuljveu") + .withParameters(mapOf("fmvauhnh", "dataxrsxb", "iwvejwtzkiid", "datanswlfuukildlayt", + "ytaeallsxfza", "datasskl")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("jvmjhuvuadpd") + .withParameters(mapOf("uwaqiomdlp", "datawbheijmwaj", "wowmwrn", "datakfslm", "obg", + "datauwgrtvyw", "evyyppaycasch", "datavhdb"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("midkd") + .withParameters(mapOf("xxcvugarykbyuuk", "dataptessvmd", "csevqtd", "datassretugorcz"))) + .withScript("jwajsbqyt"), + "vfrtmwyezr", + new PowerQuerySink().withName("xahxysl") + .withDescription("okfomakmi") + .withDataset(new DatasetReference().withReferenceName("hduflajsgut") + .withParameters(mapOf("firaoytkkq", "databvxyqprchk", "dzfypdsrfpihvij", "dataaazvmnv"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("tkpo") + .withParameters(mapOf("njduyotqb", "dataoyjjfx"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("xbtuxm") + .withDatasetParameters("datarixolbzjlqrps") + .withParameters(mapOf("d", "datastcoibi")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("wpvrwec") + .withParameters(mapOf("jwpejtszj", "dataiaognmanrzjprlq", "vwvycvnow", "datavj", "xwwhusrodrom", + "dataclijmdl", "fw", "dataz"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("ocwkwmqroqldacx") + .withParameters(mapOf("ssukvsgkzxznctx", "dataq", "jfcaqpkpvdiir", "datacznszmjz"))) + .withScript("kgjdn"))) + .withQueries(Arrays.asList( + new PowerQuerySinkMapping().withQueryName("awpbif") + .withDataflowSinks(Arrays.asList( + new PowerQuerySink().withName("joqcyowzwshsgrea") + 
.withDescription("wsdirxpr") + .withDataset(new DatasetReference().withReferenceName("gzpnrmmjyv")) + .withLinkedService(new LinkedServiceReference().withReferenceName("tjuwobwskyjlte")) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("cltygxziti").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("onugcomqlbjxps")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("vggv")) - .withScript("aumr"), - new PowerQuerySink().withName("dyswlmxe").withDescription("gkf") - .withDataset(new DatasetReference().withReferenceName("gkbujqtklzwmqzk")) - .withLinkedService(new LinkedServiceReference().withReferenceName("czpzwfewbj")) + .withReferenceName("vrpvhivvlmzcvpo") + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ejhxfjlecbbabi")) + .withRejectedDataLinkedService( + new LinkedServiceReference().withReferenceName("alsrxzatlzwr")) + .withScript("iocvjmyinpl"), + new PowerQuerySink().withName("clpvwtwboxgrv") + .withDescription("voq") + .withDataset(new DatasetReference().withReferenceName("quwkuszllognl")) + .withLinkedService(new LinkedServiceReference().withReferenceName("hvllenygimnfvq")) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("mhpue").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("y")) + .withReferenceName("zo") + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("oezgibfisfmc")) .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("djcgdharlrfa")) - .withScript("b"), - new PowerQuerySink().withName("ymnukvfjbxvhui").withDescription("ody") - .withDataset(new DatasetReference().withReferenceName("otqp")) - .withLinkedService(new LinkedServiceReference().withReferenceName("wrahqqumozulefp")) + new LinkedServiceReference().withReferenceName("rhgcuejtxxlkokt")) + .withScript("vfcwedyz"), + new PowerQuerySink().withName("yqtyuywzccumk") + .withDescription("ygrkcolvitb") + .withDataset(new DatasetReference().withReferenceName("oxrbotzvrgo")) + .withLinkedService(new LinkedServiceReference().withReferenceName("ayjselrfqstbfuqm")) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("hx").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("xx")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("fiali")) - .withScript("hhne"))), - new PowerQuerySinkMapping().withQueryName("bgw") - .withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("x").withDescription("ypsmpgop") - .withDataset(new DatasetReference().withReferenceName("stysirhnwse")) - .withLinkedService(new LinkedServiceReference().withReferenceName("we")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rnjiyddc").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("irhnkmjgnnpl")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("xopi")) - .withScript("mmztvkg"), - new PowerQuerySink().withName("wqocjkqohcfnomw").withDescription("ebvjmmsgukoql") - .withDataset(new 
DatasetReference().withReferenceName("kerztenzkbppgc")) - .withLinkedService(new LinkedServiceReference().withReferenceName("tdzmei")) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ybcorsmdza").withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("cb")) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("tm")) - .withScript("eozayji"))))) - .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("q").withDatasetParameters("datadstztzhwbw") - .withParameters(mapOf("agegzrt", "datauvqp", "je", "datausxh", "j", "datapeafzvxbvk")) - .withAdditionalProperties(mapOf())) - .withStaging(new DataFlowStagingInfo().withLinkedService(new LinkedServiceReference() - .withReferenceName("szeoxzrrha").withParameters(mapOf("pq", "dataechrj"))) - .withFolderPath("dataylkygcgqjdvab")) - .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("izmtmctehx") - .withParameters(mapOf("vbohpcw", "datapyerhdablqoll", "tjjqcfzdfmqoe", "datasqavpu", "zymoqatga", - "datafypuypztn"))) - .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("datahih") - .withCoreCount("dataebaw")) - .withTraceLevel("dataajdkjqznmzr").withContinueOnError("datagyvxln") - .withRunConcurrently("datapcrcpishjkov").withSourceStagingConcurrency("datax"); + .withReferenceName("fvbeyugggfshn") + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("vhflbchzoboee")) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("kwdtuwbrw")) + .withScript("lwdh"))), + new PowerQuerySinkMapping().withQueryName("ken") + .withDataflowSinks(Arrays.asList(new PowerQuerySink().withName("syoybjtzdgzt") + .withDescription("qiilfovmcjchbof") + .withDataset(new DatasetReference().withReferenceName("vqvjfszvece")) + .withLinkedService(new LinkedServiceReference().withReferenceName("ptez")) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("erurcjgkauyzbrdi") + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("rkxyjsuappd")) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("jiguusbw")) + .withScript("jqrxyaa"))))) + .withDataFlow(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("bkgxqsbwepduyqx") + .withDatasetParameters("dataxarddbnqyayl") + .withParameters(mapOf("rtpxwgt", "datatwd")) + .withAdditionalProperties(mapOf())) + .withStaging( + new DataFlowStagingInfo().withLinkedService(new LinkedServiceReference().withReferenceName("qllqn") + .withParameters(mapOf("bytshsathk", "datafufleioywlcl"))).withFolderPath("datawqljnuayp")) + .withIntegrationRuntime(new IntegrationRuntimeReference().withReferenceName("z") + .withParameters(mapOf("gytquktcqggxdnpp", "dataeqajipnpwomjlps"))) + .withCompute(new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("dataqag") + .withCoreCount("datawoozlfliir")) + .withTraceLevel("datanglfcrtkpfsjwtq") + .withContinueOnError("dataqeofjoqjmlz") + .withRunConcurrently("dataizjssfwoj") + .withSourceStagingConcurrency("datagmhzrjsb"); model = BinaryData.fromObject(model).toObject(ExecuteWranglingDataflowActivity.class); - Assertions.assertEquals("dcxjcjiqxybbbytm", model.name()); - 
Assertions.assertEquals("zrjcbadnwp", model.description()); + Assertions.assertEquals("rvzbmhmkoxsavzng", model.name()); + Assertions.assertEquals("wymeb", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("spdmeeabc", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("fimpxyurnmanb", model.userProperties().get(0).name()); - Assertions.assertEquals(328007856, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("qtpwhicnnanqzrml", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("orepbqkmylj", model.userProperties().get(0).name()); + Assertions.assertEquals(818396335, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("on", model.sinks().get("tfdcwekbbvtcox").name()); - Assertions.assertEquals("umlfdxetqknzev", model.sinks().get("tfdcwekbbvtcox").description()); - Assertions.assertEquals("pqnqneo", model.sinks().get("tfdcwekbbvtcox").dataset().referenceName()); - Assertions.assertEquals("pctlbu", model.sinks().get("tfdcwekbbvtcox").linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.sinks().get("tfdcwekbbvtcox").flowlet().type()); - Assertions.assertEquals("kyzirgiyqz", model.sinks().get("tfdcwekbbvtcox").flowlet().referenceName()); - Assertions.assertEquals("dohiotgf", model.sinks().get("tfdcwekbbvtcox").schemaLinkedService().referenceName()); - Assertions.assertEquals("sxrmlxszxokpqn", - model.sinks().get("tfdcwekbbvtcox").rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("rpps", model.sinks().get("tfdcwekbbvtcox").script()); - Assertions.assertEquals("k", model.queries().get(0).queryName()); - Assertions.assertEquals("eliktk", model.queries().get(0).dataflowSinks().get(0).name()); - Assertions.assertEquals("mp", model.queries().get(0).dataflowSinks().get(0).description()); - Assertions.assertEquals("zxicq", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("wzxqmve", + Assertions.assertEquals("l", model.sinks().get("uqi").name()); + Assertions.assertEquals("l", model.sinks().get("uqi").description()); + Assertions.assertEquals("zzkgebey", model.sinks().get("uqi").dataset().referenceName()); + Assertions.assertEquals("rn", model.sinks().get("uqi").linkedService().referenceName()); + Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get("uqi").flowlet().type()); + Assertions.assertEquals("nsktdgbombnc", model.sinks().get("uqi").flowlet().referenceName()); + Assertions.assertEquals("iminccnubynrh", model.sinks().get("uqi").schemaLinkedService().referenceName()); + Assertions.assertEquals("ztwwkvwpbdo", model.sinks().get("uqi").rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("gxdxqefuzubn", model.sinks().get("uqi").script()); + Assertions.assertEquals("awpbif", model.queries().get(0).queryName()); + Assertions.assertEquals("joqcyowzwshsgrea", model.queries().get(0).dataflowSinks().get(0).name()); + 
Assertions.assertEquals("wsdirxpr", model.queries().get(0).dataflowSinks().get(0).description()); + Assertions.assertEquals("gzpnrmmjyv", model.queries().get(0).dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("tjuwobwskyjlte", model.queries().get(0).dataflowSinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.queries().get(0).dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("roymrqdgyttfzoz", + Assertions.assertEquals("vrpvhivvlmzcvpo", model.queries().get(0).dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("iddcnljlly", + Assertions.assertEquals("ejhxfjlecbbabi", model.queries().get(0).dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("hqhcirsajtdz", + Assertions.assertEquals("alsrxzatlzwr", model.queries().get(0).dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("crdrdkexcyw", model.queries().get(0).dataflowSinks().get(0).script()); + Assertions.assertEquals("iocvjmyinpl", model.queries().get(0).dataflowSinks().get(0).script()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataFlow().type()); - Assertions.assertEquals("q", model.dataFlow().referenceName()); - Assertions.assertEquals("szeoxzrrha", model.staging().linkedService().referenceName()); - Assertions.assertEquals("izmtmctehx", model.integrationRuntime().referenceName()); + Assertions.assertEquals("bkgxqsbwepduyqx", model.dataFlow().referenceName()); + Assertions.assertEquals("qllqn", model.staging().linkedService().referenceName()); + Assertions.assertEquals("z", model.integrationRuntime().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java index 6bfccc4698094..155427f3a7dba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java @@ -22,53 +22,53 @@ public final class ExecutionActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExecutionActivity model = BinaryData.fromString( - 
"{\"type\":\"Execution\",\"linkedServiceName\":{\"referenceName\":\"hkdkv\",\"parameters\":{\"otidikxmtmjkfmr\":\"datapbds\",\"vsdyjmbydrgxvnmt\":\"datangrjsqtirhabhhp\",\"qcyycxlllk\":\"datamuxrdmudwruogmth\"}},\"policy\":{\"timeout\":\"datajlwf\",\"retry\":\"datagiebqvusc\",\"retryIntervalInSeconds\":90734181,\"secureInput\":false,\"secureOutput\":false,\"\":{\"wfdtjpsjwlpcxl\":\"datauplamdgffvxnisoo\",\"oouocafaxvhjrpb\":\"datazzcdrgtu\",\"sgn\":\"datarolge\"}},\"name\":\"njtxuuwdmrqah\",\"description\":\"yjahbzbtlmacbwm\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"icel\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"qgjibrxxiaocr\":\"dataibnd\",\"pugnvhtgwadu\":\"datauhumgw\",\"zxzwinrg\":\"dataokoxqboz\"}},{\"activity\":\"kqobovqlltql\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"jkcol\":\"datanrb\",\"qvv\":\"datazqlyputawdmdikuf\",\"hvfojcvnh\":\"dataujzofyldxk\",\"kysg\":\"dataebuiy\"}}],\"userProperties\":[{\"name\":\"beauvldb\",\"value\":\"datan\"},{\"name\":\"guifqjtoxzxbljpz\",\"value\":\"datauugdarfumitjai\"}],\"\":{\"qrjcozrwrylcttv\":\"dataokfdybvywbgmjrvr\",\"a\":\"datakxgffpvvqwvvnxoq\"}}") + "{\"type\":\"sxyk\",\"linkedServiceName\":{\"referenceName\":\"aaepxlxbofdchbo\",\"parameters\":{\"esetutqjsojw\":\"dataskz\"}},\"policy\":{\"timeout\":\"datatrnakytzcma\",\"retry\":\"datasljkaarqhpxwq\",\"retryIntervalInSeconds\":1027912523,\"secureInput\":false,\"secureOutput\":true,\"\":{\"teedjnklv\":\"datagmtywivbu\"}},\"name\":\"bh\",\"description\":\"dudj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"nnnxhgd\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Succeeded\"],\"\":{\"icjkq\":\"datadbg\"}}],\"userProperties\":[{\"name\":\"zkhdn\",\"value\":\"datanmrx\"},{\"name\":\"dfkqlkaipfyvquas\",\"value\":\"dataywkbiek\"}],\"\":{\"pgnapkpaie\":\"datakqah\",\"zmlrvlghlrcdiq\":\"dataoxvoaoavezwcl\",\"awifz\":\"datavhcbu\",\"qljl\":\"datajtockgqaawyyszwo\"}}") .toObject(ExecutionActivity.class); - Assertions.assertEquals("njtxuuwdmrqah", model.name()); - Assertions.assertEquals("yjahbzbtlmacbwm", model.description()); + Assertions.assertEquals("bh", model.name()); + Assertions.assertEquals("dudj", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("icel", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("beauvldb", model.userProperties().get(0).name()); - Assertions.assertEquals("hkdkv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(90734181, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("nnnxhgd", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zkhdn", model.userProperties().get(0).name()); + Assertions.assertEquals("aaepxlxbofdchbo", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1027912523, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals(true, 
model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExecutionActivity model = new ExecutionActivity().withName("njtxuuwdmrqah").withDescription("yjahbzbtlmacbwm") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("icel") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kqobovqlltql") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("beauvldb").withValue("datan"), - new UserProperty().withName("guifqjtoxzxbljpz").withValue("datauugdarfumitjai"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hkdkv") - .withParameters(mapOf("otidikxmtmjkfmr", "datapbds", "vsdyjmbydrgxvnmt", "datangrjsqtirhabhhp", - "qcyycxlllk", "datamuxrdmudwruogmth"))) - .withPolicy(new ActivityPolicy().withTimeout("datajlwf").withRetry("datagiebqvusc") - .withRetryIntervalInSeconds(90734181).withSecureInput(false).withSecureOutput(false) + ExecutionActivity model = new ExecutionActivity().withName("bh") + .withDescription("dudj") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("nnnxhgd") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("zkhdn").withValue("datanmrx"), + new UserProperty().withName("dfkqlkaipfyvquas").withValue("dataywkbiek"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aaepxlxbofdchbo") + .withParameters(mapOf("esetutqjsojw", "dataskz"))) + .withPolicy(new ActivityPolicy().withTimeout("datatrnakytzcma") + .withRetry("datasljkaarqhpxwq") + .withRetryIntervalInSeconds(1027912523) + .withSecureInput(false) + .withSecureOutput(true) .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(ExecutionActivity.class); - Assertions.assertEquals("njtxuuwdmrqah", model.name()); - Assertions.assertEquals("yjahbzbtlmacbwm", model.description()); + Assertions.assertEquals("bh", model.name()); + Assertions.assertEquals("dudj", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("icel", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("beauvldb", model.userProperties().get(0).name()); - Assertions.assertEquals("hkdkv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(90734181, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("nnnxhgd", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zkhdn", model.userProperties().get(0).name()); + 
Assertions.assertEquals("aaepxlxbofdchbo", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1027912523, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals(true, model.policy().secureOutput()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java index af4700e647e22..c8c31f43fa323 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java @@ -13,13 +13,13 @@ public final class ExportSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExportSettings model - = BinaryData.fromString("{\"type\":\"ExportSettings\",\"\":{\"dhzwdyva\":\"dataffutezxrp\"}}") + = BinaryData.fromString("{\"type\":\"o\",\"\":{\"dqv\":\"databs\",\"wajjzxcqnl\":\"dataqcme\"}}") .toObject(ExportSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExportSettings model = new ExportSettings().withAdditionalProperties(mapOf("type", "ExportSettings")); + ExportSettings model = new ExportSettings().withAdditionalProperties(mapOf("type", "o")); model = BinaryData.fromObject(model).toObject(ExportSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java index d84f849576100..0d8cd1c3f87c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java @@ -13,19 +13,20 @@ public final class ExposureControlBatchRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ExposureControlBatchRequest model = BinaryData - .fromString("{\"exposureControlRequests\":[{\"featureName\":\"fbuhfmvfaxkffe\",\"featureType\":\"th\"}]}") + ExposureControlBatchRequest model = BinaryData.fromString( + "{\"exposureControlRequests\":[{\"featureName\":\"ygevqzntypmrbpiz\",\"featureType\":\"r\"},{\"featureName\":\"sdpydnfyhxdeoejz\",\"featureType\":\"w\"}]}") .toObject(ExposureControlBatchRequest.class); - Assertions.assertEquals("fbuhfmvfaxkffe", model.exposureControlRequests().get(0).featureName()); - Assertions.assertEquals("th", model.exposureControlRequests().get(0).featureType()); + Assertions.assertEquals("ygevqzntypmrbpiz", model.exposureControlRequests().get(0).featureName()); + Assertions.assertEquals("r", model.exposureControlRequests().get(0).featureType()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ExposureControlBatchRequest model = new ExposureControlBatchRequest().withExposureControlRequests( - Arrays.asList(new 
ExposureControlRequest().withFeatureName("fbuhfmvfaxkffe").withFeatureType("th"))); + Arrays.asList(new ExposureControlRequest().withFeatureName("ygevqzntypmrbpiz").withFeatureType("r"), + new ExposureControlRequest().withFeatureName("sdpydnfyhxdeoejz").withFeatureType("w"))); model = BinaryData.fromObject(model).toObject(ExposureControlBatchRequest.class); - Assertions.assertEquals("fbuhfmvfaxkffe", model.exposureControlRequests().get(0).featureName()); - Assertions.assertEquals("th", model.exposureControlRequests().get(0).featureType()); + Assertions.assertEquals("ygevqzntypmrbpiz", model.exposureControlRequests().get(0).featureName()); + Assertions.assertEquals("r", model.exposureControlRequests().get(0).featureType()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java index c5db19883a28f..ef58cf0540259 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java @@ -12,15 +12,15 @@ public final class ExposureControlBatchResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ExposureControlBatchResponseInner model = BinaryData.fromString( - "{\"exposureControlResponses\":[{\"featureName\":\"ez\",\"value\":\"shxmzsbbzoggigrx\"},{\"featureName\":\"ur\",\"value\":\"xxjnspydptk\"}]}") + ExposureControlBatchResponseInner model = BinaryData + .fromString("{\"exposureControlResponses\":[{\"featureName\":\"jttgzf\",\"value\":\"shcbkhajdeyeamdp\"}]}") .toObject(ExposureControlBatchResponseInner.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExposureControlBatchResponseInner model = new ExposureControlBatchResponseInner().withExposureControlResponses( - Arrays.asList(new ExposureControlResponseInner(), new ExposureControlResponseInner())); + ExposureControlBatchResponseInner model = new ExposureControlBatchResponseInner() + .withExposureControlResponses(Arrays.asList(new ExposureControlResponseInner())); model = BinaryData.fromObject(model).toObject(ExposureControlBatchResponseInner.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java index a33d034da583e..41dc1301ef974 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java @@ -12,18 +12,18 @@ public final class ExposureControlRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExposureControlRequest model - = BinaryData.fromString("{\"featureName\":\"mvxi\",\"featureType\":\"uugidyjrrfby\"}") + = 
BinaryData.fromString("{\"featureName\":\"rvjx\",\"featureType\":\"nspydptkoenkoukn\"}") .toObject(ExposureControlRequest.class); - Assertions.assertEquals("mvxi", model.featureName()); - Assertions.assertEquals("uugidyjrrfby", model.featureType()); + Assertions.assertEquals("rvjx", model.featureName()); + Assertions.assertEquals("nspydptkoenkoukn", model.featureType()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ExposureControlRequest model - = new ExposureControlRequest().withFeatureName("mvxi").withFeatureType("uugidyjrrfby"); + = new ExposureControlRequest().withFeatureName("rvjx").withFeatureType("nspydptkoenkoukn"); model = BinaryData.fromObject(model).toObject(ExposureControlRequest.class); - Assertions.assertEquals("mvxi", model.featureName()); - Assertions.assertEquals("uugidyjrrfby", model.featureType()); + Assertions.assertEquals("rvjx", model.featureName()); + Assertions.assertEquals("nspydptkoenkoukn", model.featureType()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java index dec1e0d826d3c..4e8b8845e7ba3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java @@ -11,7 +11,7 @@ public final class ExposureControlResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExposureControlResponseInner model - = BinaryData.fromString("{\"featureName\":\"svexcsonpclhoco\",\"value\":\"lkevle\"}") + = BinaryData.fromString("{\"featureName\":\"dwtiukbldngkp\",\"value\":\"ipazyxoegukgjnpi\"}") .toObject(ExposureControlResponseInner.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java index 294cba57b4bd3..38ccf3e348f99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java @@ -6,50 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; import com.azure.resourcemanager.datafactory.models.ExposureControlResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import 
java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ExposureControlsGetFeatureValueByFactoryWithResponseMockTests { @Test public void testGetFeatureValueByFactoryWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"featureName\":\"yxsaxwugpn\",\"value\":\"sxwk\"}"; - String responseStr = "{\"featureName\":\"dqoj\",\"value\":\"a\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); ExposureControlResponse response = manager.exposureControls() - .getFeatureValueByFactoryWithResponse("f", "jrkueprpnzbfoldb", - new ExposureControlRequest().withFeatureName("iljgyrpvmaywpr").withFeatureType("vqbnzrrkmanr"), + .getFeatureValueByFactoryWithResponse("ignbisszobpxf", "padzdzswvfw", + new ExposureControlRequest().withFeatureName("njwqxgzt").withFeatureType("gdqnwvb"), com.azure.core.util.Context.NONE) .getValue(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java index 515a34f10ab29..d4e5ca7523995 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java @@ -6,50 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import 
com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; import com.azure.resourcemanager.datafactory.models.ExposureControlResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ExposureControlsGetFeatureValueWithResponseMockTests { @Test public void testGetFeatureValueWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"featureName\":\"pdtso\",\"value\":\"nhlrp\"}"; - String responseStr = "{\"featureName\":\"s\",\"value\":\"treihlszpus\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); ExposureControlResponse response = manager.exposureControls() - .getFeatureValueWithResponse("ddfvdktbaexbvyu", - new ExposureControlRequest().withFeatureName("bycuuxgdadflil").withFeatureType("ptvmtnougmf"), + .getFeatureValueWithResponse("pwvieymkguvrd", + new ExposureControlRequest().withFeatureName("proytd").withFeatureType("elqcvm"), com.azure.core.util.Context.NONE) .getValue(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java index eb57e8007e3b6..7f00632d3684d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java @@ -6,56 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; 
-import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest; import com.azure.resourcemanager.datafactory.models.ExposureControlBatchResponse; import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests { @Test public void testQueryFeatureValuesByFactoryWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"exposureControlResponses\":[{\"featureName\":\"dgzfqs\",\"value\":\"yuillrrqw\"},{\"featureName\":\"hiqjegea\",\"value\":\"oqg\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"exposureControlResponses\":[{\"featureName\":\"zcnpsdpfw\",\"value\":\"wwbunfymbwi\"},{\"featureName\":\"rajtbmjok\",\"value\":\"qgokha\"},{\"featureName\":\"ylkflf\",\"value\":\"fjskndwywbptvym\"},{\"featureName\":\"pdcddbeozhprlxxb\",\"value\":\"z\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); ExposureControlBatchResponse response = manager.exposureControls() - .queryFeatureValuesByFactoryWithResponse("vvbsilahskesea", "g", - new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( - new ExposureControlRequest().withFeatureName("essiielbtge").withFeatureType("wcqeihuyrzi"), - new ExposureControlRequest().withFeatureName("yvquufplmpbvzbt").withFeatureType("totpvoe"), - new ExposureControlRequest().withFeatureName("fwrao").withFeatureType("r"))), + .queryFeatureValuesByFactoryWithResponse("k", "dtofakmopqfzvvti", + new ExposureControlBatchRequest().withExposureControlRequests(Arrays + .asList(new 
ExposureControlRequest().withFeatureName("suemewfut").withFeatureType("bpnrfucxt"))), com.azure.core.util.Context.NONE) .getValue(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java index 08022db1a8753..7b25d30656860 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java @@ -11,14 +11,14 @@ public final class ExpressionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - Expression model = BinaryData.fromString("{\"value\":\"tny\"}").toObject(Expression.class); - Assertions.assertEquals("tny", model.value()); + Expression model = BinaryData.fromString("{\"value\":\"cciklhs\"}").toObject(Expression.class); + Assertions.assertEquals("cciklhs", model.value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Expression model = new Expression().withValue("tny"); + Expression model = new Expression().withValue("cciklhs"); model = BinaryData.fromObject(model).toObject(Expression.class); - Assertions.assertEquals("tny", model.value()); + Assertions.assertEquals("cciklhs", model.value()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java index 7a16a04d4157f..85a4533ff3302 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java @@ -14,73 +14,88 @@ public final class ExpressionV2Tests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ExpressionV2 model = BinaryData.fromString( - 
"{\"type\":\"Unary\",\"value\":\"fgfspzw\",\"operator\":\"cu\",\"operands\":[{\"type\":\"Binary\",\"value\":\"jvlwczwdkk\",\"operator\":\"ooqnvhtrfckrm\",\"operands\":[{\"type\":\"Binary\",\"value\":\"tfmpcbvkoughj\",\"operator\":\"pptsvppfdnihxcij\",\"operands\":[{}]},{\"type\":\"Unary\",\"value\":\"yvscgzqncddaqqkl\",\"operator\":\"ibro\",\"operands\":[{},{}]}]},{\"type\":\"Unary\",\"value\":\"nex\",\"operator\":\"sanglw\",\"operands\":[{\"type\":\"Binary\",\"value\":\"m\",\"operator\":\"rhjj\",\"operands\":[{},{},{},{}]},{\"type\":\"Unary\",\"value\":\"rywucpdzbnty\",\"operator\":\"wnpuyhqayls\",\"operands\":[{},{},{},{}]}]},{\"type\":\"Unary\",\"value\":\"lzrrhabbdqne\",\"operator\":\"fujzwqpkhgrdg\",\"operands\":[{\"type\":\"Field\",\"value\":\"qkv\",\"operator\":\"n\",\"operands\":[{},{},{}]},{\"type\":\"Unary\",\"value\":\"aoetrglpaocq\",\"operator\":\"vleouevuhago\",\"operands\":[{},{},{},{}]},{\"type\":\"Binary\",\"value\":\"tuoq\",\"operator\":\"crdnmhrym\",\"operands\":[{},{}]},{\"type\":\"Unary\",\"value\":\"owubkiocjn\",\"operator\":\"nwktbsckcng\",\"operands\":[{},{},{}]}]},{\"type\":\"Field\",\"value\":\"zoxmajpxbtkzvt\",\"operator\":\"tgrmgcfvfwwev\",\"operands\":[{\"type\":\"Constant\",\"value\":\"x\",\"operator\":\"qaqzttogblriznr\",\"operands\":[{},{},{},{}]},{\"type\":\"Unary\",\"value\":\"p\",\"operator\":\"nqljlw\",\"operands\":[{},{},{}]},{\"type\":\"Unary\",\"value\":\"ln\",\"operator\":\"jqlq\",\"operands\":[{},{}]}]}]}") + "{\"type\":\"Field\",\"value\":\"el\",\"operator\":\"ikigzbrkwgsqos\",\"operands\":[{\"type\":\"Constant\",\"value\":\"hkljktujf\",\"operator\":\"eqfryketwrzxb\",\"operands\":[{\"type\":\"Field\",\"value\":\"g\",\"operator\":\"qnjfjyppixfub\",\"operands\":[{},{},{}]},{\"type\":\"Constant\",\"value\":\"wyetyrnhisp\",\"operator\":\"ivanlypspnjlo\",\"operands\":[{},{}]}]},{\"type\":\"Unary\",\"value\":\"lma\",\"operator\":\"rbwbkrsmkeiunxtb\",\"operands\":[{\"type\":\"Unary\",\"value\":\"xdtzzmcrmhhfcaiz\",\"operator\":\"iyuzufd\",\"operands\":[{}]},{\"type\":\"Unary\",\"value\":\"nfljv\",\"operator\":\"qkoecozfauhn\",\"operands\":[{},{},{},{}]},{\"type\":\"Unary\",\"value\":\"lgrzcjpkzmhax\",\"operator\":\"jimlfrkmynmmm\",\"operands\":[{}]},{\"type\":\"Binary\",\"value\":\"ghordcc\",\"operator\":\"rwzczlvqlcca\",\"operands\":[{},{}]}]},{\"type\":\"Binary\",\"value\":\"tyiqqwdgys\",\"operator\":\"vvagvqrwrchwd\",\"operands\":[{\"type\":\"Constant\",\"value\":\"tfjj\",\"operator\":\"xweuoklwtoecxndh\",\"operands\":[{},{},{}]},{\"type\":\"Field\",\"value\":\"ocunan\",\"operator\":\"tverplhfwq\",\"operands\":[{},{},{}]}]}]}") .toObject(ExpressionV2.class); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.type()); - Assertions.assertEquals("fgfspzw", model.value()); - Assertions.assertEquals("cu", model.operator()); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.operands().get(0).type()); - Assertions.assertEquals("jvlwczwdkk", model.operands().get(0).value()); - Assertions.assertEquals("ooqnvhtrfckrm", model.operands().get(0).operator()); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.operands().get(0).operands().get(0).type()); - Assertions.assertEquals("tfmpcbvkoughj", model.operands().get(0).operands().get(0).value()); - Assertions.assertEquals("pptsvppfdnihxcij", model.operands().get(0).operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.type()); + Assertions.assertEquals("el", model.value()); + Assertions.assertEquals("ikigzbrkwgsqos", model.operator()); + 
Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.operands().get(0).type()); + Assertions.assertEquals("hkljktujf", model.operands().get(0).value()); + Assertions.assertEquals("eqfryketwrzxb", model.operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.operands().get(0).operands().get(0).type()); + Assertions.assertEquals("g", model.operands().get(0).operands().get(0).value()); + Assertions.assertEquals("qnjfjyppixfub", model.operands().get(0).operands().get(0).operator()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ExpressionV2 model = new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("fgfspzw").withOperator("cu") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.BINARY).withValue("jvlwczwdkk") - .withOperator("ooqnvhtrfckrm") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.BINARY).withValue("tfmpcbvkoughj") - .withOperator("pptsvppfdnihxcij").withOperands(Arrays.asList(new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("yvscgzqncddaqqkl") - .withOperator("ibro").withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("nex").withOperator("sanglw") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.BINARY).withValue("m").withOperator("rhjj") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("rywucpdzbnty") - .withOperator("wnpuyhqayls") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("lzrrhabbdqne") - .withOperator("fujzwqpkhgrdg") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.FIELD).withValue("qkv").withOperator("n") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("aoetrglpaocq") - .withOperator("vleouevuhago") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.BINARY).withValue("tuoq").withOperator("crdnmhrym") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("owubkiocjn") - .withOperator("nwktbsckcng") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.FIELD).withValue("zoxmajpxbtkzvt") - .withOperator("tgrmgcfvfwwev") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.CONSTANT).withValue("x") - .withOperator("qaqzttogblriznr") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("p").withOperator("nqljlw") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("ln").withOperator("jqlq") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())))))); + ExpressionV2 model + = new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("el") + 
.withOperator("ikigzbrkwgsqos") + .withOperands( + Arrays + .asList( + new ExpressionV2().withType(ExpressionV2Type.CONSTANT) + .withValue("hkljktujf") + .withOperator("eqfryketwrzxb") + .withOperands( + Arrays.asList( + new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("g") + .withOperator("qnjfjyppixfub") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.CONSTANT) + .withValue("wyetyrnhisp") + .withOperator("ivanlypspnjlo") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())))), + new ExpressionV2().withType(ExpressionV2Type.UNARY) + .withValue("lma") + .withOperator("rbwbkrsmkeiunxtb") + .withOperands(Arrays.asList( + new ExpressionV2().withType(ExpressionV2Type.UNARY) + .withValue("xdtzzmcrmhhfcaiz") + .withOperator("iyuzufd") + .withOperands(Arrays.asList(new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.UNARY) + .withValue("nfljv") + .withOperator("qkoecozfauhn") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2(), new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.UNARY) + .withValue("lgrzcjpkzmhax") + .withOperator("jimlfrkmynmmm") + .withOperands(Arrays.asList(new ExpressionV2())), + new ExpressionV2() + .withType(ExpressionV2Type.BINARY) + .withValue("ghordcc") + .withOperator("rwzczlvqlcca") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())))), + new ExpressionV2().withType(ExpressionV2Type.BINARY) + .withValue("tyiqqwdgys") + .withOperator("vvagvqrwrchwd") + .withOperands( + Arrays.asList( + new ExpressionV2().withType(ExpressionV2Type.CONSTANT) + .withValue("tfjj") + .withOperator("xweuoklwtoecxndh") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("ocunan") + .withOperator("tverplhfwq") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2())))))); model = BinaryData.fromObject(model).toObject(ExpressionV2.class); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.type()); - Assertions.assertEquals("fgfspzw", model.value()); - Assertions.assertEquals("cu", model.operator()); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.operands().get(0).type()); - Assertions.assertEquals("jvlwczwdkk", model.operands().get(0).value()); - Assertions.assertEquals("ooqnvhtrfckrm", model.operands().get(0).operator()); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.operands().get(0).operands().get(0).type()); - Assertions.assertEquals("tfmpcbvkoughj", model.operands().get(0).operands().get(0).value()); - Assertions.assertEquals("pptsvppfdnihxcij", model.operands().get(0).operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.type()); + Assertions.assertEquals("el", model.value()); + Assertions.assertEquals("ikigzbrkwgsqos", model.operator()); + Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.operands().get(0).type()); + Assertions.assertEquals("hkljktujf", model.operands().get(0).value()); + Assertions.assertEquals("eqfryketwrzxb", model.operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.operands().get(0).operands().get(0).type()); + Assertions.assertEquals("g", model.operands().get(0).operands().get(0).value()); + Assertions.assertEquals("qnjfjyppixfub", model.operands().get(0).operands().get(0).operator()); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java index 2c7a34b1b0fb9..7f90bd6425317 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class FactoriesDeleteByResourceGroupWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.factories().deleteByResourceGroupWithResponse("rok", "lopygrsvyjrqhp", - com.azure.core.util.Context.NONE); + manager.factories().deleteByResourceGroupWithResponse("zhobt", "ubebbery", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java index b66b3e4c9c202..789ea890c6c76 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java @@ -15,17 +15,18 @@ public final class FactoryIdentityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryIdentity model = BinaryData.fromString( - "{\"type\":\"SystemAssigned\",\"principalId\":\"5aa54869-dc8c-4533-8391-cc95f3e0b9ef\",\"tenantId\":\"bd63df9d-a5bd-4e04-b286-b4ebbba5c965\",\"userAssignedIdentities\":{\"qjpkcattpngjcrc\":\"dataleyyvx\"}}") + "{\"type\":\"SystemAssigned,UserAssigned\",\"principalId\":\"8795ef66-8e5f-4f7e-b025-f68d2c204177\",\"tenantId\":\"0b14d1cf-00e0-40ff-88dd-bcb987ea8319\",\"userAssignedIdentities\":{\"jh\":\"dataattpngjcrcczsq\",\"ysou\":\"datamdajv\",\"canoaeupf\":\"dataq\",\"tuo\":\"datayhltrpmopjmcm\"}}") .toObject(FactoryIdentity.class); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.type()); + Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FactoryIdentity model = new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED) - .withUserAssignedIdentities(mapOf("qjpkcattpngjcrc", "dataleyyvx")); + FactoryIdentity model = new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED) + .withUserAssignedIdentities(mapOf("jh", "dataattpngjcrcczsq", "ysou", "datamdajv", "canoaeupf", "dataq", + "tuo", "datayhltrpmopjmcm")); model = BinaryData.fromObject(model).toObject(FactoryIdentity.class); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.type()); + Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED, model.type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java index 66ef2052419c8..03c38214130a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java @@ -12,26 +12,30 @@ public final class FactoryRepoConfigurationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryRepoConfiguration model = BinaryData.fromString( - "{\"type\":\"FactoryRepoConfiguration\",\"accountName\":\"o\",\"repositoryName\":\"rq\",\"collaborationBranch\":\"b\",\"rootFolder\":\"oczvy\",\"lastCommitId\":\"qrvkdv\",\"disablePublish\":false}") + "{\"type\":\"iqzbq\",\"accountName\":\"vsovmyokac\",\"repositoryName\":\"pkwlhz\",\"collaborationBranch\":\"obpxjmflbvvn\",\"rootFolder\":\"hrk\",\"lastCommitId\":\"iwwzjuqk\",\"disablePublish\":false}") .toObject(FactoryRepoConfiguration.class); - Assertions.assertEquals("o", model.accountName()); - Assertions.assertEquals("rq", model.repositoryName()); - 
Assertions.assertEquals("b", model.collaborationBranch()); - Assertions.assertEquals("oczvy", model.rootFolder()); - Assertions.assertEquals("qrvkdv", model.lastCommitId()); + Assertions.assertEquals("vsovmyokac", model.accountName()); + Assertions.assertEquals("pkwlhz", model.repositoryName()); + Assertions.assertEquals("obpxjmflbvvn", model.collaborationBranch()); + Assertions.assertEquals("hrk", model.rootFolder()); + Assertions.assertEquals("iwwzjuqk", model.lastCommitId()); Assertions.assertEquals(false, model.disablePublish()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FactoryRepoConfiguration model = new FactoryRepoConfiguration().withAccountName("o").withRepositoryName("rq") - .withCollaborationBranch("b").withRootFolder("oczvy").withLastCommitId("qrvkdv").withDisablePublish(false); + FactoryRepoConfiguration model = new FactoryRepoConfiguration().withAccountName("vsovmyokac") + .withRepositoryName("pkwlhz") + .withCollaborationBranch("obpxjmflbvvn") + .withRootFolder("hrk") + .withLastCommitId("iwwzjuqk") + .withDisablePublish(false); model = BinaryData.fromObject(model).toObject(FactoryRepoConfiguration.class); - Assertions.assertEquals("o", model.accountName()); - Assertions.assertEquals("rq", model.repositoryName()); - Assertions.assertEquals("b", model.collaborationBranch()); - Assertions.assertEquals("oczvy", model.rootFolder()); - Assertions.assertEquals("qrvkdv", model.lastCommitId()); + Assertions.assertEquals("vsovmyokac", model.accountName()); + Assertions.assertEquals("pkwlhz", model.repositoryName()); + Assertions.assertEquals("obpxjmflbvvn", model.collaborationBranch()); + Assertions.assertEquals("hrk", model.rootFolder()); + Assertions.assertEquals("iwwzjuqk", model.lastCommitId()); Assertions.assertEquals(false, model.disablePublish()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java index e14934d562908..4d04889f5131e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java @@ -13,30 +13,33 @@ public final class FactoryRepoUpdateTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryRepoUpdate model = BinaryData.fromString( - "{\"factoryResourceId\":\"qzbqjvsov\",\"repoConfiguration\":{\"type\":\"FactoryRepoConfiguration\",\"accountName\":\"okacspk\",\"repositoryName\":\"lhzdobp\",\"collaborationBranch\":\"jmflbvvnch\",\"rootFolder\":\"kcciwwzjuqkhr\",\"lastCommitId\":\"jiwkuofoskghsau\",\"disablePublish\":true}}") + "{\"factoryResourceId\":\"gidyjrrf\",\"repoConfiguration\":{\"type\":\"osvexcsonpclhoc\",\"accountName\":\"hslkevleggzf\",\"repositoryName\":\"u\",\"collaborationBranch\":\"fmvfaxkffeiit\",\"rootFolder\":\"lvmezyvshxmzsbbz\",\"lastCommitId\":\"gigr\",\"disablePublish\":true}}") .toObject(FactoryRepoUpdate.class); - Assertions.assertEquals("qzbqjvsov", model.factoryResourceId()); - Assertions.assertEquals("okacspk", model.repoConfiguration().accountName()); - Assertions.assertEquals("lhzdobp", model.repoConfiguration().repositoryName()); - Assertions.assertEquals("jmflbvvnch", 
model.repoConfiguration().collaborationBranch()); - Assertions.assertEquals("kcciwwzjuqkhr", model.repoConfiguration().rootFolder()); - Assertions.assertEquals("jiwkuofoskghsau", model.repoConfiguration().lastCommitId()); + Assertions.assertEquals("gidyjrrf", model.factoryResourceId()); + Assertions.assertEquals("hslkevleggzf", model.repoConfiguration().accountName()); + Assertions.assertEquals("u", model.repoConfiguration().repositoryName()); + Assertions.assertEquals("fmvfaxkffeiit", model.repoConfiguration().collaborationBranch()); + Assertions.assertEquals("lvmezyvshxmzsbbz", model.repoConfiguration().rootFolder()); + Assertions.assertEquals("gigr", model.repoConfiguration().lastCommitId()); Assertions.assertEquals(true, model.repoConfiguration().disablePublish()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FactoryRepoUpdate model = new FactoryRepoUpdate().withFactoryResourceId("qzbqjvsov") - .withRepoConfiguration(new FactoryRepoConfiguration().withAccountName("okacspk") - .withRepositoryName("lhzdobp").withCollaborationBranch("jmflbvvnch").withRootFolder("kcciwwzjuqkhr") - .withLastCommitId("jiwkuofoskghsau").withDisablePublish(true)); + FactoryRepoUpdate model = new FactoryRepoUpdate().withFactoryResourceId("gidyjrrf") + .withRepoConfiguration(new FactoryRepoConfiguration().withAccountName("hslkevleggzf") + .withRepositoryName("u") + .withCollaborationBranch("fmvfaxkffeiit") + .withRootFolder("lvmezyvshxmzsbbz") + .withLastCommitId("gigr") + .withDisablePublish(true)); model = BinaryData.fromObject(model).toObject(FactoryRepoUpdate.class); - Assertions.assertEquals("qzbqjvsov", model.factoryResourceId()); - Assertions.assertEquals("okacspk", model.repoConfiguration().accountName()); - Assertions.assertEquals("lhzdobp", model.repoConfiguration().repositoryName()); - Assertions.assertEquals("jmflbvvnch", model.repoConfiguration().collaborationBranch()); - Assertions.assertEquals("kcciwwzjuqkhr", model.repoConfiguration().rootFolder()); - Assertions.assertEquals("jiwkuofoskghsau", model.repoConfiguration().lastCommitId()); + Assertions.assertEquals("gidyjrrf", model.factoryResourceId()); + Assertions.assertEquals("hslkevleggzf", model.repoConfiguration().accountName()); + Assertions.assertEquals("u", model.repoConfiguration().repositoryName()); + Assertions.assertEquals("fmvfaxkffeiit", model.repoConfiguration().collaborationBranch()); + Assertions.assertEquals("lvmezyvshxmzsbbz", model.repoConfiguration().rootFolder()); + Assertions.assertEquals("gigr", model.repoConfiguration().lastCommitId()); Assertions.assertEquals(true, model.repoConfiguration().disablePublish()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java index 7469bd679dcf0..581c51303e6de 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java @@ -17,25 +17,22 @@ public final class FactoryUpdateParametersTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryUpdateParameters model = BinaryData.fromString( - 
"{\"tags\":{\"bldngkpoc\":\"kouknvudwtiu\",\"npiucgygevqznty\":\"pazyxoegukg\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"4ef79349-41e7-45a9-9a40-a258f0de4c65\",\"tenantId\":\"bcb38326-fe19-43c5-a108-72b8af5678eb\",\"userAssignedIdentities\":{\"r\":\"datac\",\"dpydn\":\"dataj\",\"sjttgzfbish\":\"datayhxdeoejzicwi\",\"jdeyeamdpha\":\"databkh\"}},\"properties\":{\"publicNetworkAccess\":\"Disabled\"}}") + "{\"tags\":{\"wkgshwa\":\"alpbuxwgipwhon\"},\"identity\":{\"type\":\"SystemAssigned,UserAssigned\",\"principalId\":\"1db887d3-ab26-4f99-bea3-54de739a81f2\",\"tenantId\":\"5a181021-8f94-4be1-a00d-28fd085bc8d8\",\"userAssignedIdentities\":{\"zoqftiyqzrnkcqvy\":\"datanjeputtmrywn\",\"lsicohoqqnwv\":\"datalwh\"}},\"properties\":{\"publicNetworkAccess\":\"Disabled\"}}") .toObject(FactoryUpdateParameters.class); - Assertions.assertEquals("kouknvudwtiu", model.tags().get("bldngkpoc")); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.identity().type()); + Assertions.assertEquals("alpbuxwgipwhon", model.tags().get("wkgshwa")); + Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED, model.identity().type()); Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FactoryUpdateParameters model - = new FactoryUpdateParameters() - .withTags(mapOf("bldngkpoc", "kouknvudwtiu", "npiucgygevqznty", "pazyxoegukg")) - .withIdentity(new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED) - .withUserAssignedIdentities(mapOf("r", "datac", "dpydn", "dataj", "sjttgzfbish", - "datayhxdeoejzicwi", "jdeyeamdpha", "databkh"))) - .withPublicNetworkAccess(PublicNetworkAccess.DISABLED); + FactoryUpdateParameters model = new FactoryUpdateParameters().withTags(mapOf("wkgshwa", "alpbuxwgipwhon")) + .withIdentity(new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED) + .withUserAssignedIdentities(mapOf("zoqftiyqzrnkcqvy", "datanjeputtmrywn", "lsicohoqqnwv", "datalwh"))) + .withPublicNetworkAccess(PublicNetworkAccess.DISABLED); model = BinaryData.fromObject(model).toObject(FactoryUpdateParameters.class); - Assertions.assertEquals("kouknvudwtiu", model.tags().get("bldngkpoc")); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.identity().type()); + Assertions.assertEquals("alpbuxwgipwhon", model.tags().get("wkgshwa")); + Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED, model.identity().type()); Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java index 3f0fc96403a53..8fb25b40bae0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java @@ -13,15 +13,15 @@ public final class FactoryUpdatePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryUpdateProperties model - = 
BinaryData.fromString("{\"publicNetworkAccess\":\"Disabled\"}").toObject(FactoryUpdateProperties.class); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); + = BinaryData.fromString("{\"publicNetworkAccess\":\"Enabled\"}").toObject(FactoryUpdateProperties.class); + Assertions.assertEquals(PublicNetworkAccess.ENABLED, model.publicNetworkAccess()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { FactoryUpdateProperties model - = new FactoryUpdateProperties().withPublicNetworkAccess(PublicNetworkAccess.DISABLED); + = new FactoryUpdateProperties().withPublicNetworkAccess(PublicNetworkAccess.ENABLED); model = BinaryData.fromObject(model).toObject(FactoryUpdateProperties.class); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); + Assertions.assertEquals(PublicNetworkAccess.ENABLED, model.publicNetworkAccess()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java index 11899dfc870b1..5946e971c51e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java @@ -12,32 +12,36 @@ public final class FactoryVstsConfigurationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FactoryVstsConfiguration model = BinaryData.fromString( - "{\"type\":\"FactoryVSTSConfiguration\",\"projectName\":\"yeofltfnnxrkad\",\"tenantId\":\"ynnfmuiii\",\"accountName\":\"ipfohykfkx\",\"repositoryName\":\"bcbrwjiutgnjizbe\",\"collaborationBranch\":\"woiymrvz\",\"rootFolder\":\"juyrsrziuctixg\",\"lastCommitId\":\"suif\",\"disablePublish\":false}") + "{\"type\":\"qbedygisrzwn\",\"projectName\":\"jzudrtpzk\",\"tenantId\":\"eboywhczzqrhm\",\"accountName\":\"kdidjc\",\"repositoryName\":\"lrmpwctofldse\",\"collaborationBranch\":\"cdhz\",\"rootFolder\":\"xkbrfg\",\"lastCommitId\":\"wjiyewhfjsrwq\",\"disablePublish\":true}") .toObject(FactoryVstsConfiguration.class); - Assertions.assertEquals("ipfohykfkx", model.accountName()); - Assertions.assertEquals("bcbrwjiutgnjizbe", model.repositoryName()); - Assertions.assertEquals("woiymrvz", model.collaborationBranch()); - Assertions.assertEquals("juyrsrziuctixg", model.rootFolder()); - Assertions.assertEquals("suif", model.lastCommitId()); - Assertions.assertEquals(false, model.disablePublish()); - Assertions.assertEquals("yeofltfnnxrkad", model.projectName()); - Assertions.assertEquals("ynnfmuiii", model.tenantId()); + Assertions.assertEquals("kdidjc", model.accountName()); + Assertions.assertEquals("lrmpwctofldse", model.repositoryName()); + Assertions.assertEquals("cdhz", model.collaborationBranch()); + Assertions.assertEquals("xkbrfg", model.rootFolder()); + Assertions.assertEquals("wjiyewhfjsrwq", model.lastCommitId()); + Assertions.assertEquals(true, model.disablePublish()); + Assertions.assertEquals("jzudrtpzk", model.projectName()); + Assertions.assertEquals("eboywhczzqrhm", model.tenantId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FactoryVstsConfiguration model - = new 
FactoryVstsConfiguration().withAccountName("ipfohykfkx").withRepositoryName("bcbrwjiutgnjizbe") - .withCollaborationBranch("woiymrvz").withRootFolder("juyrsrziuctixg").withLastCommitId("suif") - .withDisablePublish(false).withProjectName("yeofltfnnxrkad").withTenantId("ynnfmuiii"); + FactoryVstsConfiguration model = new FactoryVstsConfiguration().withAccountName("kdidjc") + .withRepositoryName("lrmpwctofldse") + .withCollaborationBranch("cdhz") + .withRootFolder("xkbrfg") + .withLastCommitId("wjiyewhfjsrwq") + .withDisablePublish(true) + .withProjectName("jzudrtpzk") + .withTenantId("eboywhczzqrhm"); model = BinaryData.fromObject(model).toObject(FactoryVstsConfiguration.class); - Assertions.assertEquals("ipfohykfkx", model.accountName()); - Assertions.assertEquals("bcbrwjiutgnjizbe", model.repositoryName()); - Assertions.assertEquals("woiymrvz", model.collaborationBranch()); - Assertions.assertEquals("juyrsrziuctixg", model.rootFolder()); - Assertions.assertEquals("suif", model.lastCommitId()); - Assertions.assertEquals(false, model.disablePublish()); - Assertions.assertEquals("yeofltfnnxrkad", model.projectName()); - Assertions.assertEquals("ynnfmuiii", model.tenantId()); + Assertions.assertEquals("kdidjc", model.accountName()); + Assertions.assertEquals("lrmpwctofldse", model.repositoryName()); + Assertions.assertEquals("cdhz", model.collaborationBranch()); + Assertions.assertEquals("xkbrfg", model.rootFolder()); + Assertions.assertEquals("wjiyewhfjsrwq", model.lastCommitId()); + Assertions.assertEquals(true, model.disablePublish()); + Assertions.assertEquals("jzudrtpzk", model.projectName()); + Assertions.assertEquals("eboywhczzqrhm", model.tenantId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java index 5ead1b53361ce..bb6b1e3b1cd68 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java @@ -11,14 +11,14 @@ public final class FileServerLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileServerLocation model = BinaryData.fromString( - "{\"type\":\"FileServerLocation\",\"folderPath\":\"datavwdtgckzdqiqdl\",\"fileName\":\"datatrkwxo\",\"\":{\"lglh\":\"dataxsuykznhrfg\",\"f\":\"datary\"}}") + "{\"type\":\"nx\",\"folderPath\":\"datafffhtjnwos\",\"fileName\":\"datafjxtvlxxzqfc\",\"\":{\"hjmbji\":\"dataiomxeezw\",\"hpyvdkgdet\":\"dataegmxdbsohc\",\"canzb\":\"dataz\"}}") .toObject(FileServerLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { FileServerLocation model - = new FileServerLocation().withFolderPath("datavwdtgckzdqiqdl").withFileName("datatrkwxo"); + = new FileServerLocation().withFolderPath("datafffhtjnwos").withFileName("datafjxtvlxxzqfc"); model = BinaryData.fromObject(model).toObject(FileServerLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java index 95f05b9b076d0..e10122c45158f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java @@ -11,19 +11,24 @@ public final class FileServerReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileServerReadSettings model = BinaryData.fromString( - "{\"type\":\"FileServerReadSettings\",\"recursive\":\"dataajbcbrtiqpjlakam\",\"wildcardFolderPath\":\"dataqluicrqxqjzmosml\",\"wildcardFileName\":\"datappfgtnsxdj\",\"fileListPath\":\"datatnjpkpmdlttmfhde\",\"enablePartitionDiscovery\":\"dataiaaiqyxlromxpe\",\"partitionRootPath\":\"dataxcnh\",\"deleteFilesAfterCompletion\":\"datacbtyor\",\"modifiedDatetimeStart\":\"datadamyumrobbaxnym\",\"modifiedDatetimeEnd\":\"datag\",\"fileFilter\":\"dataqmkakgw\",\"maxConcurrentConnections\":\"dataznob\",\"disableMetricsCollection\":\"datagyheyayktutflhe\",\"\":{\"zszjqzmqjhghih\":\"dataefsahmdcoeexw\"}}") + "{\"type\":\"jkmpa\",\"recursive\":\"datawsynt\",\"wildcardFolderPath\":\"datarluqaqn\",\"wildcardFileName\":\"datag\",\"fileListPath\":\"datab\",\"enablePartitionDiscovery\":\"datajbozkl\",\"partitionRootPath\":\"dataifvpsmvk\",\"deleteFilesAfterCompletion\":\"datauw\",\"modifiedDatetimeStart\":\"datanplqf\",\"modifiedDatetimeEnd\":\"dataxfqmdjzgo\",\"fileFilter\":\"datakp\",\"maxConcurrentConnections\":\"dataeylpofaogvmqzagr\",\"disableMetricsCollection\":\"dataqhwfskmkdr\",\"\":{\"ldwcxjvexlutxcmc\":\"datapn\",\"yypvhdulds\":\"datacotqocn\"}}") .toObject(FileServerReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileServerReadSettings model = new FileServerReadSettings().withMaxConcurrentConnections("dataznob") - .withDisableMetricsCollection("datagyheyayktutflhe").withRecursive("dataajbcbrtiqpjlakam") - .withWildcardFolderPath("dataqluicrqxqjzmosml").withWildcardFileName("datappfgtnsxdj") - .withFileListPath("datatnjpkpmdlttmfhde").withEnablePartitionDiscovery("dataiaaiqyxlromxpe") - .withPartitionRootPath("dataxcnh").withDeleteFilesAfterCompletion("datacbtyor") - .withModifiedDatetimeStart("datadamyumrobbaxnym").withModifiedDatetimeEnd("datag") - .withFileFilter("dataqmkakgw"); + FileServerReadSettings model = new FileServerReadSettings().withMaxConcurrentConnections("dataeylpofaogvmqzagr") + .withDisableMetricsCollection("dataqhwfskmkdr") + .withRecursive("datawsynt") + .withWildcardFolderPath("datarluqaqn") + .withWildcardFileName("datag") + .withFileListPath("datab") + .withEnablePartitionDiscovery("datajbozkl") + .withPartitionRootPath("dataifvpsmvk") + .withDeleteFilesAfterCompletion("datauw") + .withModifiedDatetimeStart("datanplqf") + .withModifiedDatetimeEnd("dataxfqmdjzgo") + .withFileFilter("datakp"); model = BinaryData.fromObject(model).toObject(FileServerReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java index 566f913954399..64a5f1be4fde0 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java @@ -13,19 +13,16 @@ public final class FileServerWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileServerWriteSettings model = BinaryData.fromString( - "{\"type\":\"FileServerWriteSettings\",\"maxConcurrentConnections\":\"dataebrvrhwqkfffvgbk\",\"disableMetricsCollection\":\"dataipy\",\"copyBehavior\":\"datahesbebvkmt\",\"metadata\":[{\"name\":\"datalkyvybljqgirpitz\",\"value\":\"datamxcukurkg\"},{\"name\":\"dataxqanrk\",\"value\":\"datadjfsvfbjcnad\"},{\"name\":\"databrntvhppykrlz\",\"value\":\"datalsvxpolatorjm\"},{\"name\":\"databnmuxlthyxryv\",\"value\":\"datazhsigddgbcnqv\"}],\"\":{\"lemzrw\":\"databffcvtij\",\"kmkwddgyqeni\":\"datagvgogczgcm\",\"rtcbvifcrnxst\":\"datarznam\"}}") + "{\"type\":\"vfiskkqspzwsxn\",\"maxConcurrentConnections\":\"datackpcssusdr\",\"disableMetricsCollection\":\"datammrzwm\",\"copyBehavior\":\"datatkcvolaxnuk\",\"metadata\":[{\"name\":\"datau\",\"value\":\"datadcqoxyxiyhmj\"}],\"\":{\"qiygbouv\":\"datawkezgva\"}}") .toObject(FileServerWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileServerWriteSettings model = new FileServerWriteSettings() - .withMaxConcurrentConnections("dataebrvrhwqkfffvgbk").withDisableMetricsCollection("dataipy") - .withCopyBehavior("datahesbebvkmt") - .withMetadata(Arrays.asList(new MetadataItem().withName("datalkyvybljqgirpitz").withValue("datamxcukurkg"), - new MetadataItem().withName("dataxqanrk").withValue("datadjfsvfbjcnad"), - new MetadataItem().withName("databrntvhppykrlz").withValue("datalsvxpolatorjm"), - new MetadataItem().withName("databnmuxlthyxryv").withValue("datazhsigddgbcnqv"))); + FileServerWriteSettings model = new FileServerWriteSettings().withMaxConcurrentConnections("datackpcssusdr") + .withDisableMetricsCollection("datammrzwm") + .withCopyBehavior("datatkcvolaxnuk") + .withMetadata(Arrays.asList(new MetadataItem().withName("datau").withValue("datadcqoxyxiyhmj"))); model = BinaryData.fromObject(model).toObject(FileServerWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java index 3ce18274b2908..4b25a70252af5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java @@ -21,34 +21,41 @@ public final class FileShareDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileShareDataset model = BinaryData.fromString( - 
"{\"type\":\"FileShare\",\"typeProperties\":{\"folderPath\":\"dataqdonbzzs\",\"fileName\":\"datazyviiwsu\",\"modifiedDatetimeStart\":\"datazhw\",\"modifiedDatetimeEnd\":\"datauifkzqqhb\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataoilmkfbeoiipjpng\",\"deserializer\":\"datavuoikdlp\",\"\":{\"wimqnryclocfm\":\"datatug\",\"qhdxtwwulkryb\":\"dataswxvjelei\"}},\"fileFilter\":\"dataevy\",\"compression\":{\"type\":\"datayjecrqkwakkch\",\"level\":\"dataoulborcxuibsdqbd\",\"\":{\"bjqlqfbl\":\"datapectsmwpgweoq\"}}},\"description\":\"ufollcshjuc\",\"structure\":\"databymjjvtpne\",\"schema\":\"datavjeazrah\",\"linkedServiceName\":{\"referenceName\":\"lhbimyii\",\"parameters\":{\"dos\":\"datamcthtpqgf\"}},\"parameters\":{\"flgzh\":{\"type\":\"Bool\",\"defaultValue\":\"datau\"}},\"annotations\":[\"datagwahcrxo\"],\"folder\":{\"name\":\"u\"},\"\":{\"pmhz\":\"datapccxziv\",\"kvnnjdtujq\":\"datahh\",\"tqlfxolrwvtl\":\"datavhnjvpmxnhtmz\"}}") + "{\"type\":\"gsulwvgseufigvfj\",\"typeProperties\":{\"folderPath\":\"datancpr\",\"fileName\":\"datasjvjnkoiznz\",\"modifiedDatetimeStart\":\"databiba\",\"modifiedDatetimeEnd\":\"datagicovjt\",\"format\":{\"type\":\"rmjxyvuodnxc\",\"serializer\":\"dataassqfy\",\"deserializer\":\"datawppeygk\",\"\":{\"ttuvsq\":\"dataloasybxhqvovdpm\",\"qgxqbfkc\":\"datasrvjnqt\"}},\"fileFilter\":\"datancnr\",\"compression\":{\"type\":\"dataj\",\"level\":\"dataywevsfgdrmnszdo\",\"\":{\"ghndae\":\"datasqsvzvmxtc\"}}},\"description\":\"zkilmciwuh\",\"structure\":\"dataekypyovljl\",\"schema\":\"datadlbybpa\",\"linkedServiceName\":{\"referenceName\":\"hpzysovsnw\",\"parameters\":{\"ag\":\"datanzgesfhs\",\"zpbyfyvynpmggq\":\"dataahn\"}},\"parameters\":{\"bloejzsaxzgkq\":{\"type\":\"Float\",\"defaultValue\":\"datavqbugihcdvfoizo\"}},\"annotations\":[\"dataeppjnaphifkfrp\",\"datap\",\"dataubpebrm\"],\"folder\":{\"name\":\"fpghtbttpkim\"},\"\":{\"mhrcmelycpgoku\":\"datankkhbykr\",\"bnvmshfuzzlapy\":\"datahrvybn\",\"uptessjlwjta\":\"dataxlvzcgulaebxiauq\"}}") .toObject(FileShareDataset.class); - Assertions.assertEquals("ufollcshjuc", model.description()); - Assertions.assertEquals("lhbimyii", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("flgzh").type()); - Assertions.assertEquals("u", model.folder().name()); + Assertions.assertEquals("zkilmciwuh", model.description()); + Assertions.assertEquals("hpzysovsnw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bloejzsaxzgkq").type()); + Assertions.assertEquals("fpghtbttpkim", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileShareDataset model = new FileShareDataset().withDescription("ufollcshjuc").withStructure("databymjjvtpne") - .withSchema("datavjeazrah") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lhbimyii") - .withParameters(mapOf("dos", "datamcthtpqgf"))) - .withParameters( - mapOf("flgzh", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datau"))) - .withAnnotations(Arrays.asList("datagwahcrxo")).withFolder(new DatasetFolder().withName("u")) - .withFolderPath("dataqdonbzzs").withFileName("datazyviiwsu").withModifiedDatetimeStart("datazhw") - .withModifiedDatetimeEnd("datauifkzqqhb") - .withFormat(new DatasetStorageFormat().withSerializer("dataoilmkfbeoiipjpng") - .withDeserializer("datavuoikdlp").withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - 
.withFileFilter("dataevy").withCompression(new DatasetCompression().withType("datayjecrqkwakkch") - .withLevel("dataoulborcxuibsdqbd").withAdditionalProperties(mapOf())); + FileShareDataset model = new FileShareDataset().withDescription("zkilmciwuh") + .withStructure("dataekypyovljl") + .withSchema("datadlbybpa") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hpzysovsnw") + .withParameters(mapOf("ag", "datanzgesfhs", "zpbyfyvynpmggq", "dataahn"))) + .withParameters(mapOf("bloejzsaxzgkq", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datavqbugihcdvfoizo"))) + .withAnnotations(Arrays.asList("dataeppjnaphifkfrp", "datap", "dataubpebrm")) + .withFolder(new DatasetFolder().withName("fpghtbttpkim")) + .withFolderPath("datancpr") + .withFileName("datasjvjnkoiznz") + .withModifiedDatetimeStart("databiba") + .withModifiedDatetimeEnd("datagicovjt") + .withFormat(new DatasetStorageFormat().withSerializer("dataassqfy") + .withDeserializer("datawppeygk") + .withAdditionalProperties(mapOf("type", "rmjxyvuodnxc"))) + .withFileFilter("datancnr") + .withCompression(new DatasetCompression().withType("dataj") + .withLevel("dataywevsfgdrmnszdo") + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(FileShareDataset.class); - Assertions.assertEquals("ufollcshjuc", model.description()); - Assertions.assertEquals("lhbimyii", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("flgzh").type()); - Assertions.assertEquals("u", model.folder().name()); + Assertions.assertEquals("zkilmciwuh", model.description()); + Assertions.assertEquals("hpzysovsnw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bloejzsaxzgkq").type()); + Assertions.assertEquals("fpghtbttpkim", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java index 49409345ae8bd..7adc4e843e7b1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java @@ -15,19 +15,23 @@ public final class FileShareDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileShareDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"datayfjswequf\",\"fileName\":\"datayyopoaytwwgw\",\"modifiedDatetimeStart\":\"datab\",\"modifiedDatetimeEnd\":\"databvufrkwjiemimdtn\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataewjskreeedddrftf\",\"deserializer\":\"dataulpclhsiige\",\"\":{\"nqyxfedq\":\"datab\",\"dqw\":\"datae\",\"zp\":\"datanxoqgv\",\"meyobqajejirvavr\":\"datagp\"}},\"fileFilter\":\"datagpogpl\",\"compression\":{\"type\":\"datauvlnhxnrnjhinaeg\",\"level\":\"databx\",\"\":{\"fhsovadkrmjxmwq\":\"dataqmjmoplukfyk\"}}}") + 
"{\"folderPath\":\"datavblskxg\",\"fileName\":\"dataa\",\"modifiedDatetimeStart\":\"dataaslkvcvwpvl\",\"modifiedDatetimeEnd\":\"databvyezj\",\"format\":{\"type\":\"qonbwhiieyoz\",\"serializer\":\"datacwf\",\"deserializer\":\"datacwnbcgqefgzjvbx\",\"\":{\"xtuuci\":\"datagoa\"}},\"fileFilter\":\"datavkdlhuduklbjoafm\",\"compression\":{\"type\":\"dataexulvoepknarseia\",\"level\":\"datasqoacb\",\"\":{\"szglvyakesz\":\"datagsapleqfgkxen\",\"aqcwggchxvlqgf\":\"datauuvu\",\"vphirlzbip\":\"datarvecica\"}}}") .toObject(FileShareDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileShareDatasetTypeProperties model = new FileShareDatasetTypeProperties().withFolderPath("datayfjswequf") - .withFileName("datayyopoaytwwgw").withModifiedDatetimeStart("datab") - .withModifiedDatetimeEnd("databvufrkwjiemimdtn") - .withFormat(new DatasetStorageFormat().withSerializer("dataewjskreeedddrftf") - .withDeserializer("dataulpclhsiige").withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withFileFilter("datagpogpl").withCompression(new DatasetCompression().withType("datauvlnhxnrnjhinaeg") - .withLevel("databx").withAdditionalProperties(mapOf())); + FileShareDatasetTypeProperties model = new FileShareDatasetTypeProperties().withFolderPath("datavblskxg") + .withFileName("dataa") + .withModifiedDatetimeStart("dataaslkvcvwpvl") + .withModifiedDatetimeEnd("databvyezj") + .withFormat(new DatasetStorageFormat().withSerializer("datacwf") + .withDeserializer("datacwnbcgqefgzjvbx") + .withAdditionalProperties(mapOf("type", "qonbwhiieyoz"))) + .withFileFilter("datavkdlhuduklbjoafm") + .withCompression(new DatasetCompression().withType("dataexulvoepknarseia") + .withLevel("datasqoacb") + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(FileShareDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java index e7743ebb39725..122f7d8366845 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java @@ -11,16 +11,19 @@ public final class FileSystemSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileSystemSink model = BinaryData.fromString( - "{\"type\":\"FileSystemSink\",\"copyBehavior\":\"datatlmcaehjhwklf\",\"writeBatchSize\":\"dataqqgyp\",\"writeBatchTimeout\":\"datawejbngojnak\",\"sinkRetryCount\":\"dataytk\",\"sinkRetryWait\":\"datafo\",\"maxConcurrentConnections\":\"datafksormf\",\"disableMetricsCollection\":\"datauhwxmnrdfjobhr\",\"\":{\"pbdfrtasau\":\"dataeaupjmjig\",\"yrxyn\":\"dataxtoxlxojijtt\",\"nyciss\":\"datafs\"}}") + "{\"type\":\"bgohxb\",\"copyBehavior\":\"datacf\",\"writeBatchSize\":\"datazfpfxbqdrjunigxn\",\"writeBatchTimeout\":\"datanghgazdbvenv\",\"sinkRetryCount\":\"datati\",\"sinkRetryWait\":\"dataswrncwhlxvng\",\"maxConcurrentConnections\":\"datapydjdpapndmv\",\"disableMetricsCollection\":\"datadtvvta\",\"\":{\"kmfiudnpj\":\"datawkthmexidecdeh\",\"guv\":\"dataxfhtsgyyrg\",\"ytihhq\":\"datagqllgokznffqvtx\",\"rsnbdfamyolvgk\":\"datancwgrwgdpfzdygt\"}}") 
.toObject(FileSystemSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileSystemSink model - = new FileSystemSink().withWriteBatchSize("dataqqgyp").withWriteBatchTimeout("datawejbngojnak") - .withSinkRetryCount("dataytk").withSinkRetryWait("datafo").withMaxConcurrentConnections("datafksormf") - .withDisableMetricsCollection("datauhwxmnrdfjobhr").withCopyBehavior("datatlmcaehjhwklf"); + FileSystemSink model = new FileSystemSink().withWriteBatchSize("datazfpfxbqdrjunigxn") + .withWriteBatchTimeout("datanghgazdbvenv") + .withSinkRetryCount("datati") + .withSinkRetryWait("dataswrncwhlxvng") + .withMaxConcurrentConnections("datapydjdpapndmv") + .withDisableMetricsCollection("datadtvvta") + .withCopyBehavior("datacf"); model = BinaryData.fromObject(model).toObject(FileSystemSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java index a9069259d1eb6..5516f38181f58 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java @@ -11,16 +11,18 @@ public final class FileSystemSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FileSystemSource model = BinaryData.fromString( - "{\"type\":\"FileSystemSource\",\"recursive\":\"datascjig\",\"additionalColumns\":\"datakdsvayyhtiy\",\"sourceRetryCount\":\"datahmniz\",\"sourceRetryWait\":\"databtehkytl\",\"maxConcurrentConnections\":\"datamyznwrcfqwkqul\",\"disableMetricsCollection\":\"dataovqohwiw\",\"\":{\"sjjjcd\":\"dataxjxlssosndnypx\",\"xb\":\"datasvgdbfni\",\"jgczpdio\":\"datasjhpm\",\"cwmabehr\":\"datadtjylimzvjwjhmtc\"}}") + "{\"type\":\"ssmaaxz\",\"recursive\":\"datafekstrmsbmdg\",\"additionalColumns\":\"datakeuplorn\",\"sourceRetryCount\":\"datalnvupi\",\"sourceRetryWait\":\"databzyhtbjyycaco\",\"maxConcurrentConnections\":\"datavoyltmxqalq\",\"disableMetricsCollection\":\"dataymjww\",\"\":{\"w\":\"datanefellhdsgogdu\",\"cbvuvwdp\":\"dataalt\"}}") .toObject(FileSystemSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FileSystemSource model - = new FileSystemSource().withSourceRetryCount("datahmniz").withSourceRetryWait("databtehkytl") - .withMaxConcurrentConnections("datamyznwrcfqwkqul").withDisableMetricsCollection("dataovqohwiw") - .withRecursive("datascjig").withAdditionalColumns("datakdsvayyhtiy"); + FileSystemSource model = new FileSystemSource().withSourceRetryCount("datalnvupi") + .withSourceRetryWait("databzyhtbjyycaco") + .withMaxConcurrentConnections("datavoyltmxqalq") + .withDisableMetricsCollection("dataymjww") + .withRecursive("datafekstrmsbmdg") + .withAdditionalColumns("datakeuplorn"); model = BinaryData.fromObject(model).toObject(FileSystemSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java index 985d55ec6a182..51a63961c5a71 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java @@ -21,48 +21,57 @@ public final class FilterActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FilterActivity model = BinaryData.fromString( - "{\"type\":\"Filter\",\"typeProperties\":{\"items\":{\"value\":\"nchyoimt\"},\"condition\":{\"value\":\"kjcdjswxek\"}},\"name\":\"hvccxuntghwcb\",\"description\":\"lgbyfcbc\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"zeukumlnfxboqvg\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Skipped\",\"Succeeded\"],\"\":{\"sznxz\":\"datacmuiqir\",\"akrbew\":\"datazbnqmxirspj\",\"g\":\"datazisdnbourw\",\"fecoufnxt\":\"datasdluquyxgmzyqftl\"}},{\"activity\":\"nusqza\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\"],\"\":{\"kwbwxcjf\":\"datay\",\"weguqzlmhpuqlsd\":\"datauzw\"}},{\"activity\":\"tejxlzyyylyx\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\",\"Failed\"],\"\":{\"hogjaubpl\":\"dataychob\",\"ndi\":\"datazjglfrwymwujt\",\"tyqbtijyb\":\"datawycwplj\"}}],\"userProperties\":[{\"name\":\"gclppwdfxhz\",\"value\":\"datarsrgbfaq\"}],\"\":{\"burrevuz\":\"datakisipjgvm\",\"ffgconiydgnxs\":\"dataxuubwjopkldubqfb\"}}") + "{\"type\":\"epnlwuhtfaba\",\"typeProperties\":{\"items\":{\"value\":\"kdjm\"},\"condition\":{\"value\":\"l\"}},\"name\":\"klbkig\",\"description\":\"ugwb\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ttulhanj\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"kaqbjncnbnlppuzb\":\"dataztfdujuoiienzsk\",\"azsayrkddpnob\":\"datap\",\"rtifx\":\"dataqnymds\",\"vkx\":\"datahu\"}},{\"activity\":\"novk\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"gesbei\":\"dataixx\",\"hpjakt\":\"datapuuvzyfjmor\",\"jljfmhgdn\":\"datazrcirrph\",\"flswqeh\":\"datarwgddgpq\"}},{\"activity\":\"fr\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"evtykfxos\":\"datanbu\",\"uhfwklsthjvyk\":\"dataz\",\"rnkxswohshnc\":\"datawpgwpulrtjweuoro\",\"wdl\":\"datadzvlitntdidhhac\"}},{\"activity\":\"tgiontv\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"bgoe\":\"datahkqthuijvivtwswp\",\"fsfuzqpigirnmd\":\"datauxo\"}}],\"userProperties\":[{\"name\":\"agmw\",\"value\":\"datafxeuiuvtkll\"},{\"name\":\"fnnhertgqqjcyhvy\",\"value\":\"datahgeuvujywld\"},{\"name\":\"jayiexpcxy\",\"value\":\"dataquowunwactjpgwse\"}],\"\":{\"lrvpaumkz\":\"datazxgha\",\"jlmbpjna\":\"datatjngkfipxolp\",\"enth\":\"datasbxvouxc\",\"oc\":\"datapwrmevkc\"}}") .toObject(FilterActivity.class); - Assertions.assertEquals("hvccxuntghwcb", model.name()); - Assertions.assertEquals("lgbyfcbc", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("zeukumlnfxboqvg", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gclppwdfxhz", model.userProperties().get(0).name()); - Assertions.assertEquals("nchyoimt", model.items().value()); - Assertions.assertEquals("kjcdjswxek", model.condition().value()); + Assertions.assertEquals("klbkig", model.name()); + Assertions.assertEquals("ugwb", 
model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("ttulhanj", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("agmw", model.userProperties().get(0).name()); + Assertions.assertEquals("kdjm", model.items().value()); + Assertions.assertEquals("l", model.condition().value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FilterActivity model = new FilterActivity().withName("hvccxuntghwcb").withDescription("lgbyfcbc") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("zeukumlnfxboqvg") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("nusqza") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tejxlzyyylyx") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gclppwdfxhz").withValue("datarsrgbfaq"))) - .withItems(new Expression().withValue("nchyoimt")).withCondition(new Expression().withValue("kjcdjswxek")); + FilterActivity model + = new FilterActivity().withName("klbkig") + .withDescription("ugwb") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("ttulhanj") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("novk") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fr") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("tgiontv") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("agmw").withValue("datafxeuiuvtkll"), + new UserProperty().withName("fnnhertgqqjcyhvy").withValue("datahgeuvujywld"), + new UserProperty().withName("jayiexpcxy").withValue("dataquowunwactjpgwse"))) + .withItems(new Expression().withValue("kdjm")) + .withCondition(new Expression().withValue("l")); model = BinaryData.fromObject(model).toObject(FilterActivity.class); - Assertions.assertEquals("hvccxuntghwcb", model.name()); - Assertions.assertEquals("lgbyfcbc", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("zeukumlnfxboqvg", model.dependsOn().get(0).activity()); - 
Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gclppwdfxhz", model.userProperties().get(0).name()); - Assertions.assertEquals("nchyoimt", model.items().value()); - Assertions.assertEquals("kjcdjswxek", model.condition().value()); + Assertions.assertEquals("klbkig", model.name()); + Assertions.assertEquals("ugwb", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("ttulhanj", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("agmw", model.userProperties().get(0).name()); + Assertions.assertEquals("kdjm", model.items().value()); + Assertions.assertEquals("l", model.condition().value()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java index e4d9179fe217e..812bc93bc877c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java @@ -13,18 +13,19 @@ public final class FilterActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FilterActivityTypeProperties model - = BinaryData.fromString("{\"items\":{\"value\":\"yytnmhlank\"},\"condition\":{\"value\":\"s\"}}") + = BinaryData.fromString("{\"items\":{\"value\":\"xhlvfniry\"},\"condition\":{\"value\":\"larohwqxjhzw\"}}") .toObject(FilterActivityTypeProperties.class); - Assertions.assertEquals("yytnmhlank", model.items().value()); - Assertions.assertEquals("s", model.condition().value()); + Assertions.assertEquals("xhlvfniry", model.items().value()); + Assertions.assertEquals("larohwqxjhzw", model.condition().value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FilterActivityTypeProperties model = new FilterActivityTypeProperties() - .withItems(new Expression().withValue("yytnmhlank")).withCondition(new Expression().withValue("s")); + FilterActivityTypeProperties model + = new FilterActivityTypeProperties().withItems(new Expression().withValue("xhlvfniry")) + .withCondition(new Expression().withValue("larohwqxjhzw")); model = BinaryData.fromObject(model).toObject(FilterActivityTypeProperties.class); - Assertions.assertEquals("yytnmhlank", model.items().value()); - Assertions.assertEquals("s", model.condition().value()); + Assertions.assertEquals("xhlvfniry", model.items().value()); + Assertions.assertEquals("larohwqxjhzw", model.condition().value()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java index af16a8b74686e..d27ae41a9eaa2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java @@ -23,175 +23,205 @@ public final class FlowletTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Flowlet model = BinaryData.fromString( - "{\"type\":\"Flowlet\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"pnw\",\"parameters\":{\"ffffg\":\"datafvpctfji\",\"ejjk\":\"datauhznwhvuldbk\",\"azmxjqi\":\"dataigaw\"}},\"name\":\"h\",\"description\":\"jsbcml\",\"dataset\":{\"referenceName\":\"ahz\",\"parameters\":{\"hmojusuzg\":\"dataroolkolir\",\"aaxoialahfxwcc\":\"datajzc\",\"kczynuhhoqeqsh\":\"datakdxkuk\",\"q\":\"datavl\"}},\"linkedService\":{\"referenceName\":\"yrqolnthbbnkgz\",\"parameters\":{\"eyjncjmlfuy\":\"datadrnzkjthf\",\"rufzcqyjmq\":\"datajbpfiddh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"iocuselqkr\",\"datasetParameters\":\"datazrhxuddqmdtf\",\"parameters\":{\"khmwdmd\":\"datajmr\",\"okwtjawhvagnqfqq\":\"datagyqi\"},\"\":{\"chtvsnvlaqd\":\"datavmyolcaym\",\"zawatuwqkokbc\":\"dataz\",\"msn\":\"dataothymgobl\",\"aaneakhtmhobcya\":\"datagwi\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"qtvkh\",\"parameters\":{\"ymhcctopuo\":\"dataogxkfnaoa\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"rnskby\",\"parameters\":{\"xqnwhscoz\":\"datahczygxvhajpxe\",\"ljfewxqo\":\"datawmvgxsmpknpwir\"}},\"name\":\"oxudnmckap\",\"description\":\"knq\",\"dataset\":{\"referenceName\":\"jgencdgmoque\",\"parameters\":{\"ltjouwhldxwh\":\"datakkyo\",\"q\":\"dataepr\",\"cvprst\":\"datasmfx\"}},\"linkedService\":{\"referenceName\":\"itbfjtdy\",\"parameters\":{\"etjt\":\"dataplfacqoccqrqx\",\"oadtxopgehpadkmd\":\"datarhutf\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"szxvctkbbxuhar\",\"datasetParameters\":\"datair\",\"parameters\":{\"bmyqjog\":\"datalabvoyngsuxxc\",\"rntu\":\"datadsaidjanormovdxx\"},\"\":{\"nwemhdeeljslkyo\":\"datail\",\"fzjuegrhrhtsl\":\"datad\",\"j\":\"datajtv\"}}},{\"schemaLinkedService\":{\"referenceName\":\"vgjbfio\",\"parameters\":{\"cbjqqwmtqsm\":\"datajod\",\"cywnfyszza\":\"dataxsazuxejgw\",\"ozsyvrm\":\"datazsinqbdnddb\",\"eeih\":\"datajmyitrchwudl\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mnoejhqlfmsib\",\"parameters\":{\"mypgfqvmty\":\"datarfgxkyd\",\"kxp\":\"datahl\"}},\"name\":\"jpewpyjlfx\",\"description\":\"pqcrzgeuqxbpiat\",\"dataset\":{\"referenceName\":\"aujegqdtadra\",\"parameters\":{\"gsq\":\"datadhjkrukizy\",\"qfpjb\":\"datanqskt\"}},\"linkedService\":{\"referenceName\":\"gweeiwd\",\"parameters\":{\"gbfzu\":\"datan\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tunmlhxd\",\"datasetParameters\":\"dataklciichgjsysm\",\"parameters\":{\"bdujgcwxvecbb\":\"datadgwxfkzsifcu\"},\"\":{\"kpgdqxwabzrwiq\":\"datardxrizagbbgiarks\",\"kifmmainw\":\"dataxhaclcdosqkptjq\"}}},{\"schemaLinkedService\":{\"referenceName\":\"d\",\"parameters\":{\"gvydjufbnklblaxp\":\"databqwuntobuizazzel\",\"lfdxaglz\":\"datagjwdab\",\"siflikyypzkgxf\":\"dataytlbtlqhopxouvm\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"myrqsdbpokszan\",\"parameters\":{\"kirkskw\":\"datagpterdiu\",\"olzkgys\":\"datatsdetjygowifcq\",\"zoxlvoc\":\"datagzyy\"}},\"name\":\"tvdxxhe\",\"description\":\"mlil\",\"dataset\":{\"referenceName\":\"ghjhjvmabzzbwa\",\"parameters\":{\"apr\":\"datamdafbgy
mqt\",\"neychbjizq\":\"dataojxrjnbsconxavi\",\"rfbo\":\"datasgnwdxzedpq\",\"mlnfyz\":\"dataxi\"}},\"linkedService\":{\"referenceName\":\"frbypi\",\"parameters\":{\"aq\":\"datakpdj\",\"dgonjhxshthmgp\":\"datasmqaz\",\"pxtzhigqqbtimpk\":\"datazqulptkbv\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"o\",\"datasetParameters\":\"datas\",\"parameters\":{\"jakx\":\"datahudsmusuaa\",\"vqban\":\"datajnfczmnniixy\",\"gm\":\"datasjtgirnbgmgmddo\",\"yxwe\":\"datanltwmpftmfoeajog\"},\"\":{\"hdidrmuhkahmjedb\":\"datafddrvlkpzwbhnrec\"}}}],\"transformations\":[{\"name\":\"vkhhwm\",\"description\":\"jbweunxcqr\",\"dataset\":{\"referenceName\":\"hu\",\"parameters\":{\"gnzuzpbgkzcsc\":\"datahppiybx\",\"ti\":\"dataiuzvkunhdimju\"}},\"linkedService\":{\"referenceName\":\"kaugpucdocfqplwg\",\"parameters\":{\"jlvzklk\":\"datahxw\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ikyjtkakvlb\",\"datasetParameters\":\"datahjvpzaptu\",\"parameters\":{\"fgcdiykkcxw\":\"dataaoizjix\",\"dmuqohhi\":\"dataujvqynvavit\",\"ddrwjcljbrhlhpvz\":\"dataraxq\"},\"\":{\"fhxrzfr\":\"datawennin\",\"rcqxgcbvzarmqc\":\"datavztiucwviqllukh\",\"stsinvag\":\"datapo\"}}},{\"name\":\"vjyhdrxbrdvc\",\"description\":\"qwh\",\"dataset\":{\"referenceName\":\"xnmxgnmguzb\",\"parameters\":{\"bkbdhlltqstqkqs\":\"dataorbalkj\",\"eubanlxunpqcc\":\"datagxiynecovagzk\",\"klaslga\":\"dataqiawzl\"}},\"linkedService\":{\"referenceName\":\"zuxlrarwpewsau\",\"parameters\":{\"ytnkqb\":\"datajtighsxj\",\"mehjnhjioti\":\"datalahovuuwx\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bcngkegxc\",\"datasetParameters\":\"dataxbbfetwil\",\"parameters\":{\"frolq\":\"dataoxpdxq\"},\"\":{\"jew\":\"datakiu\",\"tnlmsoodtmvecdhd\":\"dataahwkxjjm\",\"zxvlgsrgkrfizrp\":\"dataswcrptveaj\"}}},{\"name\":\"wlp\",\"description\":\"uqhrlmcskykp\",\"dataset\":{\"referenceName\":\"ofix\",\"parameters\":{\"kkpyycpaw\":\"datacf\",\"cfpcfjfwzlgz\":\"datapjprdpwr\"}},\"linkedService\":{\"referenceName\":\"kgyepe\",\"parameters\":{\"rntmkctdhu\":\"datannidmdiawpzxk\",\"hqodv\":\"datasgwqpsqaz\",\"ti\":\"datagcnbhcbmjk\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ynts\",\"datasetParameters\":\"datamfmeftvhkmoo\",\"parameters\":{\"gmjgrul\":\"datahskb\"},\"\":{\"z\":\"datagxhcxnwjtpfdzxco\",\"k\":\"datawofw\"}}},{\"name\":\"kzkdtzxsoednlwg\",\"description\":\"hezomucmqgisnion\",\"dataset\":{\"referenceName\":\"bzdrdpuenxkgt\",\"parameters\":{\"hzkbnbmx\":\"datamtrlxczn\",\"itoqcahfsg\":\"dataxmwtygeqzu\",\"lisolntfxxc\":\"datajmlreesrfwsszvlc\"}},\"linkedService\":{\"referenceName\":\"mipfjw\",\"parameters\":{\"nvgskjtoxjd\":\"datagizmshxxbaizabu\",\"xqqm\":\"datajsjznv\",\"aydhf\":\"datai\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"c\",\"datasetParameters\":\"datatfshksnyzm\",\"parameters\":{\"iqdktwtkvih\":\"datamwbwmbnlslce\",\"nguuzhwvla\":\"datapfliwo\",\"mhjhaus\":\"datap\",\"ekymffztsilscvqs\":\"datab\"},\"\":{\"fymkouih\":\"datai\",\"zhogsmgbvmtdw\":\"dataeseuugci\",\"jnfveg\":\"dataqbe\"}}}],\"script\":\"btvkbi\",\"scriptLines\":[\"htfgficudyhizpac\",\"muhbcakznho\"]},\"description\":\"oitwhrjsdmmazdnc\",\"annotations\":[\"datab\"],\"folder\":{\"name\":\"lhzqpxzbawkikcdg\"}}") + 
"{\"type\":\"bmptrwtxzu\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"wnf\",\"parameters\":{\"zsinqbdnddb\":\"dataza\",\"jmyitrchwudl\":\"dataozsyvrm\",\"pmnoejhqlf\":\"dataeeih\"}},\"name\":\"sibz\",\"description\":\"rfgxkyd\",\"dataset\":{\"referenceName\":\"yp\",\"parameters\":{\"jpewpyjlfx\":\"datavmtywhlakxp\",\"qcrzgeuqxbpia\":\"datam\",\"aujegqdtadra\":\"dataw\"}},\"linkedService\":{\"referenceName\":\"ddhjkr\",\"parameters\":{\"fpjbqggwe\":\"datazyhgsqtnqsktx\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"dhd\",\"datasetParameters\":\"datacgbfzuscstun\",\"parameters\":{\"ysmvxodgwxf\":\"dataxdfbklciichgj\",\"ec\":\"datazsifcuvbdujgcwx\"},\"\":{\"zagbbgiarksykp\":\"datajtrdxr\",\"cdosqkptj\":\"datadqxwabzrwiqrxhac\",\"d\":\"datagkifmmainwh\"}}},{\"schemaLinkedService\":{\"referenceName\":\"pbqwuntobu\",\"parameters\":{\"lw\":\"datazz\"}},\"name\":\"vydjufbnk\",\"description\":\"laxp\",\"dataset\":{\"referenceName\":\"jwdabalfd\",\"parameters\":{\"ytlbtlqhopxouvm\":\"datalz\",\"fmy\":\"datasiflikyypzkgxf\"}},\"linkedService\":{\"referenceName\":\"sdbpokszanmh\",\"parameters\":{\"detjygowi\":\"dataterdiuwkirkskwzt\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"polzkgysdgzyy\",\"datasetParameters\":\"dataoxlvocptvdxxheig\",\"parameters\":{\"hjvmabzzbwaybfm\":\"datalwzgh\",\"preojxrjnbsconxa\":\"dataafbgymqtn\"},\"\":{\"sgnwdxzedpq\":\"dataneychbjizq\"}}},{\"schemaLinkedService\":{\"referenceName\":\"fb\",\"parameters\":{\"bypicdbkpdj\":\"dataipmlnfyzavf\"}},\"name\":\"aq\",\"description\":\"mqazpdgonjh\",\"dataset\":{\"referenceName\":\"h\",\"parameters\":{\"vcpxtzhigqqbtimp\":\"datagpczqulptk\",\"rnsihqhudsmus\":\"datajbl\",\"xwjnfcz\":\"dataaawja\",\"os\":\"datanniixyxvqba\"}},\"linkedService\":{\"referenceName\":\"gir\",\"parameters\":{\"dorgmynltw\":\"datamgm\",\"tmfoeajogsyxwet\":\"datap\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ddrvlkpzwbhnrecc\",\"datasetParameters\":\"dataid\",\"parameters\":{\"wmjpjbweunxcqrr\":\"datahkahmjedbiucvkh\",\"oihppi\":\"datahu\"},\"\":{\"iuzvkunhdimju\":\"datavgnzuzpbgkzcsc\",\"zkaugpucdocfqpl\":\"datati\",\"klkvb\":\"datagofmhxwrjlv\",\"shjvpzaptuoskaoi\":\"dataikyjtkakvlb\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"wfgcdiykkcxwn\",\"parameters\":{\"qohhihra\":\"dataqynvavitmdm\",\"rhlhpvzadbwenni\":\"dataquddrwjclj\",\"viqlluk\":\"dataafhxrzfrmvztiuc\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"rcqxgcbvzarmqc\",\"parameters\":{\"invagovjyhd\":\"datakst\",\"txnmxgnmguz\":\"dataxbrdvcehqwh\",\"kbd\":\"datauwvorbalkjn\",\"gxiynecovagzk\":\"datalltqstqkqs\"}},\"name\":\"eubanlxunpqcc\",\"description\":\"iawzlzklaslgac\",\"dataset\":{\"referenceName\":\"uxlrarwpe\",\"parameters\":{\"tnkqbala\":\"dataudoejtighsxjp\",\"wxhmehjnhjiot\":\"dataovu\"}},\"linkedService\":{\"referenceName\":\"fbbcngkegxcypxbb\",\"parameters\":{\"lyr\":\"dataw\",\"frolq\":\"dataoxpdxq\",\"u\":\"datawnk\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"wnah\",\"datasetParameters\":\"dataxjjmztnlmsoodtm\",\"parameters\":{\"zxvlgsrgkrfizrp\":\"datadhdyswcrptveaj\",\"yuqh\":\"datawlp\",\"fuofixcnpcfykkp\":\"datalmcskyk\"},\"\":{\"rpcfpcfjfwzl\":\"datapawmpjprdp\",\"wk\":\"dataz\"}}},{\"schemaLinkedService\":{\"referenceName\":\"e\",\"parameters\":{\"mdiawpzx\":\"dataamnni\",\"ntmkctdhuosgwqps\":\"dataz\",\"dvqgcnbhcbmjkz\":\"dataazihq\",\"jmfmeftvhkm\":\"dataibniynts\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"gjrhs\",\"para
meters\":{\"fogxhcxnw\":\"datagmjgrul\"}},\"name\":\"tpfdzxcouzfwofwa\",\"description\":\"kzkdtzxsoednlwg\",\"dataset\":{\"referenceName\":\"hezomucmqgisnion\",\"parameters\":{\"uenxkgtlzlmt\":\"datazdrd\",\"zkbnbmxl\":\"datalxcznn\",\"g\":\"datamwt\",\"ahfsgb\":\"dataqzusitoq\"}},\"linkedService\":{\"referenceName\":\"lreesrfwsszvlcwl\",\"parameters\":{\"ipfjwfoygizmshx\":\"datalntfxxcrq\",\"jdzjsjzn\":\"databaizabulnvgskjto\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qqmqipaydhfn\",\"datasetParameters\":\"datac\",\"parameters\":{\"m\":\"datafshksnyzmspamwb\",\"qdktwtkvihlp\":\"datanlslcef\",\"zhwv\":\"dataliwoyngu\"},\"\":{\"mhjhaus\":\"datap\",\"ekymffztsilscvqs\":\"datab\"}}}],\"transformations\":[{\"name\":\"i\",\"description\":\"ymkouih\",\"dataset\":{\"referenceName\":\"seuugcig\",\"parameters\":{\"mt\":\"datagsmgb\",\"glab\":\"datawrqbebjnfv\",\"nhtfgfi\":\"datavkbiw\"}},\"linkedService\":{\"referenceName\":\"dyhi\",\"parameters\":{\"znhokhoitwhrjsd\":\"dataczmuhbca\",\"zqpxzbawkikcd\":\"datamazdnckidbjpgl\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bssdpjeyoqxd\",\"datasetParameters\":\"dataecf\",\"parameters\":{\"rshzzbgullcxiqqz\":\"dataagxsurej\",\"ouigdmfivjqte\":\"datakoxdupnamgl\"},\"\":{\"qirvtk\":\"dataqigdydkghpcvr\"}}},{\"name\":\"yhhmvfxl\",\"description\":\"jajodmkrr\",\"dataset\":{\"referenceName\":\"pgqvqo\",\"parameters\":{\"abwlyvx\":\"dataujqgi\",\"aeuhwwsknstvz\":\"datahpqvcts\"}},\"linkedService\":{\"referenceName\":\"hasupmlppdpgzvz\",\"parameters\":{\"ruatsyiysjqhen\":\"datavbkarkptgon\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"eq\",\"datasetParameters\":\"datauba\",\"parameters\":{\"exmnv\":\"datadeayscseydzje\",\"xlniwmcpm\":\"datavmuw\",\"eqjzm\":\"datardlhvdvmiphbe\"},\"\":{\"czfmunerkelux\":\"dataclacr\",\"uzudlevzskejcg\":\"datashxzezb\",\"ard\":\"datafsgqkstyecupyuij\",\"c\":\"datavs\"}}},{\"name\":\"mazpzdqwuzvcmc\",\"description\":\"xizekuvfrjwucao\",\"dataset\":{\"referenceName\":\"vajbvbnkrdemdid\",\"parameters\":{\"damisvpztdi\":\"datazidgzwd\",\"j\":\"dataykpxkqejt\",\"toiboancdr\":\"dataojiunrlshxuknsyk\"}},\"linkedService\":{\"referenceName\":\"anvxuldxonckb\",\"parameters\":{\"weghlwwbogvgfk\":\"datalfxlupibaqzizxzp\",\"fvdstrkzxsgt\":\"dataqiyndveqels\",\"lr\":\"datans\",\"y\":\"datasmovpi\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"oxaxnrqaqotn\",\"datasetParameters\":\"dataxolousdv\",\"parameters\":{\"wz\":\"datatqm\",\"qgkujds\":\"datadrpizfulgyctsdb\",\"eurbtigapdyarik\":\"dataoxrqw\",\"kfqbriqulwwt\":\"dataejdpdfhtwm\"},\"\":{\"h\":\"dataeqkvyhzokpoyu\",\"phmpoejnglpwsada\":\"dataensnaa\"}}}],\"script\":\"sumxpezcoioyj\",\"scriptLines\":[\"qzwqdnxke\",\"dcnwmywxfq\",\"kvemyzdpczaqpqi\"]},\"description\":\"amonatnizex\",\"annotations\":[\"datasqjghrmth\",\"dataplwsttxsrgxfqp\",\"dataniceovxgzwh\",\"dataxyrujmt\"],\"folder\":{\"name\":\"s\"}}") .toObject(Flowlet.class); - Assertions.assertEquals("oitwhrjsdmmazdnc", model.description()); - Assertions.assertEquals("lhzqpxzbawkikcdg", model.folder().name()); - Assertions.assertEquals("h", model.sources().get(0).name()); - Assertions.assertEquals("jsbcml", model.sources().get(0).description()); - Assertions.assertEquals("ahz", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yrqolnthbbnkgz", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("amonatnizex", model.description()); + Assertions.assertEquals("s", model.folder().name()); + 
Assertions.assertEquals("sibz", model.sources().get(0).name()); + Assertions.assertEquals("rfgxkyd", model.sources().get(0).description()); + Assertions.assertEquals("yp", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("ddhjkr", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("iocuselqkr", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("pnw", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("oxudnmckap", model.sinks().get(0).name()); - Assertions.assertEquals("knq", model.sinks().get(0).description()); - Assertions.assertEquals("jgencdgmoque", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("itbfjtdy", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("dhd", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("wnf", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("eubanlxunpqcc", model.sinks().get(0).name()); + Assertions.assertEquals("iawzlzklaslgac", model.sinks().get(0).description()); + Assertions.assertEquals("uxlrarwpe", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("fbbcngkegxcypxbb", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("szxvctkbbxuhar", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("qtvkh", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("rnskby", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("vkhhwm", model.transformations().get(0).name()); - Assertions.assertEquals("jbweunxcqr", model.transformations().get(0).description()); - Assertions.assertEquals("hu", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("kaugpucdocfqplwg", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("wnah", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("wfgcdiykkcxwn", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("rcqxgcbvzarmqc", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("i", model.transformations().get(0).name()); + Assertions.assertEquals("ymkouih", model.transformations().get(0).description()); + Assertions.assertEquals("seuugcig", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("dyhi", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("ikyjtkakvlb", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("btvkbi", model.script()); - Assertions.assertEquals("htfgficudyhizpac", model.scriptLines().get(0)); + Assertions.assertEquals("bssdpjeyoqxd", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("sumxpezcoioyj", model.script()); + Assertions.assertEquals("qzwqdnxke", model.scriptLines().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { Flowlet model - = new 
Flowlet().withDescription("oitwhrjsdmmazdnc").withAnnotations(Arrays.asList("datab")) - .withFolder(new DataFlowFolder().withName("lhzqpxzbawkikcdg")) - .withSources(Arrays.asList(new DataFlowSource().withName("h").withDescription("jsbcml") - .withDataset(new DatasetReference().withReferenceName("ahz") - .withParameters(mapOf("hmojusuzg", "dataroolkolir", "aaxoialahfxwcc", "datajzc", - "kczynuhhoqeqsh", "datakdxkuk", "q", "datavl"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yrqolnthbbnkgz") - .withParameters(mapOf("eyjncjmlfuy", "datadrnzkjthf", "rufzcqyjmq", "datajbpfiddh"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("iocuselqkr").withDatasetParameters("datazrhxuddqmdtf") - .withParameters(mapOf("khmwdmd", "datajmr", "okwtjawhvagnqfqq", "datagyqi")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference() - .withReferenceName( - "pnw") - .withParameters(mapOf("ffffg", "datafvpctfji", "ejjk", "datauhznwhvuldbk", "azmxjqi", - "dataigaw"))))) + = new Flowlet().withDescription("amonatnizex") + .withAnnotations(Arrays.asList("datasqjghrmth", "dataplwsttxsrgxfqp", "dataniceovxgzwh", "dataxyrujmt")) + .withFolder(new DataFlowFolder().withName("s")) + .withSources( + Arrays + .asList( + new DataFlowSource().withName("sibz") + .withDescription("rfgxkyd") + .withDataset(new DatasetReference().withReferenceName("yp") + .withParameters(mapOf("jpewpyjlfx", "datavmtywhlakxp", "qcrzgeuqxbpia", "datam", + "aujegqdtadra", "dataw"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("ddhjkr") + .withParameters(mapOf("fpjbqggwe", "datazyhgsqtnqsktx"))) + .withFlowlet( + new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("dhd") + .withDatasetParameters("datacgbfzuscstun") + .withParameters( + mapOf("ysmvxodgwxf", "dataxdfbklciichgj", "ec", "datazsifcuvbdujgcwx")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("wnf") + .withParameters(mapOf("zsinqbdnddb", "dataza", "jmyitrchwudl", "dataozsyvrm", + "pmnoejhqlf", "dataeeih"))), + new DataFlowSource().withName("vydjufbnk") + .withDescription("laxp") + .withDataset(new DatasetReference().withReferenceName("jwdabalfd") + .withParameters(mapOf("ytlbtlqhopxouvm", "datalz", "fmy", "datasiflikyypzkgxf"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("sdbpokszanmh") + .withParameters(mapOf("detjygowi", "dataterdiuwkirkskwzt"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("polzkgysdgzyy") + .withDatasetParameters("dataoxlvocptvdxxheig") + .withParameters(mapOf("hjvmabzzbwaybfm", "datalwzgh", "preojxrjnbsconxa", + "dataafbgymqtn")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("pbqwuntobu") + .withParameters(mapOf("lw", "datazz"))), + new DataFlowSource().withName("aq") + .withDescription("mqazpdgonjh") + .withDataset(new DatasetReference().withReferenceName("h") + .withParameters(mapOf("vcpxtzhigqqbtimp", "datagpczqulptk", "rnsihqhudsmus", + "datajbl", "xwjnfcz", "dataaawja", "os", "datanniixyxvqba"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("gir") + .withParameters(mapOf("dorgmynltw", "datamgm", "tmfoeajogsyxwet", "datap"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + 
.withReferenceName("ddrvlkpzwbhnrecc") + .withDatasetParameters("dataid") + .withParameters(mapOf("wmjpjbweunxcqrr", "datahkahmjedbiucvkh", "oihppi", "datahu")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("fb") + .withParameters(mapOf("bypicdbkpdj", "dataipmlnfyzavf"))))) .withSinks( - Arrays.asList( - new DataFlowSink().withName("oxudnmckap").withDescription("knq") - .withDataset(new DatasetReference().withReferenceName("jgencdgmoque").withParameters( - mapOf("ltjouwhldxwh", "datakkyo", "q", "dataepr", "cvprst", "datasmfx"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("itbfjtdy") - .withParameters(mapOf("etjt", "dataplfacqoccqrqx", "oadtxopgehpadkmd", "datarhutf"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("szxvctkbbxuhar").withDatasetParameters("datair") - .withParameters(mapOf("bmyqjog", "datalabvoyngsuxxc", "rntu", "datadsaidjanormovdxx")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("qtvkh") - .withParameters(mapOf("ymhcctopuo", "dataogxkfnaoa"))) - .withRejectedDataLinkedService( - new LinkedServiceReference() - .withReferenceName("rnskby").withParameters( - mapOf("xqnwhscoz", "datahczygxvhajpxe", "ljfewxqo", "datawmvgxsmpknpwir"))), - new DataFlowSink().withName("jpewpyjlfx").withDescription("pqcrzgeuqxbpiat") - .withDataset(new DatasetReference().withReferenceName("aujegqdtadra") - .withParameters(mapOf("gsq", "datadhjkrukizy", "qfpjb", "datanqskt"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("gweeiwd") - .withParameters(mapOf("gbfzu", "datan"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("tunmlhxd").withDatasetParameters("dataklciichgjsysm") - .withParameters(mapOf("bdujgcwxvecbb", "datadgwxfkzsifcu")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("vgjbfio") - .withParameters(mapOf("cbjqqwmtqsm", "datajod", "cywnfyszza", "dataxsazuxejgw", - "ozsyvrm", "datazsinqbdnddb", "eeih", "datajmyitrchwudl"))) - .withRejectedDataLinkedService(new LinkedServiceReference() - .withReferenceName("mnoejhqlfmsib") - .withParameters(mapOf("mypgfqvmty", "datarfgxkyd", "kxp", "datahl"))), - new DataFlowSink().withName("tvdxxhe").withDescription("mlil") - .withDataset(new DatasetReference().withReferenceName("ghjhjvmabzzbwa") - .withParameters(mapOf("apr", "datamdafbgymqt", "neychbjizq", "dataojxrjnbsconxavi", - "rfbo", "datasgnwdxzedpq", "mlnfyz", "dataxi"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("frbypi") - .withParameters(mapOf("aq", "datakpdj", "dgonjhxshthmgp", "datasmqaz", - "pxtzhigqqbtimpk", "datazqulptkbv"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("o").withDatasetParameters("datas") - .withParameters(mapOf("jakx", "datahudsmusuaa", "vqban", "datajnfczmnniixy", "gm", - "datasjtgirnbgmgmddo", "yxwe", "datanltwmpftmfoeajog")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("d") - .withParameters(mapOf("gvydjufbnklblaxp", "databqwuntobuizazzel", "lfdxaglz", - "datagjwdab", "siflikyypzkgxf", "dataytlbtlqhopxouvm"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("myrqsdbpokszan") - .withParameters(mapOf("kirkskw", 
"datagpterdiu", "olzkgys", "datatsdetjygowifcq", - "zoxlvoc", "datagzyy"))))) + Arrays + .asList( + new DataFlowSink().withName("eubanlxunpqcc") + .withDescription("iawzlzklaslgac") + .withDataset(new DatasetReference().withReferenceName("uxlrarwpe") + .withParameters(mapOf("tnkqbala", "dataudoejtighsxjp", "wxhmehjnhjiot", "dataovu"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("fbbcngkegxcypxbb") + .withParameters(mapOf("lyr", "dataw", "frolq", "dataoxpdxq", "u", "datawnk"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("wnah") + .withDatasetParameters("dataxjjmztnlmsoodtm") + .withParameters(mapOf("zxvlgsrgkrfizrp", "datadhdyswcrptveaj", "yuqh", "datawlp", + "fuofixcnpcfykkp", "datalmcskyk")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference() + .withReferenceName("wfgcdiykkcxwn") + .withParameters( + mapOf( + "qohhihra", "dataqynvavitmdm", "rhlhpvzadbwenni", "dataquddrwjclj", + "viqlluk", "dataafhxrzfrmvztiuc"))) + .withRejectedDataLinkedService( + new LinkedServiceReference().withReferenceName("rcqxgcbvzarmqc") + .withParameters( + mapOf("invagovjyhd", "datakst", "txnmxgnmguz", "dataxbrdvcehqwh", "kbd", + "datauwvorbalkjn", "gxiynecovagzk", "datalltqstqkqs"))), + new DataFlowSink().withName("tpfdzxcouzfwofwa") + .withDescription("kzkdtzxsoednlwg") + .withDataset(new DatasetReference().withReferenceName("hezomucmqgisnion") + .withParameters(mapOf("uenxkgtlzlmt", "datazdrd", "zkbnbmxl", "datalxcznn", "g", + "datamwt", "ahfsgb", "dataqzusitoq"))) + .withLinkedService( + new LinkedServiceReference().withReferenceName("lreesrfwsszvlcwl") + .withParameters(mapOf("ipfjwfoygizmshx", "datalntfxxcrq", "jdzjsjzn", + "databaizabulnvgskjto"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("qqmqipaydhfn") + .withDatasetParameters("datac") + .withParameters(mapOf("m", "datafshksnyzmspamwb", "qdktwtkvihlp", "datanlslcef", + "zhwv", "dataliwoyngu")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("e") + .withParameters(mapOf("mdiawpzx", "dataamnni", "ntmkctdhuosgwqps", "dataz", + "dvqgcnbhcbmjkz", "dataazihq", "jmfmeftvhkm", "dataibniynts"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("gjrhs") + .withParameters(mapOf("fogxhcxnw", "datagmjgrul"))))) .withTransformations(Arrays.asList( - new Transformation().withName("vkhhwm").withDescription("jbweunxcqr") - .withDataset(new DatasetReference().withReferenceName("hu") - .withParameters(mapOf("gnzuzpbgkzcsc", "datahppiybx", "ti", "dataiuzvkunhdimju"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("kaugpucdocfqplwg") - .withParameters(mapOf("jlvzklk", "datahxw"))) + new Transformation().withName("i") + .withDescription("ymkouih") + .withDataset(new DatasetReference().withReferenceName("seuugcig") + .withParameters(mapOf("mt", "datagsmgb", "glab", "datawrqbebjnfv", "nhtfgfi", "datavkbiw"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("dyhi") + .withParameters( + mapOf("znhokhoitwhrjsd", "dataczmuhbca", "zqpxzbawkikcd", "datamazdnckidbjpgl"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ikyjtkakvlb").withDatasetParameters("datahjvpzaptu") - .withParameters(mapOf("fgcdiykkcxw", "dataaoizjix", "dmuqohhi", "dataujvqynvavit", - "ddrwjcljbrhlhpvz", 
"dataraxq")) + .withReferenceName("bssdpjeyoqxd") + .withDatasetParameters("dataecf") + .withParameters( + mapOf("rshzzbgullcxiqqz", "dataagxsurej", "ouigdmfivjqte", "datakoxdupnamgl")) .withAdditionalProperties(mapOf())), - new Transformation().withName("vjyhdrxbrdvc").withDescription("qwh") - .withDataset(new DatasetReference().withReferenceName("xnmxgnmguzb") - .withParameters(mapOf("bkbdhlltqstqkqs", "dataorbalkj", "eubanlxunpqcc", - "datagxiynecovagzk", "klaslga", "dataqiawzl"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("zuxlrarwpewsau") - .withParameters(mapOf("ytnkqb", "datajtighsxj", "mehjnhjioti", "datalahovuuwx"))) + new Transformation().withName("yhhmvfxl") + .withDescription("jajodmkrr") + .withDataset(new DatasetReference().withReferenceName("pgqvqo") + .withParameters(mapOf("abwlyvx", "dataujqgi", "aeuhwwsknstvz", "datahpqvcts"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("hasupmlppdpgzvz") + .withParameters(mapOf("ruatsyiysjqhen", "datavbkarkptgon"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("bcngkegxc").withDatasetParameters("dataxbbfetwil") - .withParameters(mapOf("frolq", "dataoxpdxq")).withAdditionalProperties(mapOf())), - new Transformation().withName("wlp").withDescription("uqhrlmcskykp") - .withDataset(new DatasetReference().withReferenceName("ofix") - .withParameters(mapOf("kkpyycpaw", "datacf", "cfpcfjfwzlgz", "datapjprdpwr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("kgyepe").withParameters( - mapOf("rntmkctdhu", "datannidmdiawpzxk", "hqodv", "datasgwqpsqaz", "ti", "datagcnbhcbmjk"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ynts").withDatasetParameters("datamfmeftvhkmoo") - .withParameters(mapOf("gmjgrul", "datahskb")).withAdditionalProperties(mapOf())), - new Transformation().withName("kzkdtzxsoednlwg").withDescription("hezomucmqgisnion") - .withDataset(new DatasetReference().withReferenceName("bzdrdpuenxkgt") - .withParameters(mapOf("hzkbnbmx", "datamtrlxczn", "itoqcahfsg", "dataxmwtygeqzu", - "lisolntfxxc", "datajmlreesrfwsszvlc"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("mipfjw").withParameters( - mapOf("nvgskjtoxjd", "datagizmshxxbaizabu", "xqqm", "datajsjznv", "aydhf", "datai"))) + .withReferenceName("eq") + .withDatasetParameters("datauba") + .withParameters(mapOf("exmnv", "datadeayscseydzje", "xlniwmcpm", "datavmuw", "eqjzm", + "datardlhvdvmiphbe")) + .withAdditionalProperties(mapOf())), + new Transformation().withName("mazpzdqwuzvcmc") + .withDescription("xizekuvfrjwucao") + .withDataset(new DatasetReference().withReferenceName("vajbvbnkrdemdid") + .withParameters(mapOf("damisvpztdi", "datazidgzwd", "j", "dataykpxkqejt", "toiboancdr", + "dataojiunrlshxuknsyk"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("anvxuldxonckb") + .withParameters(mapOf("weghlwwbogvgfk", "datalfxlupibaqzizxzp", "fvdstrkzxsgt", + "dataqiyndveqels", "lr", "datans", "y", "datasmovpi"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("c").withDatasetParameters("datatfshksnyzm") - .withParameters(mapOf("iqdktwtkvih", "datamwbwmbnlslce", "nguuzhwvla", "datapfliwo", - "mhjhaus", "datap", "ekymffztsilscvqs", "datab")) + .withReferenceName("oxaxnrqaqotn") + .withDatasetParameters("dataxolousdv") + .withParameters(mapOf("wz", "datatqm", "qgkujds", 
"datadrpizfulgyctsdb", "eurbtigapdyarik", + "dataoxrqw", "kfqbriqulwwt", "dataejdpdfhtwm")) .withAdditionalProperties(mapOf())))) - .withScript("btvkbi").withScriptLines(Arrays.asList("htfgficudyhizpac", "muhbcakznho")); + .withScript("sumxpezcoioyj") + .withScriptLines(Arrays.asList("qzwqdnxke", "dcnwmywxfq", "kvemyzdpczaqpqi")); model = BinaryData.fromObject(model).toObject(Flowlet.class); - Assertions.assertEquals("oitwhrjsdmmazdnc", model.description()); - Assertions.assertEquals("lhzqpxzbawkikcdg", model.folder().name()); - Assertions.assertEquals("h", model.sources().get(0).name()); - Assertions.assertEquals("jsbcml", model.sources().get(0).description()); - Assertions.assertEquals("ahz", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yrqolnthbbnkgz", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("amonatnizex", model.description()); + Assertions.assertEquals("s", model.folder().name()); + Assertions.assertEquals("sibz", model.sources().get(0).name()); + Assertions.assertEquals("rfgxkyd", model.sources().get(0).description()); + Assertions.assertEquals("yp", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("ddhjkr", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("iocuselqkr", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("pnw", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("oxudnmckap", model.sinks().get(0).name()); - Assertions.assertEquals("knq", model.sinks().get(0).description()); - Assertions.assertEquals("jgencdgmoque", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("itbfjtdy", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("dhd", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("wnf", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("eubanlxunpqcc", model.sinks().get(0).name()); + Assertions.assertEquals("iawzlzklaslgac", model.sinks().get(0).description()); + Assertions.assertEquals("uxlrarwpe", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("fbbcngkegxcypxbb", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("szxvctkbbxuhar", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("qtvkh", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("rnskby", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("vkhhwm", model.transformations().get(0).name()); - Assertions.assertEquals("jbweunxcqr", model.transformations().get(0).description()); - Assertions.assertEquals("hu", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("kaugpucdocfqplwg", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("wnah", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("wfgcdiykkcxwn", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("rcqxgcbvzarmqc", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("i", 
model.transformations().get(0).name()); + Assertions.assertEquals("ymkouih", model.transformations().get(0).description()); + Assertions.assertEquals("seuugcig", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("dyhi", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("ikyjtkakvlb", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("btvkbi", model.script()); - Assertions.assertEquals("htfgficudyhizpac", model.scriptLines().get(0)); + Assertions.assertEquals("bssdpjeyoqxd", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("sumxpezcoioyj", model.script()); + Assertions.assertEquals("qzwqdnxke", model.scriptLines().get(0)); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java index af10e928c2f77..c1914f2287789 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java @@ -22,165 +22,186 @@ public final class FlowletTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FlowletTypeProperties model = BinaryData.fromString( - "{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"sd\",\"parameters\":{\"surejqrshzzbgu\":\"datayoqxdedecfiwhag\"}},\"name\":\"lcxiqqzjko\",\"description\":\"upnamglroui\",\"dataset\":{\"referenceName\":\"mfivjqterd\",\"parameters\":{\"d\":\"datagd\",\"tyhhmvfxlapja\":\"dataghpcvrwqirvt\"}},\"linkedService\":{\"referenceName\":\"dmkr\",\"parameters\":{\"qlujqgi\":\"datapgqvqo\",\"hpqvcts\":\"dataabwlyvx\",\"zhasupmlppdpgzvz\":\"dataaeuhwwsknstvz\",\"ptgongruat\":\"dataazvbkar\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"y\",\"datasetParameters\":\"dataqheni\",\"parameters\":{\"yjdeayscseyd\":\"dataqnguba\"},\"\":{\"muwrx\":\"datamexmnvk\",\"wmcpmrrdlhvdvm\":\"datan\",\"hkdcl\":\"dataphbeaeqjz\",\"unerke\":\"datacroczf\"}}},{\"schemaLinkedService\":{\"referenceName\":\"xzs\",\"parameters\":{\"udl\":\"dataezbzu\",\"cgwfsgqkstyecu\":\"datavzske\"}},\"name\":\"yu\",\"description\":\"p\",\"dataset\":{\"referenceName\":\"davsjcfmazpz\",\"parameters\":{\"izekuvfrj\":\"datauzvcmcok\",\"ajbvbn\":\"dataucaonz\",\"idgzwdydamis\":\"datardemdidack\",\"xkqejtpjfojiunr\":\"datapztdivyk\"}},\"linkedService\":{\"referenceName\":\"hxuk\",\"parameters\":{\"o\":\"datakdtoiboancdr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xu\",\"datasetParameters\":\"dataxonckbnlblfxlup\",\"parameters\":{\"izxzpzweghl\":\"dataq\",\"dve\":\"datawbogvgfklqiy\"},\"\":{\"vlrdsmovpi\":\"datasbfvdstrkzxsgtzn\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ndnoxaxnrqaq\",\"parameters\":{\"usdvrgp\":\"datandxol\"}},\"name\":\"qmawzjdrpizfu\",\"description\":\"yctsdbtqgkuj\",\"dataset\":{\"referenceName\":\"ooxrqwoeurb\",\"parameters\":{\"wmmkfq\":\"dataapdyarikeejdpdfh\",\"qulw\":\"datar\",\"eqkvyhzokpoyu\":\"datatrj\"}},\"linkedService
\":{\"referenceName\":\"uensn\",\"parameters\":{\"jsumxpezcoio\":\"dataphmpoejnglpwsada\",\"xkeedcnwmy\":\"datajrmfqzwqd\",\"czaqpqifdbmpt\":\"dataxfqzkvemyzd\",\"natnizexroqsqjg\":\"datawtxzuisam\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"thsplwsttxsr\",\"datasetParameters\":\"datafq\",\"parameters\":{\"sxyr\":\"dataiceovxgzw\",\"ik\":\"datajmtikes\",\"dseipnquwzxhrp\":\"dataohzixyqhfnkvycqq\"},\"\":{\"kfktltdds\":\"datadl\",\"ouhbq\":\"databjop\",\"yigfcvcew\":\"datazkqxsalu\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"dgsjsat\",\"parameters\":{\"azdfsqxhyqmrej\":\"datac\",\"bwtdr\":\"dataarnpvgrsz\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"gz\",\"parameters\":{\"fi\":\"dataxzlh\"}},\"name\":\"acfculzjrmhpf\",\"description\":\"vyldqpzfzxsoxin\",\"dataset\":{\"referenceName\":\"jlzkdrocqsxy\",\"parameters\":{\"is\":\"datatcmiwd\",\"p\":\"datanmeylajamcajyhf\",\"ryklleynqa\":\"datac\"}},\"linkedService\":{\"referenceName\":\"kig\",\"parameters\":{\"hg\":\"datalwalhvu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"etxdqcmyctajqzj\",\"datasetParameters\":\"datalecxbibiwks\",\"parameters\":{\"oikvntwcz\":\"datayxsbfpz\"},\"\":{\"ezpfki\":\"dataushlcxpblalh\",\"zsaaoqdsgptotxjq\":\"datasaid\",\"cnlrt\":\"dataia\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ijzzcaoijolbuauk\",\"parameters\":{\"lxqdwr\":\"dataeopex\",\"pibkgxyxyaux\":\"datawyil\",\"ytkujsq\":\"dataeddobmcnltm\",\"oxfab\":\"datacm\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"gpwb\",\"parameters\":{\"li\":\"datari\"}},\"name\":\"rycgnwplrrbph\",\"description\":\"sbbi\",\"dataset\":{\"referenceName\":\"icuhqvumspb\",\"parameters\":{\"xmzrmtmvwitu\":\"dataeqbbewfcuqfpy\"}},\"linkedService\":{\"referenceName\":\"yyjshcybwfuppo\",\"parameters\":{\"zsvavlr\":\"datacmvouujxdiikmoxr\",\"oywlunpipcwyb\":\"dataikj\",\"npatpftsae\":\"datazfn\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"whxorpwaltz\",\"datasetParameters\":\"datagexojfccylhtrht\",\"parameters\":{\"zxezmnr\":\"datazjpwexcdrzprob\",\"hlokfpmijpdvzv\":\"datajgpjeuxs\",\"rwyambhbafebzxfk\":\"databhwbdqufvcgnrgla\",\"nntrvrkps\":\"dataqutibhl\"},\"\":{\"vzm\":\"datau\"}}},{\"schemaLinkedService\":{\"referenceName\":\"hnysvlpyeu\",\"parameters\":{\"hyqqegatxgr\":\"datapdixqbolxv\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mg\",\"parameters\":{\"ibmg\":\"datatsdixchw\",\"gair\":\"dataymncjc\",\"fbhtleberp\":\"datacqzoofjnqjsve\"}},\"name\":\"ljekn\",\"description\":\"n\",\"dataset\":{\"referenceName\":\"j\",\"parameters\":{\"pnowawonoehrguql\":\"datawkdnjrxgkrhwiehy\",\"pyrgu\":\"datafwafbjz\"}},\"linkedService\":{\"referenceName\":\"azbkocbygvthrmxk\",\"parameters\":{\"keboo\":\"datawwdxomrawp\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"yacagae\",\"datasetParameters\":\"dataoiqclmgdtwgab\",\"parameters\":{\"wjecooyvhtuqbpe\":\"datakuz\"},\"\":{\"hftzbpyfao\":\"dataibncgagdvcd\",\"htncwmhjobzrfp\":\"datadf\",\"cqhyftcvbz\":\"dataiz\",\"orssatfyb\":\"datagwhgkgsoau\"}}}],\"transformations\":[{\"name\":\"fdmxuqb\",\"description\":\"nasttuxvzfqayop\",\"dataset\":{\"referenceName\":\"sixhgvbhx\",\"parameters\":{\"mar\":\"dataztgsqjay\",\"nh\":\"dataneibpgbrhbjdq\"}},\"linkedService\":{\"referenceName\":\"motpuwnnoh\",\"parameters\":{\"laynosugkf\":\"datangocfrjuypwyi\",\"hqucum\":\"dataaxttpfsmwgs\",\"uqmllfeothxu\":\"datadd\",\"vkrbzkuastaxklpr\":\"datarigrjdljlkq\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"hgltoizwx
vs\",\"datasetParameters\":\"datasgfy\",\"parameters\":{\"vfcck\":\"datayekgafxc\"},\"\":{\"ynctaczcnjfmbbfn\":\"datawletyveszrtlhpdh\",\"itzovnkr\":\"dataj\"}}},{\"name\":\"iklsmni\",\"description\":\"lcoqksyiib\",\"dataset\":{\"referenceName\":\"xwbgbudavqd\",\"parameters\":{\"jvlirk\":\"dataccqcdhth\",\"agzlgpyai\":\"dataucosawrdt\",\"qfttkacybdueur\":\"dataihzqjjtsmuy\",\"jermhzic\":\"datamcdcpkshl\"}},\"linkedService\":{\"referenceName\":\"fdjhyaaknyukibxi\",\"parameters\":{\"piilhvtozy\":\"dataphzwxqte\",\"f\":\"datagjjnxkbylhyyx\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vqzrwtrd\",\"datasetParameters\":\"datacnvqeons\",\"parameters\":{\"ezyohxpthceopv\":\"dataxlw\",\"lc\":\"datavtwfvesobpbokhm\"},\"\":{\"lqhxkasmcolmu\":\"datarnggcjfw\",\"ygz\":\"datapyvaosdkluwzx\",\"nobguqisqsqkpdmi\":\"datatyevjhu\",\"pnml\":\"datay\"}}}],\"script\":\"qcpszp\",\"scriptLines\":[\"qdvrdmvxyrxdh\",\"vqojbxaotcgbz\",\"mbtple\",\"oioyidoxznvgvd\"]}") + "{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"ohzixyqhfnkvycqq\",\"parameters\":{\"rptyo\":\"dataeipnquwzx\"}},\"name\":\"lh\",\"description\":\"ktltddsobjopnouh\",\"dataset\":{\"referenceName\":\"ezkqxsalu\",\"parameters\":{\"dgsjsat\":\"datagfcvcewbwq\",\"qmr\":\"datarncmazdfsqxh\",\"rcwgzwllxzlhhf\":\"datajparnpvgrszrbwt\",\"ywvyldqpzfzxsox\":\"dataxacfculzjrmhp\"}},\"linkedService\":{\"referenceName\":\"unjlzkdrocq\",\"parameters\":{\"wdwisvn\":\"datatqqtcm\",\"zcrryklleynqa\":\"dataeylajamcajyhft\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"igglclwalhvub\",\"datasetParameters\":\"datazphetxdqcm\",\"parameters\":{\"alec\":\"dataajqzj\",\"egyxsbfpzvoik\":\"databibiwks\"},\"\":{\"blalh\":\"datawczfzwushlcx\",\"said\":\"dataezpfki\"}}},{\"schemaLinkedService\":{\"referenceName\":\"saao\",\"parameters\":{\"xjqfiafcnlrtbfi\":\"datagpto\",\"wieopexelxqdw\":\"datazzcaoijolbuauk\",\"ux\":\"dataswyiljpibkgxyxy\"}},\"name\":\"eddobmcnltm\",\"description\":\"tkujsqycm\",\"dataset\":{\"referenceName\":\"xfabl\",\"parameters\":{\"cgnwplrrbphcts\":\"datawbmwhrialiwr\",\"pbfs\":\"databibticuhqvum\",\"xmzrmtmvwitu\":\"dataeqbbewfcuqfpy\"}},\"linkedService\":{\"referenceName\":\"yyjshcybwfuppo\",\"parameters\":{\"zsvavlr\":\"datacmvouujxdiikmoxr\",\"oywlunpipcwyb\":\"dataikj\",\"npatpftsae\":\"datazfn\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"whxorpwaltz\",\"datasetParameters\":\"datagexojfccylhtrht\",\"parameters\":{\"zxezmnr\":\"datazjpwexcdrzprob\",\"hlokfpmijpdvzv\":\"datajgpjeuxs\",\"rwyambhbafebzxfk\":\"databhwbdqufvcgnrgla\",\"nntrvrkps\":\"dataqutibhl\"},\"\":{\"vzm\":\"datau\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"ysvlpyeuukppd\",\"parameters\":{\"xvhhyqq\":\"databo\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"at\",\"parameters\":{\"nmtsdixc\":\"dataznmg\",\"ibmg\":\"dataw\",\"gair\":\"dataymncjc\",\"fbhtleberp\":\"datacqzoofjnqjsve\"}},\"name\":\"ljekn\",\"description\":\"n\",\"dataset\":{\"referenceName\":\"j\",\"parameters\":{\"pnowawonoehrguql\":\"datawkdnjrxgkrhwiehy\",\"pyrgu\":\"datafwafbjz\"}},\"linkedService\":{\"referenceName\":\"azbkocbygvthrmxk\",\"parameters\":{\"keboo\":\"datawwdxomrawp\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"yacagae\",\"datasetParameters\":\"dataoiqclmgdtwgab\",\"parameters\":{\"wjecooyvhtuqbpe\":\"datakuz\"},\"\":{\"hftzbpyfao\":\"dataibncgagdvcd\",\"htncwmhjobzrfp\":\"datadf\",\"cqhyftcvbz\":\"dataiz\",\"orssatfyb\":\"datagwhgkgsoau\"}}},{\"schemaLinkedService\":{\"referenceNa
me\":\"ufdmxuq\",\"parameters\":{\"tsixhgvbhxmndztg\":\"datanasttuxvzfqayop\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"jay\",\"parameters\":{\"q\":\"datarxneibpgbrhbj\",\"nmotpuwnnoh\":\"datanh\",\"wyiulaynosu\":\"datamzngocfrjuy\"}},\"name\":\"kfhaxttpfsmwgsgh\",\"description\":\"cum\",\"dataset\":{\"referenceName\":\"dau\",\"parameters\":{\"dlj\":\"datalfeothxuarigr\",\"pruulhg\":\"datakqhvkrbzkuastaxk\"}},\"linkedService\":{\"referenceName\":\"oizwxvs\",\"parameters\":{\"yekgafxc\":\"datagfyys\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"cckwrtwle\",\"datasetParameters\":\"dataveszrtlhpdhw\",\"parameters\":{\"njfmbbfnvjxit\":\"datatacz\"},\"\":{\"mn\":\"datankrtikl\"}}},{\"schemaLinkedService\":{\"referenceName\":\"qlcoqksyiibhyx\",\"parameters\":{\"avqdorbccqcd\":\"databu\",\"wrdtnagzlgpy\":\"datathojvlirknucos\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"xihzqjjts\",\"parameters\":{\"acybdueurgmcdcpk\":\"datadqftt\",\"ermhzicsbfdjhyaa\":\"datahlo\",\"glhphzwx\":\"datanyukibx\"}},\"name\":\"tej\",\"description\":\"ilhvtozyagjj\",\"dataset\":{\"referenceName\":\"k\",\"parameters\":{\"dgscnvqeonsgnwx\":\"datahyyxgffklvqzrwt\"}},\"linkedService\":{\"referenceName\":\"mezyo\",\"parameters\":{\"sobpbo\":\"datathceopvkvtwfv\",\"yarnggcjfwblqhxk\":\"datahmmlc\",\"aosd\":\"datasmcolmugpy\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"wzxeygzvtyevjhu\",\"datasetParameters\":\"dataobguqisqsqk\",\"parameters\":{\"nmlvi\":\"dataioyj\",\"yrx\":\"datacpszpmcvqdvrdmv\"},\"\":{\"mbtple\":\"datavqojbxaotcgbz\",\"muuv\":\"dataoioyidoxznvgvd\"}}}],\"transformations\":[{\"name\":\"sxmrszbknimxlp\",\"description\":\"rxrzutylcur\",\"dataset\":{\"referenceName\":\"q\",\"parameters\":{\"jmbnvynf\":\"dataxqaehtd\"}},\"linkedService\":{\"referenceName\":\"oeactedcglskakdd\",\"parameters\":{\"duyqypf\":\"datahzllrqmtlpbyxro\",\"brjjtalxrdsjr\":\"datamnoiicsudy\",\"pjwyblvtbdmvs\":\"dataoluqwgusxxhdo\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"daelqpv\",\"datasetParameters\":\"datamkwjfbotl\",\"parameters\":{\"ivuxcjkcoqwczs\":\"datadusxurs\",\"qtnhjrfd\":\"dataiqrizfwihvaan\"},\"\":{\"fkkauigvmua\":\"datavbbaexxjfwtg\"}}},{\"name\":\"mczfedyuepsvplt\",\"description\":\"ajjvywe\",\"dataset\":{\"referenceName\":\"fkumcfjxoky\",\"parameters\":{\"k\":\"datayasvf\",\"jekrknfd\":\"datamyg\",\"lcr\":\"dataugjqyckgtxkrdt\",\"tcsubmzoo\":\"datajdkl\"}},\"linkedService\":{\"referenceName\":\"vo\",\"parameters\":{\"yslb\":\"datakxfpwh\",\"yqxridttb\":\"datalglmnnkkwayqsh\",\"uylztpziizevjyk\":\"dataaqjmkgxqwque\",\"kqtwqlepjjzkcasf\":\"datafvezefk\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ntogffjwajnrt\",\"datasetParameters\":\"datavaqkifmxawost\",\"parameters\":{\"hemvwfnqqwypvnd\":\"datanhrkmjqncfvdsc\",\"jfkainj\":\"datawbgodtggrssg\",\"bgmusaictd\":\"datauymvecvzts\"},\"\":{\"zeqozre\":\"datakzzohnrddc\",\"ixbnj\":\"datalbz\",\"ameudpabcreu\":\"dataqvzyuexozonyn\",\"exzvdube\":\"datazosgyjxvcvasorm\"}}}],\"script\":\"zygba\",\"scriptLines\":[\"ecovs\",\"qhzrtdbak\",\"limzfvppk\"]}") .toObject(FlowletTypeProperties.class); - Assertions.assertEquals("lcxiqqzjko", model.sources().get(0).name()); - Assertions.assertEquals("upnamglroui", model.sources().get(0).description()); - Assertions.assertEquals("mfivjqterd", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("dmkr", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("lh", 
model.sources().get(0).name()); + Assertions.assertEquals("ktltddsobjopnouh", model.sources().get(0).description()); + Assertions.assertEquals("ezkqxsalu", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("unjlzkdrocq", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("y", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("sd", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("acfculzjrmhpf", model.sinks().get(0).name()); - Assertions.assertEquals("vyldqpzfzxsoxin", model.sinks().get(0).description()); - Assertions.assertEquals("jlzkdrocqsxy", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("kig", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("igglclwalhvub", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ohzixyqhfnkvycqq", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("ljekn", model.sinks().get(0).name()); + Assertions.assertEquals("n", model.sinks().get(0).description()); + Assertions.assertEquals("j", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("azbkocbygvthrmxk", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("etxdqcmyctajqzj", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("dgsjsat", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("gz", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("fdmxuqb", model.transformations().get(0).name()); - Assertions.assertEquals("nasttuxvzfqayop", model.transformations().get(0).description()); - Assertions.assertEquals("sixhgvbhx", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("motpuwnnoh", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("yacagae", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("ysvlpyeuukppd", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("at", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("sxmrszbknimxlp", model.transformations().get(0).name()); + Assertions.assertEquals("rxrzutylcur", model.transformations().get(0).description()); + Assertions.assertEquals("q", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("oeactedcglskakdd", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("hgltoizwxvs", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("qcpszp", model.script()); - Assertions.assertEquals("qdvrdmvxyrxdh", model.scriptLines().get(0)); + Assertions.assertEquals("daelqpv", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("zygba", model.script()); + Assertions.assertEquals("ecovs", model.scriptLines().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FlowletTypeProperties model = new 
FlowletTypeProperties() - .withSources(Arrays.asList( - new DataFlowSource().withName("lcxiqqzjko").withDescription("upnamglroui") - .withDataset(new DatasetReference().withReferenceName("mfivjqterd") - .withParameters(mapOf("d", "datagd", "tyhhmvfxlapja", "dataghpcvrwqirvt"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("dmkr") - .withParameters(mapOf("qlujqgi", "datapgqvqo", "hpqvcts", "dataabwlyvx", "zhasupmlppdpgzvz", - "dataaeuhwwsknstvz", "ptgongruat", "dataazvbkar"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("y").withDatasetParameters("dataqheni") - .withParameters(mapOf("yjdeayscseyd", "dataqnguba")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("sd") - .withParameters(mapOf("surejqrshzzbgu", "datayoqxdedecfiwhag"))), - new DataFlowSource().withName("yu").withDescription("p") - .withDataset(new DatasetReference().withReferenceName("davsjcfmazpz") - .withParameters(mapOf("izekuvfrj", "datauzvcmcok", "ajbvbn", "dataucaonz", "idgzwdydamis", - "datardemdidack", "xkqejtpjfojiunr", "datapztdivyk"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("hxuk") - .withParameters(mapOf("o", "datakdtoiboancdr"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("xu").withDatasetParameters("dataxonckbnlblfxlup") - .withParameters(mapOf("izxzpzweghl", "dataq", "dve", "datawbogvgfklqiy")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("xzs") - .withParameters(mapOf("udl", "dataezbzu", "cgwfsgqkstyecu", "datavzske"))), - new DataFlowSource().withName("qmawzjdrpizfu").withDescription("yctsdbtqgkuj") - .withDataset(new DatasetReference().withReferenceName("ooxrqwoeurb").withParameters( - mapOf("wmmkfq", "dataapdyarikeejdpdfh", "qulw", "datar", "eqkvyhzokpoyu", "datatrj"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("uensn") - .withParameters(mapOf("jsumxpezcoio", "dataphmpoejnglpwsada", "xkeedcnwmy", "datajrmfqzwqd", - "czaqpqifdbmpt", "dataxfqzkvemyzd", "natnizexroqsqjg", "datawtxzuisam"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("thsplwsttxsr").withDatasetParameters("datafq") - .withParameters(mapOf("sxyr", "dataiceovxgzw", "ik", "datajmtikes", "dseipnquwzxhrp", - "dataohzixyqhfnkvycqq")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ndnoxaxnrqaq") - .withParameters(mapOf("usdvrgp", "datandxol"))))) - .withSinks( - Arrays.asList( - new DataFlowSink().withName("acfculzjrmhpf").withDescription("vyldqpzfzxsoxin") - .withDataset(new DatasetReference().withReferenceName("jlzkdrocqsxy").withParameters( - mapOf("is", "datatcmiwd", "p", "datanmeylajamcajyhf", "ryklleynqa", "datac"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("kig") - .withParameters(mapOf("hg", "datalwalhvu"))) + FlowletTypeProperties model + = new FlowletTypeProperties() + .withSources(Arrays.asList( + new DataFlowSource().withName("lh") + .withDescription("ktltddsobjopnouh") + .withDataset(new DatasetReference().withReferenceName("ezkqxsalu") + .withParameters(mapOf("dgsjsat", "datagfcvcewbwq", "qmr", "datarncmazdfsqxh", + "rcwgzwllxzlhhf", "datajparnpvgrszrbwt", "ywvyldqpzfzxsox", "dataxacfculzjrmhp"))) + .withLinkedService(new 
LinkedServiceReference().withReferenceName("unjlzkdrocq") + .withParameters(mapOf("wdwisvn", "datatqqtcm", "zcrryklleynqa", "dataeylajamcajyhft"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("etxdqcmyctajqzj").withDatasetParameters("datalecxbibiwks") - .withParameters(mapOf("oikvntwcz", "datayxsbfpz")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("dgsjsat") - .withParameters(mapOf("azdfsqxhyqmrej", "datac", "bwtdr", "dataarnpvgrsz"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("gz").withParameters( - mapOf("fi", "dataxzlh"))), - new DataFlowSink().withName("rycgnwplrrbph").withDescription("sbbi") - .withDataset(new DatasetReference().withReferenceName("icuhqvumspb") - .withParameters(mapOf("xmzrmtmvwitu", "dataeqbbewfcuqfpy"))) + .withReferenceName("igglclwalhvub") + .withDatasetParameters("datazphetxdqcm") + .withParameters(mapOf("alec", "dataajqzj", "egyxsbfpzvoik", "databibiwks")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ohzixyqhfnkvycqq") + .withParameters(mapOf("rptyo", "dataeipnquwzx"))), + new DataFlowSource().withName("eddobmcnltm") + .withDescription("tkujsqycm") + .withDataset(new DatasetReference().withReferenceName("xfabl") + .withParameters(mapOf("cgnwplrrbphcts", "datawbmwhrialiwr", "pbfs", "databibticuhqvum", + "xmzrmtmvwitu", "dataeqbbewfcuqfpy"))) .withLinkedService(new LinkedServiceReference().withReferenceName("yyjshcybwfuppo") .withParameters(mapOf("zsvavlr", "datacmvouujxdiikmoxr", "oywlunpipcwyb", "dataikj", "npatpftsae", "datazfn"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("whxorpwaltz").withDatasetParameters("datagexojfccylhtrht") + .withReferenceName("whxorpwaltz") + .withDatasetParameters("datagexojfccylhtrht") .withParameters(mapOf("zxezmnr", "datazjpwexcdrzprob", "hlokfpmijpdvzv", "datajgpjeuxs", "rwyambhbafebzxfk", "databhwbdqufvcgnrgla", "nntrvrkps", "dataqutibhl")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ijzzcaoijolbuauk") - .withParameters(mapOf("lxqdwr", "dataeopex", "pibkgxyxyaux", "datawyil", "ytkujsq", - "dataeddobmcnltm", "oxfab", "datacm"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("gpwb") - .withParameters(mapOf("li", "datari"))), - new DataFlowSink().withName("ljekn").withDescription("n") - .withDataset(new DatasetReference().withReferenceName("j") - .withParameters(mapOf("pnowawonoehrguql", "datawkdnjrxgkrhwiehy", "pyrgu", "datafwafbjz"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("azbkocbygvthrmxk") - .withParameters(mapOf("keboo", "datawwdxomrawp"))) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("saao") + .withParameters(mapOf("xjqfiafcnlrtbfi", "datagpto", "wieopexelxqdw", "datazzcaoijolbuauk", + "ux", "dataswyiljpibkgxyxy"))))) + .withSinks( + Arrays + .asList( + new DataFlowSink().withName("ljekn") + .withDescription("n") + .withDataset(new DatasetReference().withReferenceName("j") + .withParameters( + mapOf("pnowawonoehrguql", "datawkdnjrxgkrhwiehy", "pyrgu", "datafwafbjz"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("azbkocbygvthrmxk") + .withParameters(mapOf("keboo", "datawwdxomrawp"))) + .withFlowlet(new 
DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("yacagae") + .withDatasetParameters("dataoiqclmgdtwgab") + .withParameters(mapOf("wjecooyvhtuqbpe", "datakuz")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ysvlpyeuukppd") + .withParameters(mapOf("xvhhyqq", "databo"))) + .withRejectedDataLinkedService(new LinkedServiceReference() + .withReferenceName("at") + .withParameters(mapOf( + "nmtsdixc", "dataznmg", "ibmg", "dataw", "gair", "dataymncjc", "fbhtleberp", + "datacqzoofjnqjsve"))), + new DataFlowSink().withName("kfhaxttpfsmwgsgh") + .withDescription("cum") + .withDataset(new DatasetReference().withReferenceName("dau") + .withParameters( + mapOf("dlj", "datalfeothxuarigr", "pruulhg", "datakqhvkrbzkuastaxk"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("oizwxvs") + .withParameters(mapOf("yekgafxc", "datagfyys"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("cckwrtwle") + .withDatasetParameters("dataveszrtlhpdhw") + .withParameters(mapOf("njfmbbfnvjxit", "datatacz")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ufdmxuq") + .withParameters(mapOf("tsixhgvbhxmndztg", "datanasttuxvzfqayop"))) + .withRejectedDataLinkedService(new LinkedServiceReference() + .withReferenceName("jay") + .withParameters( + mapOf( + "q", "datarxneibpgbrhbj", "nmotpuwnnoh", "datanh", "wyiulaynosu", + "datamzngocfrjuy"))), + new DataFlowSink().withName("tej") + .withDescription("ilhvtozyagjj") + .withDataset(new DatasetReference().withReferenceName("k") + .withParameters(mapOf("dgscnvqeonsgnwx", "datahyyxgffklvqzrwt"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("mezyo") + .withParameters(mapOf("sobpbo", "datathceopvkvtwfv", "yarnggcjfwblqhxk", + "datahmmlc", "aosd", "datasmcolmugpy"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("wzxeygzvtyevjhu") + .withDatasetParameters("dataobguqisqsqk") + .withParameters(mapOf("nmlvi", "dataioyj", "yrx", "datacpszpmcvqdvrdmv")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService( + new LinkedServiceReference().withReferenceName("qlcoqksyiibhyx") + .withParameters( + mapOf("avqdorbccqcd", "databu", "wrdtnagzlgpy", "datathojvlirknucos"))) + .withRejectedDataLinkedService( + new LinkedServiceReference().withReferenceName("xihzqjjts") + .withParameters(mapOf("acybdueurgmcdcpk", "datadqftt", "ermhzicsbfdjhyaa", + "datahlo", "glhphzwx", "datanyukibx"))))) + .withTransformations(Arrays.asList( + new Transformation().withName("sxmrszbknimxlp") + .withDescription("rxrzutylcur") + .withDataset(new DatasetReference().withReferenceName("q") + .withParameters(mapOf("jmbnvynf", "dataxqaehtd"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("oeactedcglskakdd") + .withParameters(mapOf("duyqypf", "datahzllrqmtlpbyxro", "brjjtalxrdsjr", "datamnoiicsudy", + "pjwyblvtbdmvs", "dataoluqwgusxxhdo"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("daelqpv") + .withDatasetParameters("datamkwjfbotl") + .withParameters(mapOf("ivuxcjkcoqwczs", "datadusxurs", "qtnhjrfd", "dataiqrizfwihvaan")) + .withAdditionalProperties(mapOf())), + new Transformation().withName("mczfedyuepsvplt") + .withDescription("ajjvywe") + .withDataset(new 
DatasetReference().withReferenceName("fkumcfjxoky") + .withParameters(mapOf("k", "datayasvf", "jekrknfd", "datamyg", "lcr", "dataugjqyckgtxkrdt", + "tcsubmzoo", "datajdkl"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("vo") + .withParameters(mapOf("yslb", "datakxfpwh", "yqxridttb", "datalglmnnkkwayqsh", + "uylztpziizevjyk", "dataaqjmkgxqwque", "kqtwqlepjjzkcasf", "datafvezefk"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("yacagae").withDatasetParameters("dataoiqclmgdtwgab") - .withParameters(mapOf("wjecooyvhtuqbpe", "datakuz")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("hnysvlpyeu") - .withParameters(mapOf("hyqqegatxgr", "datapdixqbolxv"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("mg") - .withParameters(mapOf("ibmg", "datatsdixchw", "gair", "dataymncjc", "fbhtleberp", - "datacqzoofjnqjsve"))))) - .withTransformations(Arrays.asList( - new Transformation().withName("fdmxuqb").withDescription("nasttuxvzfqayop") - .withDataset(new DatasetReference().withReferenceName("sixhgvbhx") - .withParameters(mapOf("mar", "dataztgsqjay", "nh", "dataneibpgbrhbjdq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("motpuwnnoh") - .withParameters(mapOf("laynosugkf", "datangocfrjuypwyi", "hqucum", "dataaxttpfsmwgs", - "uqmllfeothxu", "datadd", "vkrbzkuastaxklpr", "datarigrjdljlkq"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("hgltoizwxvs").withDatasetParameters("datasgfy") - .withParameters(mapOf("vfcck", "datayekgafxc")).withAdditionalProperties(mapOf())), - new Transformation().withName("iklsmni").withDescription("lcoqksyiib") - .withDataset(new DatasetReference().withReferenceName("xwbgbudavqd") - .withParameters(mapOf("jvlirk", "dataccqcdhth", "agzlgpyai", "dataucosawrdt", "qfttkacybdueur", - "dataihzqjjtsmuy", "jermhzic", "datamcdcpkshl"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("fdjhyaaknyukibxi") - .withParameters(mapOf("piilhvtozy", "dataphzwxqte", "f", "datagjjnxkbylhyyx"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vqzrwtrd").withDatasetParameters("datacnvqeons") - .withParameters(mapOf("ezyohxpthceopv", "dataxlw", "lc", "datavtwfvesobpbokhm")) - .withAdditionalProperties(mapOf())))) - .withScript("qcpszp") - .withScriptLines(Arrays.asList("qdvrdmvxyrxdh", "vqojbxaotcgbz", "mbtple", "oioyidoxznvgvd")); + .withReferenceName("ntogffjwajnrt") + .withDatasetParameters("datavaqkifmxawost") + .withParameters(mapOf("hemvwfnqqwypvnd", "datanhrkmjqncfvdsc", "jfkainj", + "datawbgodtggrssg", "bgmusaictd", "datauymvecvzts")) + .withAdditionalProperties(mapOf())))) + .withScript("zygba") + .withScriptLines(Arrays.asList("ecovs", "qhzrtdbak", "limzfvppk")); model = BinaryData.fromObject(model).toObject(FlowletTypeProperties.class); - Assertions.assertEquals("lcxiqqzjko", model.sources().get(0).name()); - Assertions.assertEquals("upnamglroui", model.sources().get(0).description()); - Assertions.assertEquals("mfivjqterd", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("dmkr", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("lh", model.sources().get(0).name()); + Assertions.assertEquals("ktltddsobjopnouh", model.sources().get(0).description()); + 
Assertions.assertEquals("ezkqxsalu", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("unjlzkdrocq", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("y", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("sd", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("acfculzjrmhpf", model.sinks().get(0).name()); - Assertions.assertEquals("vyldqpzfzxsoxin", model.sinks().get(0).description()); - Assertions.assertEquals("jlzkdrocqsxy", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("kig", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("igglclwalhvub", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ohzixyqhfnkvycqq", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("ljekn", model.sinks().get(0).name()); + Assertions.assertEquals("n", model.sinks().get(0).description()); + Assertions.assertEquals("j", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("azbkocbygvthrmxk", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("etxdqcmyctajqzj", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("dgsjsat", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("gz", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("fdmxuqb", model.transformations().get(0).name()); - Assertions.assertEquals("nasttuxvzfqayop", model.transformations().get(0).description()); - Assertions.assertEquals("sixhgvbhx", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("motpuwnnoh", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("yacagae", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("ysvlpyeuukppd", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("at", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("sxmrszbknimxlp", model.transformations().get(0).name()); + Assertions.assertEquals("rxrzutylcur", model.transformations().get(0).description()); + Assertions.assertEquals("q", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("oeactedcglskakdd", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("hgltoizwxvs", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("qcpszp", model.script()); - Assertions.assertEquals("qdvrdmvxyrxdh", model.scriptLines().get(0)); + Assertions.assertEquals("daelqpv", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("zygba", model.script()); + Assertions.assertEquals("ecovs", model.scriptLines().get(0)); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java index 6edc53e985253..3697e6dbf82ed 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java @@ -22,94 +22,81 @@ public final class ForEachActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ForEachActivity model = BinaryData.fromString( - "{\"type\":\"ForEach\",\"typeProperties\":{\"isSequential\":false,\"batchCount\":2055643232,\"items\":{\"value\":\"bgzyafazwieiz\"},\"activities\":[{\"type\":\"Activity\",\"name\":\"jjdboxuinrsrrij\",\"description\":\"nthtqtbcwtcqj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"tzbvdz\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Completed\",\"Skipped\"],\"\":{\"gwpyljnqiprjahgq\":\"datajclrtssbkz\",\"ufnumf\":\"databyic\",\"ajpp\":\"dataahnruqhmur\",\"zo\":\"dataflvazp\"}},{\"activity\":\"sqmli\",\"dependencyConditions\":[\"Failed\"],\"\":{\"xnts\":\"datallpobz\",\"wwhml\":\"datayntkfziitbwth\",\"bfg\":\"datas\",\"nzuufpdwk\":\"datajkkra\"}}],\"userProperties\":[{\"name\":\"phwxdwlowymeqiqn\",\"value\":\"dataca\"},{\"name\":\"mxuoxk\",\"value\":\"datapleooom\"}],\"\":{\"gaofobjl\":\"datajfldzvgogqu\",\"exbjbknpzhfh\":\"datanaxfvsyustrb\",\"sl\":\"databhgw\"}},{\"type\":\"Activity\",\"name\":\"qb\",\"description\":\"cjbxochijwpsk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ikwahbzdgwki\",\"dependencyConditions\":[\"Completed\"],\"\":{\"xcrxqpen\":\"datarvj\"}},{\"activity\":\"ujxdnia\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Failed\",\"Failed\"],\"\":{\"duwxqytppjdyikdy\":\"datahwgi\",\"utrpdgmukm\":\"dataxhxrkdtuc\",\"xaednczvnwyfzav\":\"datacvftijlshlcrjyne\"}},{\"activity\":\"ajbah\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\"],\"\":{\"mqeeodfpl\":\"datajtywluxy\",\"ofxgwyvjef\":\"datafsmpbwwphjwqmc\",\"xjrttzhn\":\"datalxqmtedzxujx\"}},{\"activity\":\"mbjqynwqcovpjv\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Succeeded\"],\"\":{\"k\":\"databdd\"}}],\"userProperties\":[{\"name\":\"rcmayqasdve\",\"value\":\"dataldafx\"},{\"name\":\"pyvlfujsbcfogu\",\"value\":\"databqcqnchdzyju\"},{\"name\":\"dknblbrixvcp\",\"value\":\"datasvprumttrvkhu\"}],\"\":{\"egxgymxplrtue\":\"dataxxwbjbanlmpm\",\"nj\":\"dataqhqu\",\"ebpvhdk\":\"dataybgpjyuvjuowk\",\"ttjmdtfuwx\":\"datadqcgedipnnzmvt\"}}]},\"name\":\"ee\",\"description\":\"mies\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"btumttmixewparb\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"tixggnld\":\"datavqnkwjhj\",\"uklepetsxe\":\"datalgqsoiuncmuv\",\"xhkzcdni\":\"dataneherqbelmsx\"}}],\"userProperties\":[{\"name\":\"d\",\"value\":\"datavsvgydtdto\"}],\"\":{\"pooaskflrqwfmbk\":\"datazotmiizk\",\"bwudiyfixpwrrqiv\":\"datashbrzvnouthbvv\",\"lozg\":\"datazqcmrxh\",\"yttxspaafs\":\"datafhijcetcystrs\"}}") + 
"{\"type\":\"o\",\"typeProperties\":{\"isSequential\":false,\"batchCount\":305424192,\"items\":{\"value\":\"fdgnaoirrufdgt\"},\"activities\":[{\"type\":\"esrhvgp\",\"name\":\"fzdgsmeeqelmrpvg\",\"description\":\"rgqskd\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"dxjxkxvgodekfe\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"ooqjraknngad\":\"datarfeqefqd\",\"pvomxtosdbv\":\"datallhzlicvrdwjght\",\"eebzewbifcyptl\":\"datadoieo\"}}],\"userProperties\":[{\"name\":\"hdlrrivvuewrhk\",\"value\":\"datamphfhmuaoouu\"}],\"\":{\"olhhlggobjc\":\"datadggsr\"}}]},\"name\":\"rphzd\",\"description\":\"kjyhaqkglupmyqi\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"vvmmjw\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"catyryykonvenmv\":\"dataglm\",\"zqfzbiyv\":\"databgpgvliinueltcoi\"}},{\"activity\":\"wyyvsbjpyxlzxjir\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Failed\"],\"\":{\"scitizroru\":\"datadgoqxf\",\"ehagorbspotq\":\"dataduwqovlqfz\"}}],\"userProperties\":[{\"name\":\"vubfugd\",\"value\":\"datapmtzqpivochmexim\"},{\"name\":\"misve\",\"value\":\"datauqibkjam\"},{\"name\":\"h\",\"value\":\"datarulgypnaqwjsdwn\"}],\"\":{\"ygn\":\"dataqntxbeeziiqix\",\"euztpss\":\"datarynfoa\"}}") .toObject(ForEachActivity.class); - Assertions.assertEquals("ee", model.name()); - Assertions.assertEquals("mies", model.description()); + Assertions.assertEquals("rphzd", model.name()); + Assertions.assertEquals("kjyhaqkglupmyqi", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("btumttmixewparb", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("d", model.userProperties().get(0).name()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("vvmmjw", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vubfugd", model.userProperties().get(0).name()); Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(2055643232, model.batchCount()); - Assertions.assertEquals("bgzyafazwieiz", model.items().value()); - Assertions.assertEquals("jjdboxuinrsrrij", model.activities().get(0).name()); - Assertions.assertEquals("nthtqtbcwtcqj", model.activities().get(0).description()); + Assertions.assertEquals(305424192, model.batchCount()); + Assertions.assertEquals("fdgnaoirrufdgt", model.items().value()); + Assertions.assertEquals("fzdgsmeeqelmrpvg", model.activities().get(0).name()); + Assertions.assertEquals("rgqskd", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("tzbvdz", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("dxjxkxvgodekfe", model.activities().get(0).dependsOn().get(0).activity()); + 
Assertions.assertEquals(DependencyCondition.FAILED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("phwxdwlowymeqiqn", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("hdlrrivvuewrhk", model.activities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ForEachActivity model = new ForEachActivity().withName("ee").withDescription("mies") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("btumttmixewparb") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("d").withValue("datavsvgydtdto"))) - .withIsSequential(false).withBatchCount(2055643232).withItems(new Expression().withValue("bgzyafazwieiz")) - .withActivities(Arrays.asList( - new Activity().withName("jjdboxuinrsrrij").withDescription("nthtqtbcwtcqj") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("tzbvdz") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("sqmli") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) + ForEachActivity model + = new ForEachActivity().withName("rphzd") + .withDescription("kjyhaqkglupmyqi") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("vvmmjw") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wyyvsbjpyxlzxjir") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList( + new UserProperty().withName("vubfugd").withValue("datapmtzqpivochmexim"), + new UserProperty().withName("misve").withValue("datauqibkjam"), + new UserProperty().withName("h").withValue("datarulgypnaqwjsdwn"))) + .withIsSequential(false) + .withBatchCount(305424192) + .withItems(new Expression().withValue("fdgnaoirrufdgt")) + .withActivities(Arrays.asList(new Activity().withName("fzdgsmeeqelmrpvg") + .withDescription("rgqskd") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("dxjxkxvgodekfe") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("phwxdwlowymeqiqn").withValue("dataca"), - new UserProperty().withName("mxuoxk").withValue("datapleooom"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("qb").withDescription("cjbxochijwpsk").withState(ActivityState.INACTIVE) - 
.withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ikwahbzdgwki") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ujxdnia") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ajbah") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mbjqynwqcovpjv") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("rcmayqasdve").withValue("dataldafx"), - new UserProperty().withName("pyvlfujsbcfogu").withValue("databqcqnchdzyju"), - new UserProperty().withName("dknblbrixvcp").withValue("datasvprumttrvkhu"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + Arrays.asList(new UserProperty().withName("hdlrrivvuewrhk").withValue("datamphfhmuaoouu"))) + .withAdditionalProperties(mapOf("type", "esrhvgp")))); model = BinaryData.fromObject(model).toObject(ForEachActivity.class); - Assertions.assertEquals("ee", model.name()); - Assertions.assertEquals("mies", model.description()); + Assertions.assertEquals("rphzd", model.name()); + Assertions.assertEquals("kjyhaqkglupmyqi", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("btumttmixewparb", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("d", model.userProperties().get(0).name()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("vvmmjw", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vubfugd", model.userProperties().get(0).name()); Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(2055643232, model.batchCount()); - Assertions.assertEquals("bgzyafazwieiz", model.items().value()); - Assertions.assertEquals("jjdboxuinrsrrij", model.activities().get(0).name()); - Assertions.assertEquals("nthtqtbcwtcqj", model.activities().get(0).description()); + Assertions.assertEquals(305424192, model.batchCount()); + Assertions.assertEquals("fdgnaoirrufdgt", model.items().value()); + Assertions.assertEquals("fzdgsmeeqelmrpvg", model.activities().get(0).name()); + Assertions.assertEquals("rgqskd", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("tzbvdz", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, 
model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("dxjxkxvgodekfe", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("phwxdwlowymeqiqn", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("hdlrrivvuewrhk", model.activities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java index 67af99c5cc0bc..bf1a2124f0fba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java @@ -22,115 +22,83 @@ public final class ForEachActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ForEachActivityTypeProperties model = BinaryData.fromString( - "{\"isSequential\":false,\"batchCount\":614609889,\"items\":{\"value\":\"erlrqtqnxhu\"},\"activities\":[{\"type\":\"Activity\",\"name\":\"tqveumwbmqpbfjbs\",\"description\":\"jqkykjzbxmgsxb\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"oqfeobkmxohmr\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"kfxdaqwnkjyfyy\":\"datanwcejcz\",\"gryo\":\"databtiugcaas\",\"onuocmxt\":\"datahuvuokrkib\"}},{\"activity\":\"jaxkby\",\"dependencyConditions\":[\"Completed\"],\"\":{\"ryzxhtvythp\":\"datamyvasnmzsvd\",\"yzacjxczjosixter\":\"datanlmfvq\"}}],\"userProperties\":[{\"name\":\"khtmmkm\",\"value\":\"datazlh\"},{\"name\":\"trqhncscaynh\",\"value\":\"datamziwxwwpi\"},{\"name\":\"wl\",\"value\":\"dataluqqlpphotbsgkl\"},{\"name\":\"uct\",\"value\":\"datafzhvxqotwcfbq\"}],\"\":{\"huhyxxftrfw\":\"datachp\",\"jcsxqkmo\":\"datax\"}},{\"type\":\"Activity\",\"name\":\"aule\",\"description\":\"dxzno\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"utvqgnugiiy\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"crabrqdbxhg\":\"datakntdynbrf\",\"ruvnlubk\":\"datalz\",\"yxntuzgceuzhp\":\"datagfzii\"}},{\"activity\":\"mnpodsqil\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Completed\"],\"\":{\"qvbutesxufrwiiv\":\"dataophpudccaqh\",\"ozccdoat\":\"datakrgvzjtvjrrkdlw\",\"mijraei\":\"datanktheh\",\"hhci\":\"datav\"}},{\"activity\":\"lbnroxgwqgbv\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\"],\"\":{\"twkzfp\":\"dataecozvxdb\",\"uxtyvpvegxdzopfk\":\"datawjvjuixbtkuv\",\"jkteubntqvlktq\":\"datajxjnxznlx\",\"urblbtvsxnaothlr\":\"datab\"}},{\"activity\":\"jiktwfjyl\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Skipped\"],\"\":{\"ixfdbicpchbc\":\"datawclbznwegyhzuc\",\"hyx\":\"datadpyorhqbpfvh\"}}],\"userProperties\":[{\"name\":\"nhszmuvareakcxda\",\"value\":\"datanmnmqydpieleruoy\"},{\"name\":\"npcrsfqwqm\",\"value\":\"dataisjqo\"},{\"name\":\"vjurjczdelqaz\",\"value\":\"datazixgqrk\"}],\"\":{\"nloqjmojgdb\":\"datawpaeceuvqacaed\",\"jksmyeegbertf\":\"databrrqxldkhgngyofe\",\"rd\":\"datancxkazmydsqvjkfz\"
,\"bw\":\"datacwgcmmv\"}},{\"type\":\"Activity\",\"name\":\"i\",\"description\":\"y\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"dujkdacuvyeck\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"dfy\":\"datalbx\",\"wmehaic\":\"dataywmezoi\",\"v\":\"datakkcpkvujwf\",\"fsiiadfjxfiv\":\"datavvnbbeys\"}},{\"activity\":\"uq\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"zl\":\"datawnvfbrgtoqk\",\"wiitxye\":\"datakjhu\",\"ehhkcutxmqvbh\":\"dataidk\"}}],\"userProperties\":[{\"name\":\"duu\",\"value\":\"datakrskqgokhpzvph\"}],\"\":{\"mhrfwch\":\"datafcxvfurkdhopz\"}},{\"type\":\"Activity\",\"name\":\"mgeovjiezk\",\"description\":\"exldocqhl\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"exmfeechl\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"bmeksegdjq\":\"dataavofeouucgzifo\"}},{\"activity\":\"oc\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Succeeded\",\"Skipped\"],\"\":{\"zcwjaqyvnol\":\"dataimenjhtwkn\",\"m\":\"datapz\",\"quiqkuxajl\":\"databss\"}},{\"activity\":\"iffzpkrno\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"w\":\"datadir\",\"irxngmmv\":\"datanmaiqdj\"}},{\"activity\":\"rxoidmnsmd\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"fi\":\"datalhkcogxrsb\",\"n\":\"datachfxmedybj\",\"mpzbxqfm\":\"dataoaeudhvszw\",\"djushzfn\":\"dataypwglkvsp\"}}],\"userProperties\":[{\"name\":\"rdsmrvp\",\"value\":\"datawewgda\"},{\"name\":\"hzdhszk\",\"value\":\"datadlilkw\"}],\"\":{\"fxnokpkgrub\":\"datayvdabgctmfntl\",\"hdkx\":\"datazgz\",\"rhgelsvo\":\"datahlinjerkdurch\"}}]}") + "{\"isSequential\":true,\"batchCount\":1961751824,\"items\":{\"value\":\"rigyg\"},\"activities\":[{\"type\":\"puqilpdzby\",\"name\":\"rvkx\",\"description\":\"fzs\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ucvq\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\"],\"\":{\"ktmsphcrn\":\"datad\",\"spykcreu\":\"datavxtz\",\"crdrdkexcyw\":\"datapigsulejukack\"}},{\"activity\":\"riddcnljllypchqh\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"mmpgczxi\":\"datatdzpeliktk\"}},{\"activity\":\"qyvwzxqmvestnr\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"rxvnjjim\":\"datagyttfzo\",\"huuezxcpxwqg\":\"datacgbdupslw\",\"osipl\":\"datanqueqeabedf\",\"hrgeymspvgatzru\":\"dataygpsahupmmsh\"}}],\"userProperties\":[{\"name\":\"rjswrptxr\",\"value\":\"datacwdleivmuqmzx\"}],\"\":{\"xazkqiqzaead\":\"datahlpnyehhqytjr\"}},{\"type\":\"hpokflxivhozhr\",\"name\":\"bvfljxljgtirn\",\"description\":\"zrbkhyzufkzqtv\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"uupeflk\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"ydntupbrvdgtblx\":\"datahdkeayuowivpne\",\"ztlsnkwullvu\":\"datamdabpifygxuaidr\"}},{\"activity\":\"wymosjzmandjjq\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"udjrotqdiaxf\":\"dataq\",\"vjaw\":\"dataiwrfocbetlljqkgl\",\"jlcj\":\"dataxvl\",\"c\":\"dataseqmejerjyz\"}},{\"activity\":\"zbjieeivdrqtlcx\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"vgroewhsnpcwy\":\"datamfxlturx\"}}],\"userProperties\":[{\"name\":\"berxnljtvu\",\"value\":\"datasnzuebyznkd\"},{\"name\":\"hzc\",\"value\":\"dataamnzt\"}],\"\":{\"rrneorb\":\"datasbgksfjqfeeqhjc\"}}]}") .toObject(ForEachActivityTypeProperties.class); - Assertions.assertEquals(false, model.isSequential()); - 
Assertions.assertEquals(614609889, model.batchCount()); - Assertions.assertEquals("erlrqtqnxhu", model.items().value()); - Assertions.assertEquals("tqveumwbmqpbfjbs", model.activities().get(0).name()); - Assertions.assertEquals("jqkykjzbxmgsxb", model.activities().get(0).description()); + Assertions.assertEquals(true, model.isSequential()); + Assertions.assertEquals(1961751824, model.batchCount()); + Assertions.assertEquals("rigyg", model.items().value()); + Assertions.assertEquals("rvkx", model.activities().get(0).name()); + Assertions.assertEquals("fzs", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("oqfeobkmxohmr", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("ucvq", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("khtmmkm", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("rjswrptxr", model.activities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ForEachActivityTypeProperties model = new ForEachActivityTypeProperties().withIsSequential(false) - .withBatchCount(614609889).withItems(new Expression().withValue("erlrqtqnxhu")) - .withActivities(Arrays.asList( - new Activity().withName("tqveumwbmqpbfjbs").withDescription("jqkykjzbxmgsxb") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("oqfeobkmxohmr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("jaxkby") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("khtmmkm").withValue("datazlh"), - new UserProperty().withName("trqhncscaynh").withValue("datamziwxwwpi"), new UserProperty() - .withName("wl").withValue("dataluqqlpphotbsgkl"), - new UserProperty().withName("uct").withValue("datafzhvxqotwcfbq"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("aule").withDescription("dxzno").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("utvqgnugiiy") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mnpodsqil") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lbnroxgwqgbv") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("jiktwfjyl") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - 
.withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("nhszmuvareakcxda").withValue("datanmnmqydpieleruoy"), - new UserProperty().withName("npcrsfqwqm").withValue("dataisjqo"), - new UserProperty().withName("vjurjczdelqaz").withValue("datazixgqrk"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("i").withDescription("y").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("dujkdacuvyeck") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("uq") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("duu").withValue("datakrskqgokhpzvph"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity() - .withName("mgeovjiezk").withDescription("exldocqhl").withState( - ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("exmfeechl") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("oc") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("iffzpkrno") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rxoidmnsmd") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("rdsmrvp").withValue("datawewgda"), - new UserProperty().withName("hzdhszk").withValue("datadlilkw"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + ForEachActivityTypeProperties model + = new ForEachActivityTypeProperties().withIsSequential(true) + .withBatchCount(1961751824) + .withItems(new Expression().withValue("rigyg")) + .withActivities( + Arrays + .asList( + new Activity().withName("rvkx") + .withDescription("fzs") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn( + Arrays.asList( + new ActivityDependency().withActivity("ucvq") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("riddcnljllypchqh") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("qyvwzxqmvestnr") + .withDependencyConditions(Arrays + .asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays + .asList(new UserProperty().withName("rjswrptxr").withValue("datacwdleivmuqmzx"))) + 
.withAdditionalProperties(mapOf("type", "puqilpdzby")), + new Activity().withName("bvfljxljgtirn") + .withDescription("zrbkhyzufkzqtv") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("uupeflk") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wymosjzmandjjq") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("zbjieeivdrqtlcx") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList( + new UserProperty().withName("berxnljtvu").withValue("datasnzuebyznkd"), + new UserProperty().withName("hzc").withValue("dataamnzt"))) + .withAdditionalProperties(mapOf("type", "hpokflxivhozhr")))); model = BinaryData.fromObject(model).toObject(ForEachActivityTypeProperties.class); - Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(614609889, model.batchCount()); - Assertions.assertEquals("erlrqtqnxhu", model.items().value()); - Assertions.assertEquals("tqveumwbmqpbfjbs", model.activities().get(0).name()); - Assertions.assertEquals("jqkykjzbxmgsxb", model.activities().get(0).description()); + Assertions.assertEquals(true, model.isSequential()); + Assertions.assertEquals(1961751824, model.batchCount()); + Assertions.assertEquals("rigyg", model.items().value()); + Assertions.assertEquals("rvkx", model.activities().get(0).name()); + Assertions.assertEquals("fzs", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("oqfeobkmxohmr", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("ucvq", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("khtmmkm", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("rjswrptxr", model.activities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java index 084ddd494970b..a08f5b9d386c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java @@ -13,14 +13,13 @@ public final class FormatReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FormatReadSettings model = BinaryData.fromString( - "{\"type\":\"FormatReadSettings\",\"\":{\"hvcrjqzbmyftzbx\":\"datavcjdrmknk\",\"sbzmixwaxtnk\":\"datagosrbullqnfzsegu\"}}") + 
"{\"type\":\"lgkkiuq\",\"\":{\"qaf\":\"dataafobpyeobrtae\",\"kelow\":\"datahpkiiunyrob\",\"upski\":\"datamrvdtqhrtnqssqy\",\"aued\":\"dataokphamefzzgwj\"}}") .toObject(FormatReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FormatReadSettings model - = new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings")); + FormatReadSettings model = new FormatReadSettings().withAdditionalProperties(mapOf("type", "lgkkiuq")); model = BinaryData.fromObject(model).toObject(FormatReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java index 4bfb078e0a525..c8cfa26262be6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java @@ -13,14 +13,13 @@ public final class FormatWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FormatWriteSettings model = BinaryData.fromString( - "{\"type\":\"FormatWriteSettings\",\"\":{\"oksstaljiqlxjjl\":\"datagqsqvf\",\"qhscaand\":\"datauymna\",\"lsb\":\"datalvccuvcva\"}}") + "{\"type\":\"hgnugcbjx\",\"\":{\"wrjuiuzlf\":\"datadg\",\"ujunqwkjfmt\":\"datahzihlzljqcmmgsm\",\"xbckxen\":\"dataybdz\"}}") .toObject(FormatWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FormatWriteSettings model - = new FormatWriteSettings().withAdditionalProperties(mapOf("type", "FormatWriteSettings")); + FormatWriteSettings model = new FormatWriteSettings().withAdditionalProperties(mapOf("type", "hgnugcbjx")); model = BinaryData.fromObject(model).toObject(FormatWriteSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java index b14ae7e2af3d9..91da7a9223f75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java @@ -11,18 +11,23 @@ public final class FtpReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FtpReadSettings model = BinaryData.fromString( - "{\"type\":\"FtpReadSettings\",\"recursive\":\"datadqgy\",\"wildcardFolderPath\":\"dataulzguvckpdp\",\"wildcardFileName\":\"datanrjqskikqd\",\"enablePartitionDiscovery\":\"dataybqtlvofjjsetiz\",\"partitionRootPath\":\"datanadn\",\"deleteFilesAfterCompletion\":\"datasbpxlserqgxnh\",\"fileListPath\":\"dataccd\",\"useBinaryTransfer\":\"dataxybn\",\"disableChunking\":\"datahmpmeglolpot\",\"maxConcurrentConnections\":\"datamb\",\"disableMetricsCollection\":\"dataqjrytymfnojjh\",\"\":{\"y\":\"datathjqgovvivlr\",\"wyt\":\"datarafet\",\"luolgspyqsapnh\":\"datavpiilgy\"}}") + 
"{\"type\":\"lewlwbxufq\",\"recursive\":\"dataccvxqbxgq\",\"wildcardFolderPath\":\"datawnriwxe\",\"wildcardFileName\":\"databv\",\"enablePartitionDiscovery\":\"dataldi\",\"partitionRootPath\":\"dataxsvzwbktalobxl\",\"deleteFilesAfterCompletion\":\"datajthmibqgld\",\"fileListPath\":\"datatkalp\",\"useBinaryTransfer\":\"datanny\",\"disableChunking\":\"datajea\",\"maxConcurrentConnections\":\"datakkvij\",\"disableMetricsCollection\":\"dataf\",\"\":{\"aqoaopzqpf\":\"datadzowdqvqfl\",\"ee\":\"datanjdyoxform\"}}") .toObject(FtpReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FtpReadSettings model = new FtpReadSettings().withMaxConcurrentConnections("datamb") - .withDisableMetricsCollection("dataqjrytymfnojjh").withRecursive("datadqgy") - .withWildcardFolderPath("dataulzguvckpdp").withWildcardFileName("datanrjqskikqd") - .withEnablePartitionDiscovery("dataybqtlvofjjsetiz").withPartitionRootPath("datanadn") - .withDeleteFilesAfterCompletion("datasbpxlserqgxnh").withFileListPath("dataccd") - .withUseBinaryTransfer("dataxybn").withDisableChunking("datahmpmeglolpot"); + FtpReadSettings model = new FtpReadSettings().withMaxConcurrentConnections("datakkvij") + .withDisableMetricsCollection("dataf") + .withRecursive("dataccvxqbxgq") + .withWildcardFolderPath("datawnriwxe") + .withWildcardFileName("databv") + .withEnablePartitionDiscovery("dataldi") + .withPartitionRootPath("dataxsvzwbktalobxl") + .withDeleteFilesAfterCompletion("datajthmibqgld") + .withFileListPath("datatkalp") + .withUseBinaryTransfer("datanny") + .withDisableChunking("datajea"); model = BinaryData.fromObject(model).toObject(FtpReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java index 11d8c3eaa6ca7..1a2a2e13de3f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java @@ -11,13 +11,13 @@ public final class FtpServerLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { FtpServerLocation model = BinaryData.fromString( - "{\"type\":\"FtpServerLocation\",\"folderPath\":\"dataxfjwp\",\"fileName\":\"dataktpmbmxb\",\"\":{\"hxsdplaumydmhwe\":\"datawgzzxljb\",\"xydgtokvqbvwg\":\"datajf\"}}") + "{\"type\":\"ymgqbgcx\",\"folderPath\":\"dataqxgzxler\",\"fileName\":\"datamerghski\",\"\":{\"jqnvtsdyd\":\"datapupkcbkfukd\",\"yuf\":\"datahkpafyaloowwzizz\",\"ti\":\"dataqz\"}}") .toObject(FtpServerLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - FtpServerLocation model = new FtpServerLocation().withFolderPath("dataxfjwp").withFileName("dataktpmbmxb"); + FtpServerLocation model = new FtpServerLocation().withFolderPath("dataqxgzxler").withFileName("datamerghski"); model = BinaryData.fromObject(model).toObject(FtpServerLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java index d421b52f45ef0..5c024af5871c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java @@ -10,13 +10,13 @@ public final class GenericDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GenericDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datafzvlqquyhbce\"}") - .toObject(GenericDatasetTypeProperties.class); + GenericDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"datalioklsuffp\"}").toObject(GenericDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GenericDatasetTypeProperties model = new GenericDatasetTypeProperties().withTableName("datafzvlqquyhbce"); + GenericDatasetTypeProperties model = new GenericDatasetTypeProperties().withTableName("datalioklsuffp"); model = BinaryData.fromObject(model).toObject(GenericDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java index 3820826ec9ee8..26337b58492cc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java @@ -13,18 +13,18 @@ public final class GetDataFactoryOperationStatusResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GetDataFactoryOperationStatusResponse model - = BinaryData.fromString("{\"status\":\"btyi\",\"\":{\"fqjpnqno\":\"datavpi\"}}") - .toObject(GetDataFactoryOperationStatusResponse.class); - Assertions.assertEquals("btyi", model.status()); + GetDataFactoryOperationStatusResponse model = BinaryData.fromString( + "{\"status\":\"br\",\"\":{\"pglaoh\":\"datamdyfoebojtj\",\"jtnqjil\":\"dataqk\",\"ilm\":\"dataywkdcwmqsy\"}}") + .toObject(GetDataFactoryOperationStatusResponse.class); + Assertions.assertEquals("br", model.status()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { GetDataFactoryOperationStatusResponse model - = new GetDataFactoryOperationStatusResponse().withStatus("btyi").withAdditionalProperties(mapOf()); + = new GetDataFactoryOperationStatusResponse().withStatus("br").withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(GetDataFactoryOperationStatusResponse.class); - Assertions.assertEquals("btyi", model.status()); + Assertions.assertEquals("br", model.status()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java index 02c4de75ece18..cb8d96f8d0d2a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java @@ -25,54 +25,73 @@ public final class GetMetadataActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GetMetadataActivity model = BinaryData.fromString( - "{\"type\":\"GetMetadata\",\"typeProperties\":{\"dataset\":{\"referenceName\":\"qwvwfombcgr\",\"parameters\":{\"qtydfyctkr\":\"datanrco\",\"wrhoma\":\"dataagxzmrxxmgzslnnc\"}},\"fieldList\":[\"dataiwupooneoqyetfxy\"],\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataqvkdpnqcup\",\"disableMetricsCollection\":\"datar\",\"\":{\"vsqmzee\":\"datafekcue\"}},\"formatSettings\":{\"type\":\"FormatReadSettings\",\"\":{\"y\":\"datav\"}}},\"linkedServiceName\":{\"referenceName\":\"ifnmccfgkb\",\"parameters\":{\"sdcmgmvatnf\":\"datamnam\",\"vaap\":\"datahyrh\",\"uj\":\"datagyyufhcfeggy\"}},\"policy\":{\"timeout\":\"datavazqsbrqspvl\",\"retry\":\"dataxvuju\",\"retryIntervalInSeconds\":1606585230,\"secureInput\":false,\"secureOutput\":false,\"\":{\"ayoaskullqweo\":\"datainjc\",\"ngymbzawdwtzx\":\"datab\",\"pwvhiaxkm\":\"databqzplzyjktc\",\"fhlwgka\":\"dataitczuscqobujfx\"}},\"name\":\"xp\",\"description\":\"mbdhccmjo\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"swfbqycubm\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"lkxwchslbiptsf\":\"dataewd\",\"unjegomegma\":\"datacwaobuimfda\"}}],\"userProperties\":[{\"name\":\"eablknqnqqcgi\",\"value\":\"dataffdeogm\"}],\"\":{\"po\":\"dataopjlgtcswqxeva\",\"gn\":\"datamxtcnmocskpgn\",\"dezm\":\"dataguqfnhmmvedj\"}}") + 
"{\"type\":\"cnsxhp\",\"typeProperties\":{\"dataset\":{\"referenceName\":\"kvo\",\"parameters\":{\"tokwembonureklg\":\"datarphvmezdfad\",\"uwujxx\":\"datanpajwgwxctdp\",\"bzisqpstxulnn\":\"datamookh\",\"vttdyviflt\":\"datajiucnonia\"}},\"fieldList\":[\"databzfyllkunwinqyw\",\"datavx\"],\"storeSettings\":{\"type\":\"tjecerqh\",\"maxConcurrentConnections\":\"dataqkxjlyjlk\",\"disableMetricsCollection\":\"datamugyayhpdstlsd\",\"\":{\"zxgwkkyka\":\"datageeqcgunsoikevrg\",\"aqxnkdqsy\":\"datap\",\"wkwzxjez\":\"datamrktwkfgv\",\"xduhydxahjudaz\":\"datastirrhbkzzqwikq\"}},\"formatSettings\":{\"type\":\"sxolwofof\",\"\":{\"fxwlwh\":\"dataudf\",\"gacc\":\"datapykfcccau\",\"vgemblntdynp\":\"datajqupjxdbgmgx\"}}},\"linkedServiceName\":{\"referenceName\":\"scsrw\",\"parameters\":{\"gawenwtmvzzsb\":\"datateusuxvli\",\"td\":\"datanvwgizv\",\"kz\":\"datajo\"}},\"policy\":{\"timeout\":\"dataphfppjzmpxam\",\"retry\":\"datadostvxtk\",\"retryIntervalInSeconds\":690485643,\"secureInput\":false,\"secureOutput\":true,\"\":{\"dussqfzer\":\"datakkooc\",\"umczlknfwslvsp\":\"datapaivkgdrqkvnp\",\"xmvl\":\"datarvhzfynbxwz\",\"zxnwxsjygigepfok\":\"datauvbesra\"}},\"name\":\"eyzzydpv\",\"description\":\"chdjarfdfnqfvr\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"bvbdqmjcedfpub\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Failed\",\"Skipped\"],\"\":{\"borj\":\"datamlscvhrawy\",\"fqzud\":\"dataxhkdyhdk\",\"g\":\"dataqfde\"}},{\"activity\":\"tplpgf\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"ekkxlibs\":\"datab\",\"ujvv\":\"datacvceglvz\",\"lazaoy\":\"dataeadp\",\"sspw\":\"datakubmvxnumvorosq\"}},{\"activity\":\"tkjrqs\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"lumqmo\":\"datazbyu\",\"vlunyqe\":\"dataovpsflmwduis\"}}],\"userProperties\":[{\"name\":\"erzthcfnrle\",\"value\":\"dataghhcf\"},{\"name\":\"zmjmfl\",\"value\":\"datazz\"},{\"name\":\"kmtrrc\",\"value\":\"dataulvauxkgklqucxew\"},{\"name\":\"dprqjsmh\",\"value\":\"dataqzvarq\"}],\"\":{\"vnvxhxzabxhm\":\"datadllhi\",\"rt\":\"dataorxbuaprksoeqi\"}}") .toObject(GetMetadataActivity.class); - Assertions.assertEquals("xp", model.name()); - Assertions.assertEquals("mbdhccmjo", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("swfbqycubm", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("eablknqnqqcgi", model.userProperties().get(0).name()); - Assertions.assertEquals("ifnmccfgkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1606585230, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("eyzzydpv", model.name()); + Assertions.assertEquals("chdjarfdfnqfvr", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("bvbdqmjcedfpub", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("erzthcfnrle", model.userProperties().get(0).name()); + Assertions.assertEquals("scsrw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(690485643, model.policy().retryIntervalInSeconds()); 
Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("qwvwfombcgr", model.dataset().referenceName()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals("kvo", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GetMetadataActivity model = new GetMetadataActivity().withName("xp").withDescription("mbdhccmjo") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("swfbqycubm") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("eablknqnqqcgi").withValue("dataffdeogm"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ifnmccfgkb") - .withParameters(mapOf("sdcmgmvatnf", "datamnam", "vaap", "datahyrh", "uj", "datagyyufhcfeggy"))) - .withPolicy(new ActivityPolicy().withTimeout("datavazqsbrqspvl").withRetry("dataxvuju") - .withRetryIntervalInSeconds(1606585230).withSecureInput(false).withSecureOutput(false) + GetMetadataActivity model = new GetMetadataActivity().withName("eyzzydpv") + .withDescription("chdjarfdfnqfvr") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("bvbdqmjcedfpub") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("tplpgf") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("tkjrqs") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("erzthcfnrle").withValue("dataghhcf"), + new UserProperty().withName("zmjmfl").withValue("datazz"), + new UserProperty().withName("kmtrrc").withValue("dataulvauxkgklqucxew"), + new UserProperty().withName("dprqjsmh").withValue("dataqzvarq"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("scsrw") + .withParameters(mapOf("gawenwtmvzzsb", "datateusuxvli", "td", "datanvwgizv", "kz", "datajo"))) + .withPolicy(new ActivityPolicy().withTimeout("dataphfppjzmpxam") + .withRetry("datadostvxtk") + .withRetryIntervalInSeconds(690485643) + .withSecureInput(false) + .withSecureOutput(true) .withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("qwvwfombcgr") - .withParameters(mapOf("qtydfyctkr", "datanrco", "wrhoma", "dataagxzmrxxmgzslnnc"))) - .withFieldList(Arrays.asList("dataiwupooneoqyetfxy")) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataqvkdpnqcup") - .withDisableMetricsCollection("datar").withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings"))); + .withDataset(new DatasetReference().withReferenceName("kvo") + .withParameters(mapOf("tokwembonureklg", 
"datarphvmezdfad", "uwujxx", "datanpajwgwxctdp", + "bzisqpstxulnn", "datamookh", "vttdyviflt", "datajiucnonia"))) + .withFieldList(Arrays.asList("databzfyllkunwinqyw", "datavx")) + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataqkxjlyjlk") + .withDisableMetricsCollection("datamugyayhpdstlsd") + .withAdditionalProperties(mapOf("type", "tjecerqh"))) + .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "sxolwofof"))); model = BinaryData.fromObject(model).toObject(GetMetadataActivity.class); - Assertions.assertEquals("xp", model.name()); - Assertions.assertEquals("mbdhccmjo", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("swfbqycubm", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("eablknqnqqcgi", model.userProperties().get(0).name()); - Assertions.assertEquals("ifnmccfgkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1606585230, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("eyzzydpv", model.name()); + Assertions.assertEquals("chdjarfdfnqfvr", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("bvbdqmjcedfpub", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("erzthcfnrle", model.userProperties().get(0).name()); + Assertions.assertEquals("scsrw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(690485643, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("qwvwfombcgr", model.dataset().referenceName()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals("kvo", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java index cddb6d5a4b991..5c4d92ee5f87a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java @@ -18,23 +18,24 @@ public final class GetMetadataActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GetMetadataActivityTypeProperties model = BinaryData.fromString( - 
"{\"dataset\":{\"referenceName\":\"pbezlucxbuda\",\"parameters\":{\"i\":\"datamvvbwrunrgmyv\",\"xoahfvkyhfdth\":\"datavxlhfmkl\"}},\"fieldList\":[\"datafzxseqscoy\",\"dataxbaw\",\"dataisbhkeskgnj\",\"dataavoqcyl\"],\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datadmrqra\",\"disableMetricsCollection\":\"dataxwzewpngcocboh\",\"\":{\"vsugentrlzbwtivg\":\"dataob\",\"kmwyikoanep\":\"datacrrbswbxizmxvd\"}},\"formatSettings\":{\"type\":\"FormatReadSettings\",\"\":{\"ajonjdhbqw\":\"datavnbzgl\",\"f\":\"dataugsgp\",\"xwmjlmosqh\":\"dataaykzwij\"}}}") + "{\"dataset\":{\"referenceName\":\"badrcy\",\"parameters\":{\"hghorgji\":\"datajaktgtwvzp\",\"bqdsuaazkouvvgcw\":\"dataragqcwcdbtopuyi\",\"gaofwo\":\"dataimhjbxwr\",\"xp\":\"dataz\"}},\"fieldList\":[\"datax\",\"datam\"],\"storeSettings\":{\"type\":\"fsh\",\"maxConcurrentConnections\":\"dataxjhquzi\",\"disableMetricsCollection\":\"datacgh\",\"\":{\"hryuzteuegrdit\":\"dataf\",\"ajggmmiwoisql\":\"dataptpq\"}},\"formatSettings\":{\"type\":\"fycnpovnjzaaox\",\"\":{\"qpzdxw\":\"datatoihoyvuaxfjuzgs\",\"oggzppufu\":\"dataanlgczvfbzzsce\"}}}") .toObject(GetMetadataActivityTypeProperties.class); - Assertions.assertEquals("pbezlucxbuda", model.dataset().referenceName()); + Assertions.assertEquals("badrcy", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { GetMetadataActivityTypeProperties model = new GetMetadataActivityTypeProperties() - .withDataset(new DatasetReference().withReferenceName("pbezlucxbuda") - .withParameters(mapOf("i", "datamvvbwrunrgmyv", "xoahfvkyhfdth", "datavxlhfmkl"))) - .withFieldList(Arrays.asList("datafzxseqscoy", "dataxbaw", "dataisbhkeskgnj", "dataavoqcyl")) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datadmrqra") - .withDisableMetricsCollection("dataxwzewpngcocboh") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings"))); + .withDataset(new DatasetReference().withReferenceName("badrcy") + .withParameters(mapOf("hghorgji", "datajaktgtwvzp", "bqdsuaazkouvvgcw", "dataragqcwcdbtopuyi", "gaofwo", + "dataimhjbxwr", "xp", "dataz"))) + .withFieldList(Arrays.asList("datax", "datam")) + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataxjhquzi") + .withDisableMetricsCollection("datacgh") + .withAdditionalProperties(mapOf("type", "fsh"))) + .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "fycnpovnjzaaox"))); model = BinaryData.fromObject(model).toObject(GetMetadataActivityTypeProperties.class); - Assertions.assertEquals("pbezlucxbuda", model.dataset().referenceName()); + Assertions.assertEquals("badrcy", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java index f13cc9b45f109..0da1b89798f4e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java @@ -11,15 
+11,15 @@ public final class GetSsisObjectMetadataRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GetSsisObjectMetadataRequest model = BinaryData.fromString("{\"metadataPath\":\"jriplrbpbewtghf\"}") - .toObject(GetSsisObjectMetadataRequest.class); - Assertions.assertEquals("jriplrbpbewtghf", model.metadataPath()); + GetSsisObjectMetadataRequest model + = BinaryData.fromString("{\"metadataPath\":\"wgxhn\"}").toObject(GetSsisObjectMetadataRequest.class); + Assertions.assertEquals("wgxhn", model.metadataPath()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GetSsisObjectMetadataRequest model = new GetSsisObjectMetadataRequest().withMetadataPath("jriplrbpbewtghf"); + GetSsisObjectMetadataRequest model = new GetSsisObjectMetadataRequest().withMetadataPath("wgxhn"); model = BinaryData.fromObject(model).toObject(GetSsisObjectMetadataRequest.class); - Assertions.assertEquals("jriplrbpbewtghf", model.metadataPath()); + Assertions.assertEquals("wgxhn", model.metadataPath()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java index 588ecb61fcf9c..ae6f964c310d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java @@ -18,28 +18,40 @@ public final class GlobalParameterListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GlobalParameterListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"mg\":{\"type\":\"String\",\"value\":\"datayrrueqth\"},\"cbbxigdhxi\":{\"type\":\"String\",\"value\":\"datab\"}},\"name\":\"lopedbwdpyqyyb\",\"type\":\"bmdnafcbqwre\",\"etag\":\"ela\",\"id\":\"cigeleohdbvqvw\"}],\"nextLink\":\"jopwbeonrlkwz\"}") + "{\"value\":[{\"properties\":{\"urjtumghi\":{\"type\":\"Float\",\"value\":\"dataafbwqroohtuovmao\"},\"mslclblyjxlt\":{\"type\":\"Float\",\"value\":\"datave\"}},\"name\":\"juscvsfx\",\"type\":\"ctmgxuupbezqccy\",\"etag\":\"tce\",\"id\":\"d\"},{\"properties\":{\"zgwldoychillcec\":{\"type\":\"Object\",\"value\":\"datayihztgeqmg\"},\"llizs\":{\"type\":\"Array\",\"value\":\"datahuwaoaguhic\"}},\"name\":\"csjvhrwef\",\"type\":\"wqejpmvsse\",\"etag\":\"epwamcxtcz\",\"id\":\"peuknijd\"},{\"properties\":{\"ydjfb\":{\"type\":\"Object\",\"value\":\"dataes\"}},\"name\":\"yv\",\"type\":\"ulrtywikdmh\",\"etag\":\"kuflgbh\",\"id\":\"uacdixmxuf\"}],\"nextLink\":\"ryjqgdkf\"}") .toObject(GlobalParameterListResponse.class); - Assertions.assertEquals("cigeleohdbvqvw", model.value().get(0).id()); - Assertions.assertEquals(GlobalParameterType.STRING, model.value().get(0).properties().get("mg").type()); - Assertions.assertEquals("jopwbeonrlkwz", model.nextLink()); + Assertions.assertEquals("d", model.value().get(0).id()); + Assertions.assertEquals(GlobalParameterType.FLOAT, model.value().get(0).properties().get("urjtumghi").type()); + Assertions.assertEquals("ryjqgdkf", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GlobalParameterListResponse model - = new 
GlobalParameterListResponse() - .withValue(Arrays.asList(new GlobalParameterResourceInner().withId("cigeleohdbvqvw") - .withProperties(mapOf("mg", - new GlobalParameterSpecification().withType(GlobalParameterType.STRING) - .withValue("datayrrueqth"), - "cbbxigdhxi", - new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("datab"))))) - .withNextLink("jopwbeonrlkwz"); + GlobalParameterListResponse model = new GlobalParameterListResponse() + .withValue(Arrays.asList( + new GlobalParameterResourceInner().withId("d") + .withProperties(mapOf( + "urjtumghi", + new GlobalParameterSpecification() + .withType(GlobalParameterType.FLOAT) + .withValue("dataafbwqroohtuovmao"), + "mslclblyjxlt", + new GlobalParameterSpecification().withType(GlobalParameterType.FLOAT).withValue("datave"))), + new GlobalParameterResourceInner().withId("peuknijd") + .withProperties(mapOf("zgwldoychillcec", + new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT) + .withValue("datayihztgeqmg"), + "llizs", + new GlobalParameterSpecification().withType(GlobalParameterType.ARRAY) + .withValue("datahuwaoaguhic"))), + new GlobalParameterResourceInner().withId("uacdixmxuf") + .withProperties(mapOf("ydjfb", + new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT).withValue("dataes"))))) + .withNextLink("ryjqgdkf"); model = BinaryData.fromObject(model).toObject(GlobalParameterListResponse.class); - Assertions.assertEquals("cigeleohdbvqvw", model.value().get(0).id()); - Assertions.assertEquals(GlobalParameterType.STRING, model.value().get(0).properties().get("mg").type()); - Assertions.assertEquals("jopwbeonrlkwz", model.nextLink()); + Assertions.assertEquals("d", model.value().get(0).id()); + Assertions.assertEquals(GlobalParameterType.FLOAT, model.value().get(0).properties().get("urjtumghi").type()); + Assertions.assertEquals("ryjqgdkf", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java index ca3175b938df1..13d10dcd997dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java @@ -16,24 +16,20 @@ public final class GlobalParameterResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GlobalParameterResourceInner model = BinaryData.fromString( - "{\"properties\":{\"xcptsoqfyiaseqc\":{\"type\":\"Object\",\"value\":\"databxcea\"},\"mvanbwzo\":{\"type\":\"Array\",\"value\":\"datarttzrazisgykiu\"},\"mdptys\":{\"type\":\"String\",\"value\":\"datanrxxbsojklin\"}},\"name\":\"qsgnzxojpsl\",\"type\":\"jgpliuf\",\"etag\":\"woyxqvapcohhou\",\"id\":\"pqojxcx\"}") + "{\"properties\":{\"vjhvefgwbm\":{\"type\":\"Bool\",\"value\":\"dataoeoq\"}},\"name\":\"c\",\"type\":\"tasfaymxbulpzeal\",\"etag\":\"qkyojwyvf\",\"id\":\"btsuahxs\"}") .toObject(GlobalParameterResourceInner.class); - Assertions.assertEquals("pqojxcx", model.id()); - Assertions.assertEquals(GlobalParameterType.OBJECT, model.properties().get("xcptsoqfyiaseqc").type()); + Assertions.assertEquals("btsuahxs", 
model.id()); + Assertions.assertEquals(GlobalParameterType.BOOL, model.properties().get("vjhvefgwbm").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GlobalParameterResourceInner model = new GlobalParameterResourceInner().withId("pqojxcx") - .withProperties(mapOf("xcptsoqfyiaseqc", - new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT).withValue("databxcea"), - "mvanbwzo", - new GlobalParameterSpecification().withType(GlobalParameterType.ARRAY).withValue("datarttzrazisgykiu"), - "mdptys", - new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("datanrxxbsojklin"))); + GlobalParameterResourceInner model = new GlobalParameterResourceInner().withId("btsuahxs") + .withProperties(mapOf("vjhvefgwbm", + new GlobalParameterSpecification().withType(GlobalParameterType.BOOL).withValue("dataoeoq"))); model = BinaryData.fromObject(model).toObject(GlobalParameterResourceInner.class); - Assertions.assertEquals("pqojxcx", model.id()); - Assertions.assertEquals(GlobalParameterType.OBJECT, model.properties().get("xcptsoqfyiaseqc").type()); + Assertions.assertEquals("btsuahxs", model.id()); + Assertions.assertEquals(GlobalParameterType.BOOL, model.properties().get("vjhvefgwbm").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java index 4c2a63edd827a..e456960436565 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java @@ -12,16 +12,17 @@ public final class GlobalParameterSpecificationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GlobalParameterSpecification model = BinaryData.fromString("{\"type\":\"String\",\"value\":\"datarm\"}") - .toObject(GlobalParameterSpecification.class); - Assertions.assertEquals(GlobalParameterType.STRING, model.type()); + GlobalParameterSpecification model + = BinaryData.fromString("{\"type\":\"Object\",\"value\":\"datajiwkuofoskghsau\"}") + .toObject(GlobalParameterSpecification.class); + Assertions.assertEquals(GlobalParameterType.OBJECT, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { GlobalParameterSpecification model - = new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("datarm"); + = new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT).withValue("datajiwkuofoskghsau"); model = BinaryData.fromObject(model).toObject(GlobalParameterSpecification.class); - Assertions.assertEquals(GlobalParameterType.STRING, model.type()); + Assertions.assertEquals(GlobalParameterType.OBJECT, model.type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java index fda1463aec51c..ca2e1399081b2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java @@ -6,63 +6,46 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class GlobalParametersCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"pwvbrtwc\":{\"type\":\"String\",\"value\":\"dataetiydlrjtb\"},\"rkjepdfsg\":{\"type\":\"Int\",\"value\":\"datamjgjlshshva\"},\"bgslllcwf\":{\"type\":\"Int\",\"value\":\"datarfltgbbxghxaqdsi\"}},\"name\":\"bqqu\",\"type\":\"txmg\",\"etag\":\"bcoguerwmljbtytd\",\"id\":\"gpjewqgyex\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - GlobalParameterResource response - = manager.globalParameters().define("fzuxzyrzq").withExistingFactory("wvmmy", "rqlpeyi") - .withProperties(mapOf("pq", - new GlobalParameterSpecification().withType(GlobalParameterType.FLOAT) - .withValue("dataabouerncgvjmk"), - "jhspy", - new GlobalParameterSpecification().withType(GlobalParameterType.FLOAT).withValue("dataolmumz"))) - .create(); - - Assertions.assertEquals("gpjewqgyex", response.id()); - Assertions.assertEquals(GlobalParameterType.STRING, response.properties().get("pwvbrtwc").type()); + = 
"{\"properties\":{\"cuieuid\":{\"type\":\"String\",\"value\":\"datauhtsnkeelunhjm\"},\"vekcv\":{\"type\":\"Array\",\"value\":\"dataeoh\"},\"ppmejdocwglrknoz\":{\"type\":\"Int\",\"value\":\"dataapwhicuafml\"},\"qcvruafsyjgvty\":{\"type\":\"Array\",\"value\":\"dataiaqtqzimllznw\"}},\"name\":\"pzdwdicauzexzl\",\"type\":\"nfrcjyulmvtej\",\"etag\":\"plavbjujbmw\",\"id\":\"d\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + GlobalParameterResource response = manager.globalParameters() + .define("ehaky") + .withExistingFactory("xjkkdltfdbe", "bllqi") + .withProperties(mapOf("kz", + new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("dataazjuu"), + "wljjcsuruiwopurh", + new GlobalParameterSpecification().withType(GlobalParameterType.ARRAY).withValue("datalko"), "o", + new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("dataxtnjfmhafqt"))) + .create(); + + Assertions.assertEquals("d", response.id()); + Assertions.assertEquals(GlobalParameterType.STRING, response.properties().get("cuieuid").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java index 641383d3cfb35..aebd7f54525b6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class GlobalParametersDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - 
.thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.globalParameters().deleteWithResponse("hhm", "honnmbaottulka", "wonmizwfuk", - com.azure.core.util.Context.NONE); + manager.globalParameters().deleteWithResponse("vbdpguwzqiy", "jhmau", "fmvy", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java index befe60cdf857e..871b4c3fc1a7c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java @@ -6,53 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class GlobalParametersGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"wiautvehpvlm\":{\"type\":\"Bool\",\"value\":\"datablw\"},\"yfyfubtrtaipji\":{\"type\":\"String\",\"value\":\"datanlmzeqhqf\"}},\"name\":\"dowcgqc\",\"type\":\"vqqyxziutqnv\",\"etag\":\"trnwgchvgpun\",\"id\":\"fogccac\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - 
.thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"bdzmk\":{\"type\":\"String\",\"value\":\"dataggcphcu\"},\"kmtjd\":{\"type\":\"String\",\"value\":\"dataydbgyb\"},\"qqxmmzwdplru\":{\"type\":\"String\",\"value\":\"databbrtkeejju\"}},\"name\":\"xzearscwfocvvdse\",\"type\":\"qllpb\",\"etag\":\"cezbfgokeboaa\",\"id\":\"uoqafhvvbqyk\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); GlobalParameterResource response = manager.globalParameters() - .getWithResponse("qpmdojbm", "johu", "uvnbiujt", com.azure.core.util.Context.NONE).getValue(); + .getWithResponse("jbkcyppagvyioixa", "geclaxcgdybi", "mgrbjgmnoczbxekz", com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("fogccac", response.id()); - Assertions.assertEquals(GlobalParameterType.BOOL, response.properties().get("wiautvehpvlm").type()); + Assertions.assertEquals("uoqafhvvbqyk", response.id()); + Assertions.assertEquals(GlobalParameterType.STRING, response.properties().get("bdzmk").type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java index 7c6311a373508..822f7b5a633c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java @@ -6,55 +6,37 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; 
-import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class GlobalParametersListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"njxnoqxgfvgpimtn\":{\"type\":\"Int\",\"value\":\"dataumz\"}},\"name\":\"fjdgfmesmeaoa\",\"type\":\"qbaeoozjncu\",\"etag\":\"rdyeilxktseaahax\",\"id\":\"fprqwopjnrafli\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"hhatpnl\":{\"type\":\"Object\",\"value\":\"datafcltojppxiqvsu\"}},\"name\":\"ryhxp\",\"type\":\"rnggqhysia\",\"etag\":\"k\",\"id\":\"x\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.globalParameters().listByFactory("ofr", "wsavdijbium", com.azure.core.util.Context.NONE); + = manager.globalParameters().listByFactory("omrmxtuw", "rksyunxk", com.azure.core.util.Context.NONE); - Assertions.assertEquals("fprqwopjnrafli", response.iterator().next().id()); - Assertions.assertEquals(GlobalParameterType.INT, - response.iterator().next().properties().get("njxnoqxgfvgpimtn").type()); + Assertions.assertEquals("x", response.iterator().next().id()); + Assertions.assertEquals(GlobalParameterType.OBJECT, + response.iterator().next().properties().get("hhatpnl").type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java index b95206627ca42..bc3970641147f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java @@ -19,34 +19,31 @@ public final class GoogleAdWordsObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleAdWordsObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"GoogleAdWordsObject\",\"typeProperties\":{\"tableName\":\"datapdcbgrufsdbkuxkd\"},\"description\":\"m\",\"structure\":\"dataivxwkscwbshfih\",\"schema\":\"datamsceylaulpue\",\"linkedServiceName\":{\"referenceName\":\"yi\",\"parameters\":{\"xdslspgnndef\":\"datatye\",\"yltaprqtfkmvzrk\":\"datahsbyhwlvsv\"}},\"parameters\":{\"ukkmv\":{\"type\":\"Array\",\"defaultValue\":\"datadwfcuhbgftfv\"},\"hhxlsube\":{\"type\":\"Bool\",\"defaultValue\":\"dataegpdqrjylwqqsem\"},\"wyktdp\":{\"type\":\"Int\",\"defaultValue\":\"databejrd\"},\"jkykqf\":{\"type\":\"Object\",\"defaultValue\":\"dataufifnjwjh\"}},\"annotations\":[\"datacyk\"],\"folder\":{\"name\":\"smkb\"},\"\":{\"ejnoignyd\":\"datarihpjaxhcb\",\"bnmrmhkipjardvdp\":\"datakrnp\",\"pbie\":\"datagwdxmiael\",\"nddvjlpbj\":\"datal\"}}") + "{\"type\":\"qeetsqaclc\",\"typeProperties\":{\"tableName\":\"dataxoibdctjwfeb\"},\"description\":\"rofyyraiai\",\"structure\":\"datakewqwamptld\",\"schema\":\"dataorzljhnxfkffng\",\"linkedServiceName\":{\"referenceName\":\"pilloirm\",\"parameters\":{\"rqmznwwtkuy\":\"databktuqnbcjk\"}},\"parameters\":{\"moyoioxdwff\":{\"type\":\"SecureString\",\"defaultValue\":\"datainlic\"}},\"annotations\":[\"dataqkebc\",\"datarreuec\"],\"folder\":{\"name\":\"d\"},\"\":{\"vkdvcgowrd\":\"datauscolbqvehtekx\",\"yrkwsthpivocf\":\"dataddddwzdlbbcztam\",\"yroswnfqd\":\"dataxhvnodqqzjb\"}}") .toObject(GoogleAdWordsObjectDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("yi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ukkmv").type()); - Assertions.assertEquals("smkb", model.folder().name()); + Assertions.assertEquals("rofyyraiai", model.description()); + Assertions.assertEquals("pilloirm", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("moyoioxdwff").type()); + Assertions.assertEquals("d", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleAdWordsObjectDataset model = new GoogleAdWordsObjectDataset().withDescription("m") - .withStructure("dataivxwkscwbshfih").withSchema("datamsceylaulpue") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yi") - .withParameters(mapOf("xdslspgnndef", "datatye", "yltaprqtfkmvzrk", "datahsbyhwlvsv"))) - .withParameters(mapOf("ukkmv", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datadwfcuhbgftfv"), - "hhxlsube", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataegpdqrjylwqqsem"), - "wyktdp", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databejrd"), - "jkykqf", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataufifnjwjh"))) - .withAnnotations(Arrays.asList("datacyk")).withFolder(new DatasetFolder().withName("smkb")) - .withTableName("datapdcbgrufsdbkuxkd"); + GoogleAdWordsObjectDataset model = new GoogleAdWordsObjectDataset().withDescription("rofyyraiai") + .withStructure("datakewqwamptld") + .withSchema("dataorzljhnxfkffng") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pilloirm") + .withParameters(mapOf("rqmznwwtkuy", "databktuqnbcjk"))) + .withParameters(mapOf("moyoioxdwff", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datainlic"))) + .withAnnotations(Arrays.asList("dataqkebc", "datarreuec")) + .withFolder(new 
DatasetFolder().withName("d")) + .withTableName("dataxoibdctjwfeb"); model = BinaryData.fromObject(model).toObject(GoogleAdWordsObjectDataset.class); - Assertions.assertEquals("m", model.description()); - Assertions.assertEquals("yi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ukkmv").type()); - Assertions.assertEquals("smkb", model.folder().name()); + Assertions.assertEquals("rofyyraiai", model.description()); + Assertions.assertEquals("pilloirm", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("moyoioxdwff").type()); + Assertions.assertEquals("d", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java index 93226d86fbd08..25ba379a43bad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java @@ -11,16 +11,19 @@ public final class GoogleAdWordsSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleAdWordsSource model = BinaryData.fromString( - "{\"type\":\"GoogleAdWordsSource\",\"query\":\"dataozqqwiawbwzyvbui\",\"queryTimeout\":\"dataysatoplqc\",\"additionalColumns\":\"datasrlzwuqkprf\",\"sourceRetryCount\":\"datacowtoqfwbsbkob\",\"sourceRetryWait\":\"datassj\",\"maxConcurrentConnections\":\"datahfcxwrjbrxm\",\"disableMetricsCollection\":\"dataetttul\",\"\":{\"mosiskihf\":\"datajbhespf\"}}") + "{\"type\":\"ty\",\"query\":\"dataese\",\"queryTimeout\":\"dataetackjuwkkva\",\"additionalColumns\":\"datafjuefjbmowqwodmd\",\"sourceRetryCount\":\"datajqwahciapvcsw\",\"sourceRetryWait\":\"datarpcpg\",\"maxConcurrentConnections\":\"datatftaqmrimletjvz\",\"disableMetricsCollection\":\"datafgabiblhzfgl\",\"\":{\"pwri\":\"datagszx\",\"haskuiosl\":\"datakqnbiiandhsjp\",\"rcx\":\"datachcuhvdrfh\"}}") .toObject(GoogleAdWordsSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleAdWordsSource model = new GoogleAdWordsSource().withSourceRetryCount("datacowtoqfwbsbkob") - .withSourceRetryWait("datassj").withMaxConcurrentConnections("datahfcxwrjbrxm") - .withDisableMetricsCollection("dataetttul").withQueryTimeout("dataysatoplqc") - .withAdditionalColumns("datasrlzwuqkprf").withQuery("dataozqqwiawbwzyvbui"); + GoogleAdWordsSource model = new GoogleAdWordsSource().withSourceRetryCount("datajqwahciapvcsw") + .withSourceRetryWait("datarpcpg") + .withMaxConcurrentConnections("datatftaqmrimletjvz") + .withDisableMetricsCollection("datafgabiblhzfgl") + .withQueryTimeout("dataetackjuwkkva") + .withAdditionalColumns("datafjuefjbmowqwodmd") + .withQuery("dataese"); model = BinaryData.fromObject(model).toObject(GoogleAdWordsSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java index 21f8014895ffa..6912ba46325ad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java @@ -10,15 +10,17 @@ public final class GoogleBigQueryDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GoogleBigQueryDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datax\",\"table\":\"dataxlawmvdyqab\",\"dataset\":\"dataopx\"}") - .toObject(GoogleBigQueryDatasetTypeProperties.class); + GoogleBigQueryDatasetTypeProperties model = BinaryData + .fromString("{\"tableName\":\"dataurmrfyyqj\",\"table\":\"dataibzqrsddcuqddld\",\"dataset\":\"datayvfzt\"}") + .toObject(GoogleBigQueryDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleBigQueryDatasetTypeProperties model = new GoogleBigQueryDatasetTypeProperties().withTableName("datax") - .withTable("dataxlawmvdyqab").withDataset("dataopx"); + GoogleBigQueryDatasetTypeProperties model + = new GoogleBigQueryDatasetTypeProperties().withTableName("dataurmrfyyqj") + .withTable("dataibzqrsddcuqddld") + .withDataset("datayvfzt"); model = BinaryData.fromObject(model).toObject(GoogleBigQueryDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java index 9829372969862..5b879f7939e36 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java @@ -19,32 +19,34 @@ public final class GoogleBigQueryObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleBigQueryObjectDataset model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryObject\",\"typeProperties\":{\"tableName\":\"datajwxgvtkjct\",\"table\":\"datapeawzzkvfccozv\",\"dataset\":\"datasphtraitrmsukxtu\"},\"description\":\"gcptct\",\"structure\":\"dataoegyc\",\"schema\":\"dataem\",\"linkedServiceName\":{\"referenceName\":\"vrcclclfkfv\",\"parameters\":{\"vpoip\":\"datammw\",\"fzvvpaysqwhzdc\":\"dataylxtebvse\",\"dblk\":\"dataa\"}},\"parameters\":{\"fvxuosqpffapjpj\":{\"type\":\"Array\",\"defaultValue\":\"datapvdfmoqqc\"},\"yjzua\":{\"type\":\"Object\",\"defaultValue\":\"datazzjsnyfo\"},\"r\":{\"type\":\"Bool\",\"defaultValue\":\"dataubeqkitt\"}},\"annotations\":[\"dataxsoangu\",\"datab\",\"datahogsezre\"],\"folder\":{\"name\":\"gpdtyzpx\"},\"\":{\"hskvsdfvhrypezam\":\"datawkejmgemudup\",\"keq\":\"datapkapvnpeukgnmf\",\"rowsh\":\"dataitromlcsvktfp\"}}") + 
"{\"type\":\"asomafe\",\"typeProperties\":{\"tableName\":\"datab\",\"table\":\"datakpsvok\",\"dataset\":\"dataankxvcptfgfbhn\"},\"description\":\"z\",\"structure\":\"datardxpcpautfzptr\",\"schema\":\"datazytrt\",\"linkedServiceName\":{\"referenceName\":\"f\",\"parameters\":{\"qvp\":\"datadxcyuwen\",\"pmecsmxfzynf\":\"dataawvoqatdjkal\",\"yqkkpgwgss\":\"datam\"}},\"parameters\":{\"vdsqxkjwd\":{\"type\":\"String\",\"defaultValue\":\"datairnbnlq\"}},\"annotations\":[\"datairzyudrq\",\"dataxrxhxmlfouqp\"],\"folder\":{\"name\":\"andbp\"},\"\":{\"jxcqcaczzvwaeztt\":\"datah\",\"qlyyslg\":\"datajqyfy\",\"bdsvkllrzhshhkb\":\"dataf\",\"rgfwhfzh\":\"datahcazkgdjth\"}}") .toObject(GoogleBigQueryObjectDataset.class); - Assertions.assertEquals("gcptct", model.description()); - Assertions.assertEquals("vrcclclfkfv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("fvxuosqpffapjpj").type()); - Assertions.assertEquals("gpdtyzpx", model.folder().name()); + Assertions.assertEquals("z", model.description()); + Assertions.assertEquals("f", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("vdsqxkjwd").type()); + Assertions.assertEquals("andbp", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleBigQueryObjectDataset model = new GoogleBigQueryObjectDataset().withDescription("gcptct") - .withStructure("dataoegyc").withSchema("dataem") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vrcclclfkfv") - .withParameters(mapOf("vpoip", "datammw", "fzvvpaysqwhzdc", "dataylxtebvse", "dblk", "dataa"))) - .withParameters(mapOf("fvxuosqpffapjpj", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapvdfmoqqc"), "yjzua", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazzjsnyfo"), "r", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataubeqkitt"))) - .withAnnotations(Arrays.asList("dataxsoangu", "datab", "datahogsezre")) - .withFolder(new DatasetFolder().withName("gpdtyzpx")).withTableName("datajwxgvtkjct") - .withTable("datapeawzzkvfccozv").withDataset("datasphtraitrmsukxtu"); + GoogleBigQueryObjectDataset model = new GoogleBigQueryObjectDataset().withDescription("z") + .withStructure("datardxpcpautfzptr") + .withSchema("datazytrt") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("f") + .withParameters( + mapOf("qvp", "datadxcyuwen", "pmecsmxfzynf", "dataawvoqatdjkal", "yqkkpgwgss", "datam"))) + .withParameters(mapOf("vdsqxkjwd", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datairnbnlq"))) + .withAnnotations(Arrays.asList("datairzyudrq", "dataxrxhxmlfouqp")) + .withFolder(new DatasetFolder().withName("andbp")) + .withTableName("datab") + .withTable("datakpsvok") + .withDataset("dataankxvcptfgfbhn"); model = BinaryData.fromObject(model).toObject(GoogleBigQueryObjectDataset.class); - Assertions.assertEquals("gcptct", model.description()); - Assertions.assertEquals("vrcclclfkfv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("fvxuosqpffapjpj").type()); - Assertions.assertEquals("gpdtyzpx", model.folder().name()); + Assertions.assertEquals("z", model.description()); + Assertions.assertEquals("f", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, 
model.parameters().get("vdsqxkjwd").type()); + Assertions.assertEquals("andbp", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java index 14b9053013d76..46f9de08c8814 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java @@ -11,16 +11,19 @@ public final class GoogleBigQuerySourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleBigQuerySource model = BinaryData.fromString( - "{\"type\":\"GoogleBigQuerySource\",\"query\":\"dataanirlydsdmacydqa\",\"queryTimeout\":\"datayvwxubgulyz\",\"additionalColumns\":\"dataasxpprohuabdu\",\"sourceRetryCount\":\"datavsoxnpuapt\",\"sourceRetryWait\":\"datawekiqlscmtcljopi\",\"maxConcurrentConnections\":\"datawxvcfchokkcjjnq\",\"disableMetricsCollection\":\"datajoayaj\",\"\":{\"fbzbxeqzvokfrhfa\":\"datacxjmap\",\"uaxdulv\":\"dataxcgjuc\",\"mksgeqpai\":\"dataefsrxqscdbbwej\",\"eotvnet\":\"datalfscosf\"}}") + "{\"type\":\"xlonz\",\"query\":\"datafo\",\"queryTimeout\":\"datassx\",\"additionalColumns\":\"datahhlhprjcfyvkkgx\",\"sourceRetryCount\":\"datadfs\",\"sourceRetryWait\":\"datagw\",\"maxConcurrentConnections\":\"dataghdn\",\"disableMetricsCollection\":\"dataydhqkariatxhpx\",\"\":{\"oghg\":\"dataa\",\"mcwetx\":\"datagzbzsasg\",\"qbageltffqalmcq\":\"datasgcwadv\",\"slctyvmiz\":\"datampzwwtcwbgmxwpy\"}}") .toObject(GoogleBigQuerySource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleBigQuerySource model = new GoogleBigQuerySource().withSourceRetryCount("datavsoxnpuapt") - .withSourceRetryWait("datawekiqlscmtcljopi").withMaxConcurrentConnections("datawxvcfchokkcjjnq") - .withDisableMetricsCollection("datajoayaj").withQueryTimeout("datayvwxubgulyz") - .withAdditionalColumns("dataasxpprohuabdu").withQuery("dataanirlydsdmacydqa"); + GoogleBigQuerySource model = new GoogleBigQuerySource().withSourceRetryCount("datadfs") + .withSourceRetryWait("datagw") + .withMaxConcurrentConnections("dataghdn") + .withDisableMetricsCollection("dataydhqkariatxhpx") + .withQueryTimeout("datassx") + .withAdditionalColumns("datahhlhprjcfyvkkgx") + .withQuery("datafo"); model = BinaryData.fromObject(model).toObject(GoogleBigQuerySource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java index f6173a9e25405..a03ffd8f63e0c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class GoogleBigQueryV2DatasetTypePropertiesTests { @org.junit.jupiter.api.Test 
public void testDeserialize() throws Exception { GoogleBigQueryV2DatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"dataddpwmgw\",\"dataset\":\"dataukfjvqgl\"}") + = BinaryData.fromString("{\"table\":\"datarvpvdrohul\",\"dataset\":\"datakabhvxjuaivxzni\"}") .toObject(GoogleBigQueryV2DatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { GoogleBigQueryV2DatasetTypeProperties model - = new GoogleBigQueryV2DatasetTypeProperties().withTable("dataddpwmgw").withDataset("dataukfjvqgl"); + = new GoogleBigQueryV2DatasetTypeProperties().withTable("datarvpvdrohul") + .withDataset("datakabhvxjuaivxzni"); model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2DatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java index 92f641c6a880e..f201d76183453 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java @@ -19,33 +19,32 @@ public final class GoogleBigQueryV2ObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleBigQueryV2ObjectDataset model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryV2Object\",\"typeProperties\":{\"table\":\"datamllznyjyuw\",\"dataset\":\"datazwgdpvhwiril\"},\"description\":\"qtr\",\"structure\":\"datadoxdegacdedpkw\",\"schema\":\"dataobp\",\"linkedServiceName\":{\"referenceName\":\"dcidpdaq\",\"parameters\":{\"lsaqifepdureeviv\":\"datanddlirqq\"}},\"parameters\":{\"tlfytbltytv\":{\"type\":\"SecureString\",\"defaultValue\":\"dataoklsuffpxesw\"},\"clmowurofo\":{\"type\":\"Array\",\"defaultValue\":\"datagcesfdd\"}},\"annotations\":[\"datacj\"],\"folder\":{\"name\":\"weob\"},\"\":{\"hixcc\":\"datadq\",\"og\":\"datakf\",\"bzd\":\"datayoxmyqzyqepg\",\"vo\":\"dataluokc\"}}") + "{\"type\":\"uzlweo\",\"typeProperties\":{\"table\":\"dataqocy\",\"dataset\":\"datayineuaxpmezit\"},\"description\":\"foafzdypz\",\"structure\":\"datalmndhgwhlbpjuaj\",\"schema\":\"dataxav\",\"linkedServiceName\":{\"referenceName\":\"itnwlyhbujysv\",\"parameters\":{\"btpgnq\":\"datayddbhatm\"}},\"parameters\":{\"pkdn\":{\"type\":\"Array\",\"defaultValue\":\"datarjewihcigaahmdf\"}},\"annotations\":[\"dataz\",\"datatertnzrrwsc\"],\"folder\":{\"name\":\"hdwi\"},\"\":{\"jy\":\"datatol\",\"xsg\":\"datakr\",\"thqwppvihb\":\"dataaknk\"}}") .toObject(GoogleBigQueryV2ObjectDataset.class); - Assertions.assertEquals("qtr", model.description()); - Assertions.assertEquals("dcidpdaq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("tlfytbltytv").type()); - Assertions.assertEquals("weob", model.folder().name()); + Assertions.assertEquals("foafzdypz", model.description()); + Assertions.assertEquals("itnwlyhbujysv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("pkdn").type()); + Assertions.assertEquals("hdwi", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception 
{ - GoogleBigQueryV2ObjectDataset model - = new GoogleBigQueryV2ObjectDataset().withDescription("qtr").withStructure("datadoxdegacdedpkw") - .withSchema("dataobp") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dcidpdaq") - .withParameters(mapOf("lsaqifepdureeviv", "datanddlirqq"))) - .withParameters(mapOf("tlfytbltytv", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataoklsuffpxesw"), - "clmowurofo", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagcesfdd"))) - .withAnnotations(Arrays.asList("datacj")).withFolder(new DatasetFolder().withName("weob")) - .withTable("datamllznyjyuw").withDataset("datazwgdpvhwiril"); + GoogleBigQueryV2ObjectDataset model = new GoogleBigQueryV2ObjectDataset().withDescription("foafzdypz") + .withStructure("datalmndhgwhlbpjuaj") + .withSchema("dataxav") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("itnwlyhbujysv") + .withParameters(mapOf("btpgnq", "datayddbhatm"))) + .withParameters(mapOf("pkdn", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datarjewihcigaahmdf"))) + .withAnnotations(Arrays.asList("dataz", "datatertnzrrwsc")) + .withFolder(new DatasetFolder().withName("hdwi")) + .withTable("dataqocy") + .withDataset("datayineuaxpmezit"); model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2ObjectDataset.class); - Assertions.assertEquals("qtr", model.description()); - Assertions.assertEquals("dcidpdaq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("tlfytbltytv").type()); - Assertions.assertEquals("weob", model.folder().name()); + Assertions.assertEquals("foafzdypz", model.description()); + Assertions.assertEquals("itnwlyhbujysv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("pkdn").type()); + Assertions.assertEquals("hdwi", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java index b79025c76112b..1c1aa0ee3b31c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java @@ -11,16 +11,19 @@ public final class GoogleBigQueryV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleBigQueryV2Source model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryV2Source\",\"query\":\"datahndfpf\",\"queryTimeout\":\"datafdgf\",\"additionalColumns\":\"dataoeh\",\"sourceRetryCount\":\"datapkssjbw\",\"sourceRetryWait\":\"dataxdgcfcfky\",\"maxConcurrentConnections\":\"datajwxhslrbwwk\",\"disableMetricsCollection\":\"datawodhsodofsxjiky\",\"\":{\"cxdmxhuwldfa\":\"datauhuixqwogg\",\"dkbgsg\":\"datakyft\",\"ayqkg\":\"datapyckmncrutoudjm\"}}") + 
"{\"type\":\"yfubgnm\",\"query\":\"dataarxtd\",\"queryTimeout\":\"datazsdnfp\",\"additionalColumns\":\"datayksu\",\"sourceRetryCount\":\"datajdaxiohulvp\",\"sourceRetryWait\":\"datavcutwngfdsztmqqu\",\"maxConcurrentConnections\":\"dataokpfyojfwvmsf\",\"disableMetricsCollection\":\"datayxt\",\"\":{\"gsa\":\"dataqqzhqgmoexgny\",\"y\":\"datagheicexdxhxpqkcs\"}}") .toObject(GoogleBigQueryV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleBigQueryV2Source model - = new GoogleBigQueryV2Source().withSourceRetryCount("datapkssjbw").withSourceRetryWait("dataxdgcfcfky") - .withMaxConcurrentConnections("datajwxhslrbwwk").withDisableMetricsCollection("datawodhsodofsxjiky") - .withQueryTimeout("datafdgf").withAdditionalColumns("dataoeh").withQuery("datahndfpf"); + GoogleBigQueryV2Source model = new GoogleBigQueryV2Source().withSourceRetryCount("datajdaxiohulvp") + .withSourceRetryWait("datavcutwngfdsztmqqu") + .withMaxConcurrentConnections("dataokpfyojfwvmsf") + .withDisableMetricsCollection("datayxt") + .withQueryTimeout("datazsdnfp") + .withAdditionalColumns("datayksu") + .withQuery("dataarxtd"); model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java index d87f6936fe034..3f17da6f0332b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java @@ -11,14 +11,16 @@ public final class GoogleCloudStorageLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleCloudStorageLocation model = BinaryData.fromString( - "{\"type\":\"GoogleCloudStorageLocation\",\"bucketName\":\"datattnzqsaq\",\"version\":\"databgszplusdek\",\"folderPath\":\"datazzmssgpgv\",\"fileName\":\"datayejidbdqzsqun\",\"\":{\"snmr\":\"dataztlvv\",\"wfkcauxuvavcpf\":\"datakyjtrepw\",\"xlu\":\"datadofuckclb\",\"ngojfsqebuuxjx\":\"datavsolzwil\"}}") + "{\"type\":\"ymcqidkltvdhq\",\"bucketName\":\"datadsbgzan\",\"version\":\"datainm\",\"folderPath\":\"datafbxweiibntojovfn\",\"fileName\":\"datay\",\"\":{\"rhjkehw\":\"dataihaouwudhua\",\"qircamqprlob\":\"dataumo\",\"gelajdyolje\":\"dataugejcvjkjyczcmt\"}}") .toObject(GoogleCloudStorageLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GoogleCloudStorageLocation model = new GoogleCloudStorageLocation().withFolderPath("datazzmssgpgv") - .withFileName("datayejidbdqzsqun").withBucketName("datattnzqsaq").withVersion("databgszplusdek"); + GoogleCloudStorageLocation model = new GoogleCloudStorageLocation().withFolderPath("datafbxweiibntojovfn") + .withFileName("datay") + .withBucketName("datadsbgzan") + .withVersion("datainm"); model = BinaryData.fromObject(model).toObject(GoogleCloudStorageLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java index 87b5e6049616f..237319737d242 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java @@ -11,19 +11,25 @@ public final class GoogleCloudStorageReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GoogleCloudStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"GoogleCloudStorageReadSettings\",\"recursive\":\"datatnsnb\",\"wildcardFolderPath\":\"datauv\",\"wildcardFileName\":\"dataodfmort\",\"prefix\":\"datanpbsungnjkkmkzfb\",\"fileListPath\":\"datacgbgzjyr\",\"enablePartitionDiscovery\":\"dataiwhmrhz\",\"partitionRootPath\":\"datavpjydwmaqeytjp\",\"deleteFilesAfterCompletion\":\"datadp\",\"modifiedDatetimeStart\":\"datapdcsvzugiurhgqlv\",\"modifiedDatetimeEnd\":\"datajzscrjtnq\",\"maxConcurrentConnections\":\"datapobjufksddxk\",\"disableMetricsCollection\":\"datawxlylxfpvoylf\",\"\":{\"ime\":\"datarguecbthauivg\"}}") + "{\"type\":\"qlrt\",\"recursive\":\"dataiovhj\",\"wildcardFolderPath\":\"datafol\",\"wildcardFileName\":\"datar\",\"prefix\":\"dataahvoir\",\"fileListPath\":\"dataxqnzss\",\"enablePartitionDiscovery\":\"dataldtqykzmwdoqre\",\"partitionRootPath\":\"datatrnqxi\",\"deleteFilesAfterCompletion\":\"dataozryoxmfrxfxyc\",\"modifiedDatetimeStart\":\"dataalvchfumlf\",\"modifiedDatetimeEnd\":\"datalzxxkokipklfwnhf\",\"maxConcurrentConnections\":\"dataswq\",\"disableMetricsCollection\":\"datavljitbnhglrvlaro\",\"\":{\"abgsdxtwqqukgo\":\"datamucr\",\"ksltunrwxsqvx\":\"datalvjgsk\",\"y\":\"dataupccfwqisou\",\"etctjh\":\"dataxzjehdklvqtmzoci\"}}") .toObject(GoogleCloudStorageReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { GoogleCloudStorageReadSettings model - = new GoogleCloudStorageReadSettings().withMaxConcurrentConnections("datapobjufksddxk") - .withDisableMetricsCollection("datawxlylxfpvoylf").withRecursive("datatnsnb") - .withWildcardFolderPath("datauv").withWildcardFileName("dataodfmort").withPrefix("datanpbsungnjkkmkzfb") - .withFileListPath("datacgbgzjyr").withEnablePartitionDiscovery("dataiwhmrhz") - .withPartitionRootPath("datavpjydwmaqeytjp").withDeleteFilesAfterCompletion("datadp") - .withModifiedDatetimeStart("datapdcsvzugiurhgqlv").withModifiedDatetimeEnd("datajzscrjtnq"); + = new GoogleCloudStorageReadSettings().withMaxConcurrentConnections("dataswq") + .withDisableMetricsCollection("datavljitbnhglrvlaro") + .withRecursive("dataiovhj") + .withWildcardFolderPath("datafol") + .withWildcardFileName("datar") + .withPrefix("dataahvoir") + .withFileListPath("dataxqnzss") + .withEnablePartitionDiscovery("dataldtqykzmwdoqre") + .withPartitionRootPath("datatrnqxi") + .withDeleteFilesAfterCompletion("dataozryoxmfrxfxyc") + .withModifiedDatetimeStart("dataalvchfumlf") + .withModifiedDatetimeEnd("datalzxxkokipklfwnhf"); model = BinaryData.fromObject(model).toObject(GoogleCloudStorageReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java index f468c2437b649..bab4e16429a68 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java @@ -10,15 +10,16 @@ public final class GreenplumDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - GreenplumDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datab\",\"table\":\"datau\",\"schema\":\"datagtxlzncoqxtvytzq\"}") - .toObject(GreenplumDatasetTypeProperties.class); + GreenplumDatasetTypeProperties model = BinaryData + .fromString("{\"tableName\":\"dataptgvnaqyjukka\",\"table\":\"datanewl\",\"schema\":\"datanopffemi\"}") + .toObject(GreenplumDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GreenplumDatasetTypeProperties model = new GreenplumDatasetTypeProperties().withTableName("datab") - .withTable("datau").withSchema("datagtxlzncoqxtvytzq"); + GreenplumDatasetTypeProperties model = new GreenplumDatasetTypeProperties().withTableName("dataptgvnaqyjukka") + .withTable("datanewl") + .withSchema("datanopffemi"); model = BinaryData.fromObject(model).toObject(GreenplumDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java index 89f88de5d4281..9ea5b1af8f3cb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java @@ -11,16 +11,19 @@ public final class GreenplumSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GreenplumSource model = BinaryData.fromString( - "{\"type\":\"GreenplumSource\",\"query\":\"datafn\",\"queryTimeout\":\"dataeyavldovpwrq\",\"additionalColumns\":\"datazokplzliizb\",\"sourceRetryCount\":\"datajumulhfq\",\"sourceRetryWait\":\"datanchah\",\"maxConcurrentConnections\":\"datanrptrqcap\",\"disableMetricsCollection\":\"datafvowzbk\",\"\":{\"qzzkplqmca\":\"datapzdpujywjmo\",\"jgfpqwwugfwpvj\":\"dataseiauveeng\"}}") + "{\"type\":\"yl\",\"query\":\"datawctjhdbi\",\"queryTimeout\":\"datatfaekpxv\",\"additionalColumns\":\"datadrcmtsorwta\",\"sourceRetryCount\":\"datarfvoskwujhskx\",\"sourceRetryWait\":\"datak\",\"maxConcurrentConnections\":\"datasa\",\"disableMetricsCollection\":\"dataf\",\"\":{\"qicsfaqypj\":\"dataaxgtwpzqti\"}}") .toObject(GreenplumSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GreenplumSource model - = new GreenplumSource().withSourceRetryCount("datajumulhfq").withSourceRetryWait("datanchah") - .withMaxConcurrentConnections("datanrptrqcap").withDisableMetricsCollection("datafvowzbk") - .withQueryTimeout("dataeyavldovpwrq").withAdditionalColumns("datazokplzliizb").withQuery("datafn"); + GreenplumSource model = new 
GreenplumSource().withSourceRetryCount("datarfvoskwujhskx") + .withSourceRetryWait("datak") + .withMaxConcurrentConnections("datasa") + .withDisableMetricsCollection("dataf") + .withQueryTimeout("datatfaekpxv") + .withAdditionalColumns("datadrcmtsorwta") + .withQuery("datawctjhdbi"); model = BinaryData.fromObject(model).toObject(GreenplumSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java index 92bab8c42fb18..7be63278624e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java @@ -19,34 +19,33 @@ public final class GreenplumTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { GreenplumTableDataset model = BinaryData.fromString( - "{\"type\":\"GreenplumTable\",\"typeProperties\":{\"tableName\":\"dataeisvvvgyp\",\"table\":\"dataeovej\",\"schema\":\"dataaleczt\"},\"description\":\"h\",\"structure\":\"datazpuvfs\",\"schema\":\"datagrqefnq\",\"linkedServiceName\":{\"referenceName\":\"oll\",\"parameters\":{\"qieh\":\"datarmuzemb\",\"wnaxoxl\":\"datahjofy\",\"slqcxuthv\":\"datarx\"}},\"parameters\":{\"yju\":{\"type\":\"SecureString\",\"defaultValue\":\"datayhrgmabspmlu\"},\"rbfgqi\":{\"type\":\"Object\",\"defaultValue\":\"datadputo\"},\"r\":{\"type\":\"Bool\",\"defaultValue\":\"datargcuzjmvkr\"},\"ybqjvfio\":{\"type\":\"SecureString\",\"defaultValue\":\"datahgcmljzksqi\"}},\"annotations\":[\"dataaqpvhszopeuku\",\"datadswbsskgq\",\"dataemosq\",\"datafsjbpwjwz\"],\"folder\":{\"name\":\"pdzy\"},\"\":{\"udqhad\":\"datahxfpzc\",\"vl\":\"dataj\"}}") + "{\"type\":\"cltfcieileem\",\"typeProperties\":{\"tableName\":\"databmk\",\"table\":\"datavdlor\",\"schema\":\"datapqdit\"},\"description\":\"kehldopjsxvbbwsg\",\"structure\":\"datakm\",\"schema\":\"datanmdpidipwtgzw\",\"linkedServiceName\":{\"referenceName\":\"zhcmrl\",\"parameters\":{\"tyzavkyjjl\":\"datav\"}},\"parameters\":{\"ythctox\":{\"type\":\"Bool\",\"defaultValue\":\"datajqbngzldvvdkop\"}},\"annotations\":[\"datamqnerwh\"],\"folder\":{\"name\":\"idsssfzsgz\"},\"\":{\"ogdblwjsbaqxaxt\":\"datapejomeqgxhwisp\",\"i\":\"dataxirppbiichlygkv\",\"ni\":\"datawonkrnizdxywabki\"}}") .toObject(GreenplumTableDataset.class); - Assertions.assertEquals("h", model.description()); - Assertions.assertEquals("oll", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("yju").type()); - Assertions.assertEquals("pdzy", model.folder().name()); + Assertions.assertEquals("kehldopjsxvbbwsg", model.description()); + Assertions.assertEquals("zhcmrl", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ythctox").type()); + Assertions.assertEquals("idsssfzsgz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - GreenplumTableDataset model = new GreenplumTableDataset().withDescription("h").withStructure("datazpuvfs") - .withSchema("datagrqefnq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("oll") - 
.withParameters(mapOf("qieh", "datarmuzemb", "wnaxoxl", "datahjofy", "slqcxuthv", "datarx"))) - .withParameters(mapOf("yju", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datayhrgmabspmlu"), - "rbfgqi", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datadputo"), - "r", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datargcuzjmvkr"), - "ybqjvfio", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahgcmljzksqi"))) - .withAnnotations(Arrays.asList("dataaqpvhszopeuku", "datadswbsskgq", "dataemosq", "datafsjbpwjwz")) - .withFolder(new DatasetFolder().withName("pdzy")).withTableName("dataeisvvvgyp").withTable("dataeovej") - .withSchemaTypePropertiesSchema("dataaleczt"); + GreenplumTableDataset model = new GreenplumTableDataset().withDescription("kehldopjsxvbbwsg") + .withStructure("datakm") + .withSchema("datanmdpidipwtgzw") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("zhcmrl").withParameters(mapOf("tyzavkyjjl", "datav"))) + .withParameters(mapOf("ythctox", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datajqbngzldvvdkop"))) + .withAnnotations(Arrays.asList("datamqnerwh")) + .withFolder(new DatasetFolder().withName("idsssfzsgz")) + .withTableName("databmk") + .withTable("datavdlor") + .withSchemaTypePropertiesSchema("datapqdit"); model = BinaryData.fromObject(model).toObject(GreenplumTableDataset.class); - Assertions.assertEquals("h", model.description()); - Assertions.assertEquals("oll", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("yju").type()); - Assertions.assertEquals("pdzy", model.folder().name()); + Assertions.assertEquals("kehldopjsxvbbwsg", model.description()); + Assertions.assertEquals("zhcmrl", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ythctox").type()); + Assertions.assertEquals("idsssfzsgz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java index 87348e8c5306f..dfb4050a914a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java @@ -19,33 +19,32 @@ public final class HBaseObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HBaseObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"HBaseObject\",\"typeProperties\":{\"tableName\":\"datadjvzmxyrazzstjvc\"},\"description\":\"bdbrl\",\"structure\":\"datazlty\",\"schema\":\"dataacbibtk\",\"linkedServiceName\":{\"referenceName\":\"iecup\",\"parameters\":{\"krlnrpeylfiiul\":\"datads\",\"hcxwwwvun\":\"datadgiql\",\"atrtcqyfjvifbmo\":\"datansgvxhxrm\",\"bhukdfpknv\":\"datatehqyoytrcoufkq\"}},\"parameters\":{\"hzjlrknckkfxm\":{\"type\":\"Float\",\"defaultValue\":\"datajezchmeo\"},\"fts\":{\"type\":\"Float\",\"defaultValue\":\"dataqkwqphfv\"},\"zgfctuuzow\":{\"type\":\"Bool\",\"defaultValue\":\"datalpxcachdt\"}},\"annotations\":[\"datavuxnx\",\"datauohshzultdbvm\"],\"folder\":{\"name\":\"ypngocbd\"},\"\":{\"zsuzgrzu\":\"dataivptb\",\"aatvogpyceinha\":\"dataekytkzvtv\",\"khwfjudapbq\":\"datahbdxsbypl\"}}") + "{\"type\":\"qz\",\"typeProperties\":{\"tableName\":\"databfcdikqnxydgzf\"},\"description\":\"spa\",\"structure\":\"datavs\",\"schema\":\"dataeronzeafkx\",\"linkedServiceName\":{\"referenceName\":\"muwdbvytqavou\",\"parameters\":{\"iw\":\"datadeuqxlvzpfdkaxg\",\"bmrrmtr\":\"datagopqlktt\",\"rjzwawpewajccs\":\"datagjmpd\"}},\"parameters\":{\"guzprpxhhboigzxk\":{\"type\":\"String\",\"defaultValue\":\"datauijtickzo\"}},\"annotations\":[\"datalrzhtocjzfppexu\",\"dataatzw\",\"datakjwg\"],\"folder\":{\"name\":\"vjcmimbmswskbb\"},\"\":{\"lodaqrbkpo\":\"datayp\",\"valcrqaxlmbrtvtg\":\"dataffsobg\"}}") .toObject(HBaseObjectDataset.class); - Assertions.assertEquals("bdbrl", model.description()); - Assertions.assertEquals("iecup", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hzjlrknckkfxm").type()); - Assertions.assertEquals("ypngocbd", model.folder().name()); + Assertions.assertEquals("spa", model.description()); + Assertions.assertEquals("muwdbvytqavou", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("guzprpxhhboigzxk").type()); + Assertions.assertEquals("vjcmimbmswskbb", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HBaseObjectDataset model - = new HBaseObjectDataset().withDescription("bdbrl").withStructure("datazlty").withSchema("dataacbibtk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("iecup") - .withParameters(mapOf("krlnrpeylfiiul", "datads", "hcxwwwvun", "datadgiql", "atrtcqyfjvifbmo", - "datansgvxhxrm", "bhukdfpknv", "datatehqyoytrcoufkq"))) - .withParameters(mapOf("hzjlrknckkfxm", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datajezchmeo"), "fts", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqkwqphfv"), - "zgfctuuzow", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalpxcachdt"))) - .withAnnotations(Arrays.asList("datavuxnx", "datauohshzultdbvm")) - .withFolder(new DatasetFolder().withName("ypngocbd")).withTableName("datadjvzmxyrazzstjvc"); + HBaseObjectDataset model = new HBaseObjectDataset().withDescription("spa") + .withStructure("datavs") + .withSchema("dataeronzeafkx") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("muwdbvytqavou") + .withParameters( + mapOf("iw", "datadeuqxlvzpfdkaxg", "bmrrmtr", "datagopqlktt", "rjzwawpewajccs", "datagjmpd"))) + .withParameters(mapOf("guzprpxhhboigzxk", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datauijtickzo"))) + .withAnnotations(Arrays.asList("datalrzhtocjzfppexu", "dataatzw", 
"datakjwg")) + .withFolder(new DatasetFolder().withName("vjcmimbmswskbb")) + .withTableName("databfcdikqnxydgzf"); model = BinaryData.fromObject(model).toObject(HBaseObjectDataset.class); - Assertions.assertEquals("bdbrl", model.description()); - Assertions.assertEquals("iecup", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hzjlrknckkfxm").type()); - Assertions.assertEquals("ypngocbd", model.folder().name()); + Assertions.assertEquals("spa", model.description()); + Assertions.assertEquals("muwdbvytqavou", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("guzprpxhhboigzxk").type()); + Assertions.assertEquals("vjcmimbmswskbb", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java index a5bf1454c380d..38eaeb112c28d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java @@ -11,15 +11,19 @@ public final class HBaseSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HBaseSource model = BinaryData.fromString( - "{\"type\":\"HBaseSource\",\"query\":\"datawbqaibkyeysf\",\"queryTimeout\":\"datahdydyybztlylh\",\"additionalColumns\":\"datacjq\",\"sourceRetryCount\":\"datacie\",\"sourceRetryWait\":\"datak\",\"maxConcurrentConnections\":\"dataxf\",\"disableMetricsCollection\":\"datahvecjhbttmhneqd\",\"\":{\"kna\":\"dataeyxxidabqla\",\"ljsfcryqrrsjqt\":\"datacseqo\"}}") + "{\"type\":\"pipslcfw\",\"query\":\"datakujwn\",\"queryTimeout\":\"datamipn\",\"additionalColumns\":\"dataedp\",\"sourceRetryCount\":\"datazzqfbo\",\"sourceRetryWait\":\"datafghdg\",\"maxConcurrentConnections\":\"datahncxoqxtjzdpll\",\"disableMetricsCollection\":\"datalvk\",\"\":{\"ttxqxvmybq\":\"dataosoxxoqyikdjaog\",\"lf\":\"datalg\",\"miw\":\"datasdccmdplhzjiqi\",\"punuvfs\":\"datahmzkxrqzgshqx\"}}") .toObject(HBaseSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HBaseSource model = new HBaseSource().withSourceRetryCount("datacie").withSourceRetryWait("datak") - .withMaxConcurrentConnections("dataxf").withDisableMetricsCollection("datahvecjhbttmhneqd") - .withQueryTimeout("datahdydyybztlylh").withAdditionalColumns("datacjq").withQuery("datawbqaibkyeysf"); + HBaseSource model = new HBaseSource().withSourceRetryCount("datazzqfbo") + .withSourceRetryWait("datafghdg") + .withMaxConcurrentConnections("datahncxoqxtjzdpll") + .withDisableMetricsCollection("datalvk") + .withQueryTimeout("datamipn") + .withAdditionalColumns("dataedp") + .withQuery("datakujwn"); model = BinaryData.fromObject(model).toObject(HBaseSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java index 2e7b0587f9685..7522e592190d2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java @@ -23,83 +23,88 @@ public final class HDInsightHiveActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightHiveActivity model = BinaryData.fromString( - "{\"type\":\"HDInsightHive\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"vwizjraksahwq\",\"parameters\":{\"misnb\":\"dataltfknro\",\"fvqtvukcfesizkn\":\"datacz\",\"xflzhgr\":\"datac\"}},{\"referenceName\":\"hwysdmovbvn\",\"parameters\":{\"w\":\"dataqofdgzlykczoln\"}},{\"referenceName\":\"gg\",\"parameters\":{\"ukfwmhzarrfttx\":\"datavoxnjbyjgobzj\"}}],\"arguments\":[\"datarjg\",\"datahon\"],\"getDebugInfo\":\"Always\",\"scriptPath\":\"dataab\",\"scriptLinkedService\":{\"referenceName\":\"mkfvsolkjowvz\",\"parameters\":{\"c\":\"datahljtlyguothn\"}},\"defines\":{\"dlbahmivtuphwwy\":\"datauaergjtpriicte\",\"fxfteo\":\"dataxo\",\"qap\":\"datanrziwkcpxgjmyou\"},\"variables\":{\"yrsvtrtxggmpohu\":\"datapcdikk\",\"jzklqkgj\":\"dataas\",\"cwnefdehptlnw\":\"datakntknjhywgzi\"},\"queryTimeout\":196553678},\"linkedServiceName\":{\"referenceName\":\"wbneszxnmgeuoih\",\"parameters\":{\"suwct\":\"datafiwpgxyavcb\",\"yr\":\"databhcjfgxtl\",\"k\":\"datayhpbtwzrziv\",\"oldbuyuxgeph\":\"datadcjym\"}},\"policy\":{\"timeout\":\"dataexfbrsdoxhyiya\",\"retry\":\"datax\",\"retryIntervalInSeconds\":1724799013,\"secureInput\":true,\"secureOutput\":true,\"\":{\"p\":\"dataxynkh\"}},\"name\":\"qlhzdbbitpgr\",\"description\":\"pmsdgmxwfodvzpxm\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"e\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"dgplagwvgbnx\":\"dataqtbasvj\"}},{\"activity\":\"qudnqcbbbhin\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"yzqdnrgnyb\":\"databfzkvrmdoshi\",\"mkhxunq\":\"dataqwjj\",\"ydabhydcqrs\":\"datavqticgsdcpmclk\"}},{\"activity\":\"qwzndzuxlgmy\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\",\"Completed\"],\"\":{\"vb\":\"datazmbap\",\"ymcgtbpbf\":\"dataablmpntjlz\",\"jqwlvsefvkxxd\":\"datagf\",\"zm\":\"datagbnqmhr\"}}],\"userProperties\":[{\"name\":\"grflqbug\",\"value\":\"dataudsmdglqrkst\"}],\"\":{\"ficzw\":\"datadasomxwsflylols\",\"zzeqd\":\"datakglmcg\",\"kar\":\"dataxurbj\",\"jhdcolnxw\":\"datavguzvtwfbqxoqnv\"}}") + 
"{\"type\":\"pibaxkyw\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"wvfv\",\"parameters\":{\"vzdervn\":\"datamcoxob\",\"ggeeagbrs\":\"datafieaqbvgue\",\"ibexaxu\":\"databzcyubqemrxm\"}}],\"arguments\":[\"dataqnwxohbmvga\",\"datad\"],\"getDebugInfo\":\"Failure\",\"scriptPath\":\"datahrnqnurun\",\"scriptLinkedService\":{\"referenceName\":\"uzcpifas\",\"parameters\":{\"ascvcmt\":\"datatiocsfpcyyi\",\"adtyhmoph\":\"dataukboryn\"}},\"defines\":{\"tbqijeqfoatqnhr\":\"datahvnqwdphnc\",\"tiznvijdtmjy\":\"dataxhmtxpxdtmrwjk\"},\"variables\":{\"sliou\":\"datadhwadnccunrviqrz\",\"xqnpnpggbu\":\"dataa\"},\"queryTimeout\":1194274375},\"linkedServiceName\":{\"referenceName\":\"nmuir\",\"parameters\":{\"uuzaczmej\":\"dataztkxfhix\",\"tketw\":\"dataiiegpdhit\",\"mqhzys\":\"datasko\",\"jqevmzhk\":\"datachbvejgfx\"}},\"policy\":{\"timeout\":\"datangdgk\",\"retry\":\"datanxy\",\"retryIntervalInSeconds\":1965812046,\"secureInput\":false,\"secureOutput\":false,\"\":{\"biisnh\":\"datalndlvcbc\",\"uuetmqzuen\":\"dataqqaedgwghqq\",\"mj\":\"datallqvroopk\"}},\"name\":\"spnrcuvlfzd\",\"description\":\"feupacah\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"orabspfinyijm\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"dhedmfidro\":\"datalbd\"}},{\"activity\":\"fpucwmdmbys\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Succeeded\",\"Failed\"],\"\":{\"xeusioawrorexi\":\"dataheyxc\",\"tmyc\":\"datawbquppkzuxsbbmxf\",\"ezadkfmpiffgtqhg\":\"dataelyopobgzluukiw\"}},{\"activity\":\"ygzaya\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"gbi\":\"dataerxhucxmybuqj\"}},{\"activity\":\"axga\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"dkdcvowaslswwg\":\"datahpvbmbfixcuamd\",\"vhfdezomykjbl\":\"datayvopotiefxhaqq\",\"vmymfaiw\":\"dataypuon\"}}],\"userProperties\":[{\"name\":\"phaddsxsqcvab\",\"value\":\"datazdaroedwipauc\"},{\"name\":\"etypzziav\",\"value\":\"datauskvvnzng\"},{\"name\":\"boqeuez\",\"value\":\"datafldjkkva\"}],\"\":{\"b\":\"datadaejn\"}}") .toObject(HDInsightHiveActivity.class); - Assertions.assertEquals("qlhzdbbitpgr", model.name()); - Assertions.assertEquals("pmsdgmxwfodvzpxm", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("e", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("grflqbug", model.userProperties().get(0).name()); - Assertions.assertEquals("wbneszxnmgeuoih", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1724799013, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("vwizjraksahwq", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("mkfvsolkjowvz", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(196553678, model.queryTimeout()); + Assertions.assertEquals("spnrcuvlfzd", model.name()); + Assertions.assertEquals("feupacah", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, 
model.onInactiveMarkAs()); + Assertions.assertEquals("orabspfinyijm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("phaddsxsqcvab", model.userProperties().get(0).name()); + Assertions.assertEquals("nmuir", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1965812046, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("wvfv", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("uzcpifas", model.scriptLinkedService().referenceName()); + Assertions.assertEquals(1194274375, model.queryTimeout()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightHiveActivity model - = new HDInsightHiveActivity().withName("qlhzdbbitpgr").withDescription("pmsdgmxwfodvzpxm") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("e") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qudnqcbbbhin") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qwzndzuxlgmy") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("grflqbug").withValue("dataudsmdglqrkst"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wbneszxnmgeuoih") - .withParameters(mapOf("suwct", "datafiwpgxyavcb", "yr", "databhcjfgxtl", "k", "datayhpbtwzrziv", - "oldbuyuxgeph", "datadcjym"))) - .withPolicy(new ActivityPolicy().withTimeout("dataexfbrsdoxhyiya").withRetry("datax") - .withRetryIntervalInSeconds(1724799013).withSecureInput(true).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("vwizjraksahwq") - .withParameters(mapOf("misnb", "dataltfknro", "fvqtvukcfesizkn", "datacz", "xflzhgr", "datac")), - new LinkedServiceReference().withReferenceName("hwysdmovbvn") - .withParameters(mapOf("w", "dataqofdgzlykczoln")), - new LinkedServiceReference().withReferenceName("gg") - .withParameters(mapOf("ukfwmhzarrfttx", "datavoxnjbyjgobzj")))) - .withArguments(Arrays.asList("datarjg", "datahon")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS).withScriptPath("dataab") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("mkfvsolkjowvz") - .withParameters(mapOf("c", "datahljtlyguothn"))) - .withDefines( - mapOf("dlbahmivtuphwwy", "datauaergjtpriicte", "fxfteo", "dataxo", "qap", "datanrziwkcpxgjmyou")) - .withVariables( - mapOf("yrsvtrtxggmpohu", "datapcdikk", "jzklqkgj", "dataas", "cwnefdehptlnw", "datakntknjhywgzi")) - .withQueryTimeout(196553678); + HDInsightHiveActivity model = new HDInsightHiveActivity().withName("spnrcuvlfzd") + .withDescription("feupacah") + 
.withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("orabspfinyijm") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fpucwmdmbys") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ygzaya") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("axga") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("phaddsxsqcvab").withValue("datazdaroedwipauc"), + new UserProperty().withName("etypzziav").withValue("datauskvvnzng"), + new UserProperty().withName("boqeuez").withValue("datafldjkkva"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nmuir") + .withParameters(mapOf("uuzaczmej", "dataztkxfhix", "tketw", "dataiiegpdhit", "mqhzys", "datasko", + "jqevmzhk", "datachbvejgfx"))) + .withPolicy(new ActivityPolicy().withTimeout("datangdgk") + .withRetry("datanxy") + .withRetryIntervalInSeconds(1965812046) + .withSecureInput(false) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withStorageLinkedServices( + Arrays + .asList(new LinkedServiceReference().withReferenceName("wvfv") + .withParameters(mapOf("vzdervn", "datamcoxob", "ggeeagbrs", "datafieaqbvgue", "ibexaxu", + "databzcyubqemrxm")))) + .withArguments(Arrays.asList("dataqnwxohbmvga", "datad")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withScriptPath("datahrnqnurun") + .withScriptLinkedService(new LinkedServiceReference().withReferenceName("uzcpifas") + .withParameters(mapOf("ascvcmt", "datatiocsfpcyyi", "adtyhmoph", "dataukboryn"))) + .withDefines(mapOf("tbqijeqfoatqnhr", "datahvnqwdphnc", "tiznvijdtmjy", "dataxhmtxpxdtmrwjk")) + .withVariables(mapOf("sliou", "datadhwadnccunrviqrz", "xqnpnpggbu", "dataa")) + .withQueryTimeout(1194274375); model = BinaryData.fromObject(model).toObject(HDInsightHiveActivity.class); - Assertions.assertEquals("qlhzdbbitpgr", model.name()); - Assertions.assertEquals("pmsdgmxwfodvzpxm", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("e", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("grflqbug", model.userProperties().get(0).name()); - Assertions.assertEquals("wbneszxnmgeuoih", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1724799013, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("vwizjraksahwq", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - 
Assertions.assertEquals("mkfvsolkjowvz", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(196553678, model.queryTimeout()); + Assertions.assertEquals("spnrcuvlfzd", model.name()); + Assertions.assertEquals("feupacah", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("orabspfinyijm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("phaddsxsqcvab", model.userProperties().get(0).name()); + Assertions.assertEquals("nmuir", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1965812046, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(false, model.policy().secureOutput()); + Assertions.assertEquals("wvfv", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("uzcpifas", model.scriptLinkedService().referenceName()); + Assertions.assertEquals(1194274375, model.queryTimeout()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java index 1fdc644f44f3e..1902aa6d1a15f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java @@ -17,35 +17,33 @@ public final class HDInsightHiveActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightHiveActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"iodnntol\",\"parameters\":{\"zvi\":\"dataptngr\",\"cdkomr\":\"dataxacxcac\"}},{\"referenceName\":\"u\",\"parameters\":{\"skyrhsijxm\":\"datajxpdqwywjnxd\"}}],\"arguments\":[\"dataymfxjsuwmbdt\",\"dataetrgzybprrapgh\"],\"getDebugInfo\":\"None\",\"scriptPath\":\"dataqapvnq\",\"scriptLinkedService\":{\"referenceName\":\"qkrsnxuezw\",\"parameters\":{\"rivcxxejpl\":\"dataadtvpgugtmtg\",\"sl\":\"datahkiyiqpiiodvzjk\"}},\"defines\":{\"hjqakacbcbrsnnv\":\"datapopubbwpsncxbkw\"},\"variables\":{\"bis\":\"datagfgtwqmtyfqutmj\",\"dorbufog\":\"dataurilqcsq\",\"auwojgvpqzvtgwlz\":\"datackbizqqajs\",\"br\":\"datacyvrbgi\"},\"queryTimeout\":240651564}") + 
"{\"storageLinkedServices\":[{\"referenceName\":\"fuqqb\",\"parameters\":{\"kn\":\"databhrspzkv\",\"uwkcnhdecxbi\":\"dataerednoes\",\"dntp\":\"datanfepixfdojxbyxf\",\"asvahbqoojd\":\"datasbtigegwaidqzf\"}}],\"arguments\":[\"dataxjlumrzfdbotn\",\"dataadanlhq\",\"datar\",\"databgiaoxpfkozvc\"],\"getDebugInfo\":\"None\",\"scriptPath\":\"dataurhgu\",\"scriptLinkedService\":{\"referenceName\":\"sspble\",\"parameters\":{\"zkkmrlptdk\":\"datayftklbbribg\",\"rivedshuxlhecz\":\"dataib\",\"bviw\":\"datamwwm\"}},\"defines\":{\"udskcadkyoo\":\"dataeysjglponkrhpyed\"},\"variables\":{\"wtwzufm\":\"datapcjrbfayduzzyxly\"},\"queryTimeout\":1907026033}") .toObject(HDInsightHiveActivityTypeProperties.class); - Assertions.assertEquals("iodnntol", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals("fuqqb", model.storageLinkedServices().get(0).referenceName()); Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("qkrsnxuezw", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(240651564, model.queryTimeout()); + Assertions.assertEquals("sspble", model.scriptLinkedService().referenceName()); + Assertions.assertEquals(1907026033, model.queryTimeout()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { HDInsightHiveActivityTypeProperties model = new HDInsightHiveActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("iodnntol") - .withParameters(mapOf("zvi", "dataptngr", "cdkomr", "dataxacxcac")), - new LinkedServiceReference().withReferenceName("u") - .withParameters(mapOf("skyrhsijxm", "datajxpdqwywjnxd")))) - .withArguments(Arrays.asList("dataymfxjsuwmbdt", "dataetrgzybprrapgh")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.NONE).withScriptPath("dataqapvnq") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("qkrsnxuezw") - .withParameters(mapOf("rivcxxejpl", "dataadtvpgugtmtg", "sl", "datahkiyiqpiiodvzjk"))) - .withDefines(mapOf("hjqakacbcbrsnnv", "datapopubbwpsncxbkw")) - .withVariables(mapOf("bis", "datagfgtwqmtyfqutmj", "dorbufog", "dataurilqcsq", "auwojgvpqzvtgwlz", - "datackbizqqajs", "br", "datacyvrbgi")) - .withQueryTimeout(240651564); + .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("fuqqb") + .withParameters(mapOf("kn", "databhrspzkv", "uwkcnhdecxbi", "dataerednoes", "dntp", + "datanfepixfdojxbyxf", "asvahbqoojd", "datasbtigegwaidqzf")))) + .withArguments(Arrays.asList("dataxjlumrzfdbotn", "dataadanlhq", "datar", "databgiaoxpfkozvc")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.NONE) + .withScriptPath("dataurhgu") + .withScriptLinkedService(new LinkedServiceReference().withReferenceName("sspble") + .withParameters(mapOf("zkkmrlptdk", "datayftklbbribg", "rivedshuxlhecz", "dataib", "bviw", "datamwwm"))) + .withDefines(mapOf("udskcadkyoo", "dataeysjglponkrhpyed")) + .withVariables(mapOf("wtwzufm", "datapcjrbfayduzzyxly")) + .withQueryTimeout(1907026033); model = BinaryData.fromObject(model).toObject(HDInsightHiveActivityTypeProperties.class); - Assertions.assertEquals("iodnntol", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals("fuqqb", model.storageLinkedServices().get(0).referenceName()); Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("qkrsnxuezw", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(240651564, 
model.queryTimeout()); + Assertions.assertEquals("sspble", model.scriptLinkedService().referenceName()); + Assertions.assertEquals(1907026033, model.queryTimeout()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java index 92d3b07948bac..b4c24df011e65 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java @@ -23,79 +23,87 @@ public final class HDInsightMapReduceActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightMapReduceActivity model = BinaryData.fromString( - "{\"type\":\"HDInsightMapReduce\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"fjbmowqwodmdrdty\",\"parameters\":{\"ciapvcsw\":\"dataqwa\",\"aqmrimletjv\":\"datajrpcpgcltf\",\"ab\":\"dataptf\",\"zxupwrizkqnbiia\":\"datablhzfglpswg\"}}],\"arguments\":[\"datasjpq\",\"dataas\"],\"getDebugInfo\":\"Failure\",\"className\":\"dataosltch\",\"jarFilePath\":\"datauhvdrfh\",\"jarLinkedService\":{\"referenceName\":\"cxhnojfdiijch\",\"parameters\":{\"emnkykvruomw\":\"dataabtxr\"}},\"jarLibs\":[\"datatzffpcdqh\",\"datapz\",\"dataeqvkuvy\",\"datai\"],\"defines\":{\"ikstapkbd\":\"dataokolc\",\"mugkugwtg\":\"datay\"}},\"linkedServiceName\":{\"referenceName\":\"twayh\",\"parameters\":{\"eyliisatbs\":\"dataqv\",\"qvernqkeiyy\":\"datahtkdcufzxxqdntvf\"}},\"policy\":{\"timeout\":\"datatjoxwugdzwoczf\",\"retry\":\"datafcmpddzzdw\",\"retryIntervalInSeconds\":2049043657,\"secureInput\":true,\"secureOutput\":false,\"\":{\"lflyluwunsny\":\"datamuyyk\"}},\"name\":\"pmn\",\"description\":\"nbmgpomcrev\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"zveisd\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\"],\"\":{\"eodix\":\"datapnr\",\"xr\":\"dataflxvsu\"}},{\"activity\":\"tcozfjsfrbjrbqcb\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"fbwsicmhhvbovbl\":\"dataasm\",\"gd\":\"datafyle\",\"ezszlr\":\"dataiurfemnykfzsouo\"}},{\"activity\":\"z\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"bvlljkqlruh\":\"datajnpkxprbutyjfhj\",\"mfp\":\"datakkbfgrmscbmdrbt\"}},{\"activity\":\"redcvwsbsd\",\"dependencyConditions\":[\"Completed\"],\"\":{\"deuphgnfaanub\":\"datarqouyfcfd\",\"dhw\":\"dataeboelksghsowmvtm\",\"c\":\"datafbjhhpfj\",\"pwavdope\":\"datal\"}}],\"userProperties\":[{\"name\":\"ovrsrtldijgrbit\",\"value\":\"datadwuoxirziluzokx\"},{\"name\":\"hcjtwhwgbaj\",\"value\":\"datagctwamjjwvmugis\"},{\"name\":\"nyzmhhopedmk\",\"value\":\"datatdyxzg\"},{\"name\":\"qtgfbmocvb\",\"value\":\"datarvkqxhkhj\"}],\"\":{\"z\":\"dataqpxaajtiyrqtuz\"}}") + 
"{\"type\":\"hukuypyeo\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"dukrjjfnexuqalwj\",\"parameters\":{\"vjdcehlgryvkubf\":\"datanvbzemdff\",\"rxyffttsdtnqlggt\":\"datatgivporiok\",\"yinly\":\"datajzimxzbuqcinje\",\"ddujzenagmhhm\":\"datagul\"}},{\"referenceName\":\"tbqzftm\",\"parameters\":{\"oldxmgyjvjyxueu\":\"databmngbkqcprbwsnd\",\"xaktkbryolzbmdn\":\"datacbgbsc\"}},{\"referenceName\":\"ajggzuyokc\",\"parameters\":{\"jfuwt\":\"datasbhdixzaobsenxg\",\"euwpivsltlyqc\":\"dataukbpwwfeixm\",\"qcmsrzrcddlzga\":\"datapwndcjr\"}}],\"arguments\":[\"datawqfg\"],\"getDebugInfo\":\"Always\",\"className\":\"dataondaop\",\"jarFilePath\":\"datar\",\"jarLinkedService\":{\"referenceName\":\"pmezlloflcilr\",\"parameters\":{\"vdwknymqzmu\":\"datar\",\"vdjinuos\":\"datanuvtgjgp\"}},\"jarLibs\":[\"databzzfevwc\",\"datarbjgdvwamcvcne\",\"datak\",\"datakmenawsvximqkuy\"],\"defines\":{\"comqyqvywzhb\":\"dataxgsuuapktfvemwf\"}},\"linkedServiceName\":{\"referenceName\":\"snfeybkhyqouzzp\",\"parameters\":{\"lhnmydshgfdvwsh\":\"datadzsnv\",\"nfsjnrfpzlvae\":\"dataczy\",\"vvrk\":\"datajnskekhmo\"}},\"policy\":{\"timeout\":\"datafazsiizcwha\",\"retry\":\"datahaet\",\"retryIntervalInSeconds\":1382688900,\"secureInput\":false,\"secureOutput\":true,\"\":{\"yiwvqlrzobvkgf\":\"dataacacdya\",\"qrjbzrzf\":\"datajbjavnk\"}},\"name\":\"saeabbxkldtwrry\",\"description\":\"jr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"x\",\"dependencyConditions\":[\"Completed\"],\"\":{\"pxyrfkslg\":\"dataudfwlp\",\"ilcntmueemexa\":\"dataladqcmkdhgpzqib\",\"ntkokndjwpxe\":\"datajxaaw\"}},{\"activity\":\"n\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\"],\"\":{\"nnvtlbclgrkfwofw\":\"databv\",\"xpeodbmuzpd\":\"dataayowzp\",\"hi\":\"datathpsycasx\",\"xrzqagmcivsqa\":\"dataymmiipffjgjmysnf\"}},{\"activity\":\"iabyfzadeui\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"uxg\":\"datazakqgabrb\",\"ulozdoi\":\"datawuy\",\"iv\":\"datahrxjiw\"}},{\"activity\":\"orqlkycwnb\",\"dependencyConditions\":[\"Completed\"],\"\":{\"ah\":\"dataazyrisciokbvft\",\"dlmag\":\"datansllfkcroviim\",\"gfx\":\"dataw\",\"sxxk\":\"dataendfpdoxtif\"}}],\"userProperties\":[{\"name\":\"lojlgs\",\"value\":\"dataystznwjg\"}],\"\":{\"riqbptvkjdowu\":\"datalefl\",\"jadippdnt\":\"dataasdztufm\"}}") .toObject(HDInsightMapReduceActivity.class); - Assertions.assertEquals("pmn", model.name()); - Assertions.assertEquals("nbmgpomcrev", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("zveisd", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ovrsrtldijgrbit", model.userProperties().get(0).name()); - Assertions.assertEquals("twayh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2049043657, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("fjbmowqwodmdrdty", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - Assertions.assertEquals("cxhnojfdiijch", model.jarLinkedService().referenceName()); + 
Assertions.assertEquals("saeabbxkldtwrry", model.name()); + Assertions.assertEquals("jr", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("x", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("lojlgs", model.userProperties().get(0).name()); + Assertions.assertEquals("snfeybkhyqouzzp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1382688900, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals("dukrjjfnexuqalwj", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); + Assertions.assertEquals("pmezlloflcilr", model.jarLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightMapReduceActivity model = new HDInsightMapReduceActivity().withName("pmn") - .withDescription("nbmgpomcrev").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + HDInsightMapReduceActivity model = new HDInsightMapReduceActivity().withName("saeabbxkldtwrry") + .withDescription("jr") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("zveisd") + new ActivityDependency().withActivity("x") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("n") .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tcozfjsfrbjrbqcb") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("z") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + new ActivityDependency().withActivity("iabyfzadeui") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("redcvwsbsd") + new ActivityDependency().withActivity("orqlkycwnb") .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ovrsrtldijgrbit").withValue("datadwuoxirziluzokx"), - new UserProperty().withName("hcjtwhwgbaj").withValue("datagctwamjjwvmugis"), - new UserProperty().withName("nyzmhhopedmk").withValue("datatdyxzg"), - new UserProperty().withName("qtgfbmocvb").withValue("datarvkqxhkhj"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("twayh") - .withParameters(mapOf("eyliisatbs", "dataqv", "qvernqkeiyy", "datahtkdcufzxxqdntvf"))) - .withPolicy(new ActivityPolicy().withTimeout("datatjoxwugdzwoczf").withRetry("datafcmpddzzdw") - 
.withRetryIntervalInSeconds(2049043657).withSecureInput(true).withSecureOutput(false) + .withUserProperties(Arrays.asList(new UserProperty().withName("lojlgs").withValue("dataystznwjg"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("snfeybkhyqouzzp") + .withParameters( + mapOf("lhnmydshgfdvwsh", "datadzsnv", "nfsjnrfpzlvae", "dataczy", "vvrk", "datajnskekhmo"))) + .withPolicy(new ActivityPolicy().withTimeout("datafazsiizcwha") + .withRetry("datahaet") + .withRetryIntervalInSeconds(1382688900) + .withSecureInput(false) + .withSecureOutput(true) .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("fjbmowqwodmdrdty") - .withParameters(mapOf("ciapvcsw", "dataqwa", "aqmrimletjv", "datajrpcpgcltf", "ab", "dataptf", - "zxupwrizkqnbiia", "datablhzfglpswg")))) - .withArguments(Arrays.asList("datasjpq", "dataas")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE).withClassName("dataosltch") - .withJarFilePath("datauhvdrfh") - .withJarLinkedService(new LinkedServiceReference().withReferenceName("cxhnojfdiijch") - .withParameters(mapOf("emnkykvruomw", "dataabtxr"))) - .withJarLibs(Arrays.asList("datatzffpcdqh", "datapz", "dataeqvkuvy", "datai")) - .withDefines(mapOf("ikstapkbd", "dataokolc", "mugkugwtg", "datay")); + .withStorageLinkedServices(Arrays.asList( + new LinkedServiceReference().withReferenceName("dukrjjfnexuqalwj") + .withParameters(mapOf("vjdcehlgryvkubf", "datanvbzemdff", "rxyffttsdtnqlggt", "datatgivporiok", + "yinly", "datajzimxzbuqcinje", "ddujzenagmhhm", "datagul")), + new LinkedServiceReference().withReferenceName("tbqzftm") + .withParameters(mapOf("oldxmgyjvjyxueu", "databmngbkqcprbwsnd", "xaktkbryolzbmdn", "datacbgbsc")), + new LinkedServiceReference().withReferenceName("ajggzuyokc") + .withParameters(mapOf("jfuwt", "datasbhdixzaobsenxg", "euwpivsltlyqc", "dataukbpwwfeixm", + "qcmsrzrcddlzga", "datapwndcjr")))) + .withArguments(Arrays.asList("datawqfg")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) + .withClassName("dataondaop") + .withJarFilePath("datar") + .withJarLinkedService(new LinkedServiceReference().withReferenceName("pmezlloflcilr") + .withParameters(mapOf("vdwknymqzmu", "datar", "vdjinuos", "datanuvtgjgp"))) + .withJarLibs(Arrays.asList("databzzfevwc", "datarbjgdvwamcvcne", "datak", "datakmenawsvximqkuy")) + .withDefines(mapOf("comqyqvywzhb", "dataxgsuuapktfvemwf")); model = BinaryData.fromObject(model).toObject(HDInsightMapReduceActivity.class); - Assertions.assertEquals("pmn", model.name()); - Assertions.assertEquals("nbmgpomcrev", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("zveisd", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ovrsrtldijgrbit", model.userProperties().get(0).name()); - Assertions.assertEquals("twayh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2049043657, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("fjbmowqwodmdrdty", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - 
Assertions.assertEquals("cxhnojfdiijch", model.jarLinkedService().referenceName()); + Assertions.assertEquals("saeabbxkldtwrry", model.name()); + Assertions.assertEquals("jr", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); + Assertions.assertEquals("x", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("lojlgs", model.userProperties().get(0).name()); + Assertions.assertEquals("snfeybkhyqouzzp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1382688900, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals(true, model.policy().secureOutput()); + Assertions.assertEquals("dukrjjfnexuqalwj", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); + Assertions.assertEquals("pmezlloflcilr", model.jarLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java index c3006170fe7f2..23834c13751c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java @@ -17,34 +17,41 @@ public final class HDInsightMapReduceActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightMapReduceActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"xngspazmxssb\",\"parameters\":{\"cakkewgz\":\"datanatbecuh\",\"sjlqt\":\"datao\",\"hqjdihjo\":\"datajewezcknpmev\",\"l\":\"datadwahehudicx\"}},{\"referenceName\":\"mh\",\"parameters\":{\"jqvmpzcjvogr\":\"datavxoiwb\"}}],\"arguments\":[\"dataopzyd\",\"dataspwwkdm\",\"datan\",\"datazdumjqdhrgwyq\"],\"getDebugInfo\":\"None\",\"className\":\"dataelmqkbepie\",\"jarFilePath\":\"datasveaerg\",\"jarLinkedService\":{\"referenceName\":\"brnlbfnuppwqks\",\"parameters\":{\"xyphdkxwstabgejo\":\"datatjfkjboyggrmzt\",\"tgoeayhojdgw\":\"dataveg\",\"dpxbwqgk\":\"datazcrssmbdjzc\"}},\"jarLibs\":[\"datadtj\"],\"defines\":{\"csflemxbma\":\"datavvuddnwjp\",\"vhlobjpumpq\":\"dataivopftzbk\"}}") + 
"{\"storageLinkedServices\":[{\"referenceName\":\"peeprmeb\",\"parameters\":{\"ozjces\":\"dataaacrqrov\",\"t\":\"datarcxug\",\"dbuz\":\"datajxyv\",\"asdrrfozz\":\"dataphogmrcmguel\"}},{\"referenceName\":\"ygolz\",\"parameters\":{\"fc\":\"datakb\",\"rkrjmzqnbwnl\":\"dataysvca\"}},{\"referenceName\":\"ozahrpv\",\"parameters\":{\"lypbrnjor\":\"datatcoq\",\"pebuvxxloqrdefhb\":\"datacrgrjxit\",\"dwg\":\"dataict\"}}],\"arguments\":[\"datanwesxzuklz\",\"datavmwjuqchcooty\",\"datacarjmhiewvcpys\"],\"getDebugInfo\":\"Always\",\"className\":\"datavkw\",\"jarFilePath\":\"datatbvyclg\",\"jarLinkedService\":{\"referenceName\":\"byxtprxtfwvn\",\"parameters\":{\"vdrjlgwzbrggnt\":\"datasnoakglygeuoolyw\"}},\"jarLibs\":[\"datarjtyhth\"],\"defines\":{\"hdxchaogawtvrnw\":\"datazdnhvkhkubpo\",\"fhiqliulfxgz\":\"datao\",\"uhca\":\"datartquxltekix\",\"sqcwjxatghuixc\":\"dataozsodp\"}}") .toObject(HDInsightMapReduceActivityTypeProperties.class); - Assertions.assertEquals("xngspazmxssb", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("brnlbfnuppwqks", model.jarLinkedService().referenceName()); + Assertions.assertEquals("peeprmeb", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); + Assertions.assertEquals("byxtprxtfwvn", model.jarLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightMapReduceActivityTypeProperties model = new HDInsightMapReduceActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("xngspazmxssb") - .withParameters(mapOf("cakkewgz", "datanatbecuh", "sjlqt", "datao", "hqjdihjo", "datajewezcknpmev", - "l", "datadwahehudicx")), - new LinkedServiceReference().withReferenceName("mh") - .withParameters(mapOf("jqvmpzcjvogr", "datavxoiwb")))) - .withArguments(Arrays.asList("dataopzyd", "dataspwwkdm", "datan", "datazdumjqdhrgwyq")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.NONE).withClassName("dataelmqkbepie") - .withJarFilePath("datasveaerg") - .withJarLinkedService(new LinkedServiceReference().withReferenceName("brnlbfnuppwqks") - .withParameters(mapOf("xyphdkxwstabgejo", "datatjfkjboyggrmzt", "tgoeayhojdgw", "dataveg", "dpxbwqgk", - "datazcrssmbdjzc"))) - .withJarLibs(Arrays.asList("datadtj")) - .withDefines(mapOf("csflemxbma", "datavvuddnwjp", "vhlobjpumpq", "dataivopftzbk")); + HDInsightMapReduceActivityTypeProperties model + = new HDInsightMapReduceActivityTypeProperties() + .withStorageLinkedServices( + Arrays + .asList( + new LinkedServiceReference().withReferenceName("peeprmeb") + .withParameters(mapOf("ozjces", "dataaacrqrov", "t", "datarcxug", "dbuz", "datajxyv", + "asdrrfozz", "dataphogmrcmguel")), + new LinkedServiceReference().withReferenceName("ygolz") + .withParameters(mapOf("fc", "datakb", "rkrjmzqnbwnl", "dataysvca")), + new LinkedServiceReference().withReferenceName("ozahrpv") + .withParameters(mapOf("lypbrnjor", "datatcoq", "pebuvxxloqrdefhb", "datacrgrjxit", + "dwg", "dataict")))) + .withArguments(Arrays.asList("datanwesxzuklz", "datavmwjuqchcooty", "datacarjmhiewvcpys")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) + .withClassName("datavkw") + .withJarFilePath("datatbvyclg") + .withJarLinkedService(new LinkedServiceReference().withReferenceName("byxtprxtfwvn") + .withParameters(mapOf("vdrjlgwzbrggnt", 
"datasnoakglygeuoolyw"))) + .withJarLibs(Arrays.asList("datarjtyhth")) + .withDefines(mapOf("hdxchaogawtvrnw", "datazdnhvkhkubpo", "fhiqliulfxgz", "datao", "uhca", + "datartquxltekix", "sqcwjxatghuixc", "dataozsodp")); model = BinaryData.fromObject(model).toObject(HDInsightMapReduceActivityTypeProperties.class); - Assertions.assertEquals("xngspazmxssb", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("brnlbfnuppwqks", model.jarLinkedService().referenceName()); + Assertions.assertEquals("peeprmeb", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); + Assertions.assertEquals("byxtprxtfwvn", model.jarLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java index d5b9a5c207490..33d766cc6cdd0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java @@ -23,61 +23,80 @@ public final class HDInsightPigActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightPigActivity model = BinaryData.fromString( - "{\"type\":\"HDInsightPig\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"ljfkqubnnmzzcbcx\",\"parameters\":{\"dfxxaoyisky\":\"datahlw\"}}],\"arguments\":\"datasxakbqbwjt\",\"getDebugInfo\":\"Always\",\"scriptPath\":\"datamqkgc\",\"scriptLinkedService\":{\"referenceName\":\"dxuwe\",\"parameters\":{\"yfaprhf\":\"dataegszwsn\"}},\"defines\":{\"x\":\"databcribqdsz\",\"yknfrhbknragpnmc\":\"dataaqlyw\"}},\"linkedServiceName\":{\"referenceName\":\"dfxhfgdrzeg\",\"parameters\":{\"arufjfordzwbsk\":\"dataebzoujhijlduuvxk\",\"zlia\":\"dataexgnfjwfo\"}},\"policy\":{\"timeout\":\"dataowlsrxy\",\"retry\":\"datavzqxpmfhe\",\"retryIntervalInSeconds\":1691661362,\"secureInput\":false,\"secureOutput\":false,\"\":{\"sueutby\":\"datazhylzzuxqqrmckqm\",\"nrivsiwwsohsuh\":\"datalzgkzhbnbnjpie\",\"mjtanrirrnqloom\":\"dataikouvpcjyhsz\"}},\"name\":\"ywyqgaskap\",\"description\":\"vknquipipgvfch\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"borwzpfgk\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"lbfpteiir\":\"datab\",\"bhcfswpda\":\"dataeopx\",\"wvmhbizi\":\"datavcabsmrfxucgvz\"}}],\"userProperties\":[{\"name\":\"seexdboatvsfyxdf\",\"value\":\"dataqrnawnqy\"}],\"\":{\"ngxogqvwchynrdt\":\"dataxacojcaraxorqjbo\",\"oqvcjspjp\":\"dataaptwmawypkpbmi\",\"bmluy\":\"datatsgvvizayg\"}}") + 
"{\"type\":\"djmv\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"mgho\",\"parameters\":{\"qcxrrkcvclqlibpm\":\"datacontsr\",\"zs\":\"datan\",\"gzljgrtfic\":\"datahkkktlodsyyzmf\",\"ufet\":\"dataejmzbasxapcegtc\"}}],\"arguments\":\"datattjnneynmgvqysgh\",\"getDebugInfo\":\"Failure\",\"scriptPath\":\"datax\",\"scriptLinkedService\":{\"referenceName\":\"mvfdo\",\"parameters\":{\"ktutwczdwmt\":\"datafcfidd\"}},\"defines\":{\"mobsjudpeed\":\"datauqhyq\",\"igtsrrlelpobm\":\"dataowverhtyc\",\"qgluhr\":\"datamdsisll\",\"fqb\":\"dataimojozhdcptxxb\"}},\"linkedServiceName\":{\"referenceName\":\"pvnrngiffsn\",\"parameters\":{\"vqqrcyeu\":\"datafqgu\",\"u\":\"datawvzagurgurpc\",\"dcrwoiqsrqebjgo\":\"datayuzhkefownc\",\"chkmq\":\"datamjcahda\"}},\"policy\":{\"timeout\":\"dataseaomqqb\",\"retry\":\"datawxcf\",\"retryIntervalInSeconds\":952320530,\"secureInput\":false,\"secureOutput\":false,\"\":{\"nxppgfeppne\":\"datarwn\",\"nvvkfbmrppjf\":\"datajvataeaoqknz\",\"s\":\"dataeabgpw\",\"n\":\"dataewi\"}},\"name\":\"cycgftelimqxw\",\"description\":\"npyexjr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ih\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"twnppstpqws\":\"datafciskl\"}},{\"activity\":\"eawolhlfffe\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Failed\",\"Skipped\"],\"\":{\"ipchdpdev\":\"datarsywiscoqtvx\",\"kfesh\":\"datampoqkc\",\"lpyp\":\"dataomtvkxps\",\"utspocr\":\"datagdetydqgyhuy\"}},{\"activity\":\"kkraapczmziiftj\",\"dependencyConditions\":[\"Completed\"],\"\":{\"wcflciooxybmk\":\"datazocfy\",\"cpwtj\":\"databwdfjcep\"}},{\"activity\":\"kuhrtqnbdgc\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"xhf\":\"databbccbqxwojve\"}}],\"userProperties\":[{\"name\":\"zftfyjcen\",\"value\":\"dataidlpmlxhzwyy\"}],\"\":{\"dnegwsyxdf\":\"datazbmlhgytkthevod\",\"i\":\"datajftcrjoh\"}}") .toObject(HDInsightPigActivity.class); - Assertions.assertEquals("ywyqgaskap", model.name()); - Assertions.assertEquals("vknquipipgvfch", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("borwzpfgk", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("seexdboatvsfyxdf", model.userProperties().get(0).name()); - Assertions.assertEquals("dfxhfgdrzeg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1691661362, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("cycgftelimqxw", model.name()); + Assertions.assertEquals("npyexjr", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("ih", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zftfyjcen", model.userProperties().get(0).name()); + Assertions.assertEquals("pvnrngiffsn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(952320530, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("ljfkqubnnmzzcbcx", 
model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dxuwe", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("mgho", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("mvfdo", model.scriptLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightPigActivity model = new HDInsightPigActivity().withName("ywyqgaskap").withDescription("vknquipipgvfch") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("borwzpfgk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("seexdboatvsfyxdf").withValue("dataqrnawnqy"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dfxhfgdrzeg") - .withParameters(mapOf("arufjfordzwbsk", "dataebzoujhijlduuvxk", "zlia", "dataexgnfjwfo"))) - .withPolicy(new ActivityPolicy().withTimeout("dataowlsrxy").withRetry("datavzqxpmfhe") - .withRetryIntervalInSeconds(1691661362).withSecureInput(false).withSecureOutput(false) + HDInsightPigActivity model = new HDInsightPigActivity().withName("cycgftelimqxw") + .withDescription("npyexjr") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("ih") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("eawolhlfffe") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("kkraapczmziiftj") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("kuhrtqnbdgc") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("zftfyjcen").withValue("dataidlpmlxhzwyy"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pvnrngiffsn") + .withParameters(mapOf("vqqrcyeu", "datafqgu", "u", "datawvzagurgurpc", "dcrwoiqsrqebjgo", + "datayuzhkefownc", "chkmq", "datamjcahda"))) + .withPolicy(new ActivityPolicy().withTimeout("dataseaomqqb") + .withRetry("datawxcf") + .withRetryIntervalInSeconds(952320530) + .withSecureInput(false) + .withSecureOutput(false) .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("ljfkqubnnmzzcbcx") - .withParameters(mapOf("dfxxaoyisky", "datahlw")))) - .withArguments("datasxakbqbwjt").withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withScriptPath("datamqkgc") - .withScriptLinkedService( - new 
LinkedServiceReference().withReferenceName("dxuwe").withParameters(mapOf("yfaprhf", "dataegszwsn"))) - .withDefines(mapOf("x", "databcribqdsz", "yknfrhbknragpnmc", "dataaqlyw")); + .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("mgho") + .withParameters(mapOf("qcxrrkcvclqlibpm", "datacontsr", "zs", "datan", "gzljgrtfic", + "datahkkktlodsyyzmf", "ufet", "dataejmzbasxapcegtc")))) + .withArguments("datattjnneynmgvqysgh") + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withScriptPath("datax") + .withScriptLinkedService(new LinkedServiceReference().withReferenceName("mvfdo") + .withParameters(mapOf("ktutwczdwmt", "datafcfidd"))) + .withDefines(mapOf("mobsjudpeed", "datauqhyq", "igtsrrlelpobm", "dataowverhtyc", "qgluhr", "datamdsisll", + "fqb", "dataimojozhdcptxxb")); model = BinaryData.fromObject(model).toObject(HDInsightPigActivity.class); - Assertions.assertEquals("ywyqgaskap", model.name()); - Assertions.assertEquals("vknquipipgvfch", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("borwzpfgk", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("seexdboatvsfyxdf", model.userProperties().get(0).name()); - Assertions.assertEquals("dfxhfgdrzeg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1691661362, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("cycgftelimqxw", model.name()); + Assertions.assertEquals("npyexjr", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("ih", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zftfyjcen", model.userProperties().get(0).name()); + Assertions.assertEquals("pvnrngiffsn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(952320530, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("ljfkqubnnmzzcbcx", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dxuwe", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("mgho", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("mvfdo", model.scriptLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java index 789bdb9fda804..d67adcd05566b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java @@ -17,36 +17,32 @@ public final class HDInsightPigActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightPigActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"zvqpnjqpwxf\",\"parameters\":{\"qopwmbdleegwlh\":\"datajpayvlnzwi\",\"ueizhydjkmxbghx\":\"datan\",\"uqkb\":\"dataotlfzbjs\",\"qkgausc\":\"datadicoaysar\"}},{\"referenceName\":\"gdyfyjeexmlkxvfe\",\"parameters\":{\"kkva\":\"dataojavqez\",\"lokhmkqyfatddwf\":\"datayyowjpsmnxccasu\",\"lwd\":\"datafjfatoilmwzkxa\",\"hvioccszdaxafu\":\"datatjfnmxzu\"}},{\"referenceName\":\"ccnqfwobnblu\",\"parameters\":{\"rfyetljr\":\"datafimlozlfdxj\"}},{\"referenceName\":\"cgeorm\",\"parameters\":{\"twiocuha\":\"datawcqhaonmfnf\"}}],\"arguments\":\"dataielhtukhei\",\"getDebugInfo\":\"Always\",\"scriptPath\":\"datatjcgdpqkfxd\",\"scriptLinkedService\":{\"referenceName\":\"dvrglql\",\"parameters\":{\"qo\":\"dataese\"}},\"defines\":{\"ckjuwkkvarff\":\"datauqqahmzlet\"}}") + "{\"storageLinkedServices\":[{\"referenceName\":\"txtc\",\"parameters\":{\"twlpuur\":\"datacd\",\"ixlzaavvuvhyerj\":\"dataxkphaqtnejufljqz\"}},{\"referenceName\":\"cxyxepllb\",\"parameters\":{\"spli\":\"datapfjib\",\"qpgncscw\":\"datafqjweigywj\",\"utyjdcvnanejmcr\":\"dataefdqnsuaoml\"}}],\"arguments\":\"dataecq\",\"getDebugInfo\":\"Failure\",\"scriptPath\":\"datayhkvtofxke\",\"scriptLinkedService\":{\"referenceName\":\"v\",\"parameters\":{\"ftk\":\"datarpognqv\",\"gochp\":\"datayghcmpyk\"}},\"defines\":{\"fnstvtbpwwot\":\"datasapk\",\"shdwive\":\"datazesfdrsgfp\"}}") .toObject(HDInsightPigActivityTypeProperties.class); - Assertions.assertEquals("zvqpnjqpwxf", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dvrglql", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("txtc", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("v", model.scriptLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { HDInsightPigActivityTypeProperties model = new HDInsightPigActivityTypeProperties() .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("zvqpnjqpwxf") - .withParameters(mapOf("qopwmbdleegwlh", "datajpayvlnzwi", "ueizhydjkmxbghx", "datan", "uqkb", - "dataotlfzbjs", "qkgausc", "datadicoaysar")), - new LinkedServiceReference().withReferenceName("gdyfyjeexmlkxvfe") - .withParameters(mapOf("kkva", "dataojavqez", "lokhmkqyfatddwf", "datayyowjpsmnxccasu", "lwd", - "datafjfatoilmwzkxa", "hvioccszdaxafu", "datatjfnmxzu")), - new LinkedServiceReference().withReferenceName("ccnqfwobnblu") - .withParameters(mapOf("rfyetljr", "datafimlozlfdxj")), - new LinkedServiceReference().withReferenceName("cgeorm") - .withParameters(mapOf("twiocuha", "datawcqhaonmfnf")))) - .withArguments("dataielhtukhei").withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withScriptPath("datatjcgdpqkfxd") - .withScriptLinkedService( - new LinkedServiceReference().withReferenceName("dvrglql").withParameters(mapOf("qo", "dataese"))) - .withDefines(mapOf("ckjuwkkvarff", "datauqqahmzlet")); + new 
LinkedServiceReference().withReferenceName("txtc") + .withParameters(mapOf("twlpuur", "datacd", "ixlzaavvuvhyerj", "dataxkphaqtnejufljqz")), + new LinkedServiceReference().withReferenceName("cxyxepllb") + .withParameters(mapOf("spli", "datapfjib", "qpgncscw", "datafqjweigywj", "utyjdcvnanejmcr", + "dataefdqnsuaoml")))) + .withArguments("dataecq") + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withScriptPath("datayhkvtofxke") + .withScriptLinkedService(new LinkedServiceReference().withReferenceName("v") + .withParameters(mapOf("ftk", "datarpognqv", "gochp", "datayghcmpyk"))) + .withDefines(mapOf("fnstvtbpwwot", "datasapk", "shdwive", "datazesfdrsgfp")); model = BinaryData.fromObject(model).toObject(HDInsightPigActivityTypeProperties.class); - Assertions.assertEquals("zvqpnjqpwxf", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dvrglql", model.scriptLinkedService().referenceName()); + Assertions.assertEquals("txtc", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("v", model.scriptLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java index d373c05093378..fbeccb3e9087f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java @@ -23,68 +23,82 @@ public final class HDInsightSparkActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightSparkActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightSpark\",\"typeProperties\":{\"rootPath\":\"datachzuap\",\"entryFilePath\":\"datahfhuuizyeyfki\",\"arguments\":[\"datayjffpuuyky\"],\"getDebugInfo\":\"Failure\",\"sparkJobLinkedService\":{\"referenceName\":\"eymlctnnsjcuf\",\"parameters\":{\"xg\":\"dataaxuvazzptlda\",\"htykebtvn\":\"dataxhbnqyewinle\",\"riehooxqkc\":\"datadcclpbhntoiviue\",\"kyiqjtx\":\"datayydtnl\"}},\"className\":\"grf\",\"proxyUser\":\"datadkj\",\"sparkConfig\":{\"vii\":\"datahivvoczsryp\",\"usnhnnekhfdlbc\":\"datajjqpsbbxkeygmqnu\",\"cccydldavozmibtk\":\"datacwfcbug\"}},\"linkedServiceName\":{\"referenceName\":\"tpgll\",\"parameters\":{\"zjwizrul\":\"dataankxxwtnrrohlkgz\",\"obenaah\":\"datakwyldttggcpqmk\",\"irhstwpbvwu\":\"datajnmychtvp\",\"nvsu\":\"datahpphjimoecqpqk\"}},\"policy\":{\"timeout\":\"dataxdlrjspxot\",\"retry\":\"datag\",\"retryIntervalInSeconds\":1487197700,\"secureInput\":true,\"secureOutput\":true,\"\":{\"lcdqvunvnggqacf\":\"dataldsftm\",\"dzruuscbs\":\"dataruwqbe\",\"ofoc\":\"datatjdioevifzqqs\",\"uuzpiooacjxsof\":\"datapphwv\"}},\"name\":\"iritpqqpynrl\",\"description\":\"u\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"jnfyub\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"pkleieafpvbsllyo\":\"dataqqnumpnav\"}},{\"activity\":\"iadhnbofeucct\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"rqsobusur\":\"dataylskbvvwdf\",\"knywxpmefbnccbvc\":\"datavjdxlbsnskcksfx\"}}],\"userProperties\":[{\"name\":\"kmifyx\",\"value\":\"datanu\"}],\"\":{\"w\":\"datasfnbtqdr\",\"yhnfqnekpxd\":\"dataidroidhbulvki\",\"jahnsmktk\":\"datadeahfg\"}}") + "{\"type\":\"nn\",\"typeProperties\":{\"rootPath\":\"datameacj\",\"entryFilePath\":\"dataaixhma\",\"arguments\":[\"datagvwvlqqnf\",\"datawrws\",\"datayblwjhpibgalefjs\"],\"getDebugInfo\":\"Failure\",\"sparkJobLinkedService\":{\"referenceName\":\"gmvzci\",\"parameters\":{\"tskprgztzcib\":\"datapqnshnbfdwxsj\",\"nbsxoe\":\"datawqmfhg\",\"hv\":\"dataephohjorguif\"}},\"className\":\"bmngkqej\",\"proxyUser\":\"dataw\",\"sparkConfig\":{\"hwcfxbywpw\":\"datazdlfayi\",\"j\":\"datavpglstxznkbj\",\"pdnnvepbocwbcx\":\"datazunrddy\"}},\"linkedServiceName\":{\"referenceName\":\"pummphb\",\"parameters\":{\"l\":\"dataive\"}},\"policy\":{\"timeout\":\"dataizyenajjx\",\"retry\":\"datadpnersmevhgs\",\"retryIntervalInSeconds\":417182956,\"secureInput\":true,\"secureOutput\":true,\"\":{\"syqpkpvbxgrgy\":\"dataqakbh\",\"eiocacngiaa\":\"datauqyxvzyivg\",\"teidfzof\":\"datagxdzhyc\"}},\"name\":\"cmlomlnprkiky\",\"description\":\"csgquphqnuitu\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"gabcw\",\"dependencyConditions\":[\"Completed\"],\"\":{\"zanr\":\"datakc\",\"hijjlaxeqehg\":\"datatjdmdbtbdtrqiu\",\"gb\":\"datajgvrawjom\"}},{\"activity\":\"dwfyagvhe\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Completed\"],\"\":{\"z\":\"datawpcupejz\"}},{\"activity\":\"dapxxgvcsvtflcj\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"pdfi\":\"datapf\",\"maqyesahvowlibr\":\"datarutcedeygsrrgd\",\"acyrkcdokka\":\"dataomdwz\",\"ysadjlqkoraqtbis\":\"datakaitihn\"}},{\"activity\":\"kcebjajl\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"ukghxde\":\"dataipkeonixx\",\"uevzqawjnwj\":\"dataqptvxibpzhkn\",\"eisjg\":\"datasiubp\"}}],\"userProperties\":[{\"name\":\"ldxsdrlbbpkjseft\",\"value\":\"datawu\"},{\"name\":\"fmakn\",\"value\":\"datanaqyeswinoecwabu\"}],\"\":{\"skbbeqayvkmptg\":\"
datal\",\"zjrlm\":\"dataqxiyllamdz\"}}") .toObject(HDInsightSparkActivity.class); - Assertions.assertEquals("iritpqqpynrl", model.name()); - Assertions.assertEquals("u", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("cmlomlnprkiky", model.name()); + Assertions.assertEquals("csgquphqnuitu", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("jnfyub", model.dependsOn().get(0).activity()); + Assertions.assertEquals("gabcw", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kmifyx", model.userProperties().get(0).name()); - Assertions.assertEquals("tpgll", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1487197700, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("ldxsdrlbbpkjseft", model.userProperties().get(0).name()); + Assertions.assertEquals("pummphb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(417182956, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - Assertions.assertEquals("eymlctnnsjcuf", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("grf", model.className()); + Assertions.assertEquals("gmvzci", model.sparkJobLinkedService().referenceName()); + Assertions.assertEquals("bmngkqej", model.className()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightSparkActivity model - = new HDInsightSparkActivity().withName("iritpqqpynrl").withDescription("u").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("jnfyub") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("iadhnbofeucct") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("kmifyx").withValue("datanu"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tpgll") - .withParameters(mapOf("zjwizrul", "dataankxxwtnrrohlkgz", "obenaah", "datakwyldttggcpqmk", - "irhstwpbvwu", "datajnmychtvp", "nvsu", "datahpphjimoecqpqk"))) - .withPolicy(new ActivityPolicy().withTimeout("dataxdlrjspxot").withRetry("datag") - .withRetryIntervalInSeconds(1487197700).withSecureInput(true).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withRootPath("datachzuap").withEntryFilePath("datahfhuuizyeyfki") - .withArguments(Arrays.asList("datayjffpuuyky")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) - .withSparkJobLinkedService(new LinkedServiceReference().withReferenceName("eymlctnnsjcuf") - .withParameters(mapOf("xg", "dataaxuvazzptlda", "htykebtvn", "dataxhbnqyewinle", "riehooxqkc", - "datadcclpbhntoiviue", "kyiqjtx", "datayydtnl"))) - 
.withClassName("grf").withProxyUser("datadkj").withSparkConfig(mapOf("vii", "datahivvoczsryp", - "usnhnnekhfdlbc", "datajjqpsbbxkeygmqnu", "cccydldavozmibtk", "datacwfcbug")); + HDInsightSparkActivity model = new HDInsightSparkActivity().withName("cmlomlnprkiky") + .withDescription("csgquphqnuitu") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("gabcw") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dwfyagvhe") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dapxxgvcsvtflcj") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("kcebjajl") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("ldxsdrlbbpkjseft").withValue("datawu"), + new UserProperty().withName("fmakn").withValue("datanaqyeswinoecwabu"))) + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("pummphb").withParameters(mapOf("l", "dataive"))) + .withPolicy(new ActivityPolicy().withTimeout("dataizyenajjx") + .withRetry("datadpnersmevhgs") + .withRetryIntervalInSeconds(417182956) + .withSecureInput(true) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withRootPath("datameacj") + .withEntryFilePath("dataaixhma") + .withArguments(Arrays.asList("datagvwvlqqnf", "datawrws", "datayblwjhpibgalefjs")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withSparkJobLinkedService(new LinkedServiceReference().withReferenceName("gmvzci") + .withParameters( + mapOf("tskprgztzcib", "datapqnshnbfdwxsj", "nbsxoe", "datawqmfhg", "hv", "dataephohjorguif"))) + .withClassName("bmngkqej") + .withProxyUser("dataw") + .withSparkConfig( + mapOf("hwcfxbywpw", "datazdlfayi", "j", "datavpglstxznkbj", "pdnnvepbocwbcx", "datazunrddy")); model = BinaryData.fromObject(model).toObject(HDInsightSparkActivity.class); - Assertions.assertEquals("iritpqqpynrl", model.name()); - Assertions.assertEquals("u", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("cmlomlnprkiky", model.name()); + Assertions.assertEquals("csgquphqnuitu", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("jnfyub", model.dependsOn().get(0).activity()); + Assertions.assertEquals("gabcw", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kmifyx", model.userProperties().get(0).name()); - Assertions.assertEquals("tpgll", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1487197700, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("ldxsdrlbbpkjseft", model.userProperties().get(0).name()); + 
Assertions.assertEquals("pummphb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(417182956, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - Assertions.assertEquals("eymlctnnsjcuf", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("grf", model.className()); + Assertions.assertEquals("gmvzci", model.sparkJobLinkedService().referenceName()); + Assertions.assertEquals("bmngkqej", model.className()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java index aa91c6eb672f5..0ac729b19f241 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java @@ -17,27 +17,30 @@ public final class HDInsightSparkActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightSparkActivityTypeProperties model = BinaryData.fromString( - "{\"rootPath\":\"datalq\",\"entryFilePath\":\"dataxjdolobtzr\",\"arguments\":[\"datalaurviyntcblpbz\",\"dataqtfbjkbfktelbl\",\"dataungrkjbdaxttoenf\"],\"getDebugInfo\":\"Always\",\"sparkJobLinkedService\":{\"referenceName\":\"ijfywmmqzb\",\"parameters\":{\"ecktcwgnkxjd\":\"datajw\"}},\"className\":\"diundzawotpiakle\",\"proxyUser\":\"dataaiabfntrm\",\"sparkConfig\":{\"fn\":\"datawmfeudcgdljbnfwd\"}}") + "{\"rootPath\":\"datadboesxpc\",\"entryFilePath\":\"datagl\",\"arguments\":[\"datah\",\"datacyggflosxtbdgpy\"],\"getDebugInfo\":\"Failure\",\"sparkJobLinkedService\":{\"referenceName\":\"dcglima\",\"parameters\":{\"pxdyyohbbtwpkg\":\"datakypyvztdhoyarcu\",\"ybvxumtxuv\":\"datavuemjcjeja\",\"egqmlv\":\"dataoteidcwrmdqqgt\",\"yfszluzmzgat\":\"dataym\"}},\"className\":\"groejsaerzckmcuk\",\"proxyUser\":\"dataz\",\"sparkConfig\":{\"wbvyraazscxi\":\"datazzrxgqxddvuiur\",\"kmybohax\":\"datajyjcshmtpdvuix\"}}") .toObject(HDInsightSparkActivityTypeProperties.class); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("ijfywmmqzb", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("diundzawotpiakle", model.className()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("dcglima", model.sparkJobLinkedService().referenceName()); + Assertions.assertEquals("groejsaerzckmcuk", model.className()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { HDInsightSparkActivityTypeProperties model - = new HDInsightSparkActivityTypeProperties().withRootPath("datalq").withEntryFilePath("dataxjdolobtzr") - .withArguments(Arrays.asList("datalaurviyntcblpbz", "dataqtfbjkbfktelbl", "dataungrkjbdaxttoenf")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withSparkJobLinkedService(new 
LinkedServiceReference().withReferenceName("ijfywmmqzb") - .withParameters(mapOf("ecktcwgnkxjd", "datajw"))) - .withClassName("diundzawotpiakle").withProxyUser("dataaiabfntrm") - .withSparkConfig(mapOf("fn", "datawmfeudcgdljbnfwd")); + = new HDInsightSparkActivityTypeProperties().withRootPath("datadboesxpc") + .withEntryFilePath("datagl") + .withArguments(Arrays.asList("datah", "datacyggflosxtbdgpy")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withSparkJobLinkedService(new LinkedServiceReference().withReferenceName("dcglima") + .withParameters(mapOf("pxdyyohbbtwpkg", "datakypyvztdhoyarcu", "ybvxumtxuv", "datavuemjcjeja", + "egqmlv", "dataoteidcwrmdqqgt", "yfszluzmzgat", "dataym"))) + .withClassName("groejsaerzckmcuk") + .withProxyUser("dataz") + .withSparkConfig(mapOf("wbvyraazscxi", "datazzrxgqxddvuiur", "kmybohax", "datajyjcshmtpdvuix")); model = BinaryData.fromObject(model).toObject(HDInsightSparkActivityTypeProperties.class); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("ijfywmmqzb", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("diundzawotpiakle", model.className()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("dcglima", model.sparkJobLinkedService().referenceName()); + Assertions.assertEquals("groejsaerzckmcuk", model.className()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java index d249b35b304e6..ce625b7ad8de0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java @@ -23,80 +23,85 @@ public final class HDInsightStreamingActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightStreamingActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightStreaming\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"zydyl\",\"parameters\":{\"e\":\"dataisddmfom\"}},{\"referenceName\":\"vfnxxkmrf\",\"parameters\":{\"rfhfjwikva\":\"datakprbmca\",\"uhqilvajctpw\":\"datajx\"}},{\"referenceName\":\"frfardjqwdr\",\"parameters\":{\"syd\":\"dataoo\",\"lsxewocwmadyelw\":\"datavdqcmegwajjzxcq\"}},{\"referenceName\":\"ll\",\"parameters\":{\"zqldakbijcxctn\":\"datahanfjrdcaw\",\"avoyhnollnuhoc\":\"datajxyczzwhwsidnq\",\"nhujcunyuasstok\":\"databeoxoewprtzofcu\"}}],\"arguments\":[\"datay\",\"datayblmcen\",\"datactc\"],\"getDebugInfo\":\"Always\",\"mapper\":\"datauplxksphzyu\",\"reducer\":\"datayts\",\"input\":\"datafmajswr\",\"output\":\"datadst\",\"filePaths\":[\"datakqzulosl\"],\"fileLinkedService\":{\"referenceName\":\"fnhej\",\"parameters\":{\"lvi\":\"dataugyuxcffbs\",\"zwtzdyz\":\"datas\",\"sojdmesoxj\":\"datafgn\",\"biouuqox\":\"datapwbgfhjwchvu\"}},\"combiner\":\"datatws\",\"commandEnvironment\":[\"datadghlokvisqzmhei\",\"datasioonnfjgrtkeg\",\"datatvwffvbvuxpyveav\",\"datatzbzykks\"],\"defines\":{\"zdcj\":\"dataan\"}},\"linkedServiceName\":{\"referenceName\":\"zpmwxvfrmvtwwb\",\"parameters\":{\"lqydhhypuvhucawm\":\"datavfosbrqeyw\"}},\"policy\":{\"timeout\":\"datajll\",\"retry\":\"databqvnbxgk\",\"retryIntervalInSeconds\":1536315462,\"secureInput\":false,\"secureOutput\":false,\"\":{\"qvzwummw\":\"databytzh\"}},\"name\":\"ax\",\"description\":\"bihgcduj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"rbtrmif\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"fkufv\":\"dataam\"}},{\"activity\":\"bcinuby\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Completed\"],\"\":{\"ajpyuwrggfgll\":\"datakozlpsf\",\"gzvytb\":\"datasu\",\"j\":\"dataqmxkuyyrcqsyqhu\"}}],\"userProperties\":[{\"name\":\"tgmmfdfdqoepw\",\"value\":\"datayeupkpyzaenar\"},{\"name\":\"yrlqiykhoygfgch\",\"value\":\"datacbt\"}],\"\":{\"yklyhmymkcc\":\"dataskgu\"}}") + 
"{\"type\":\"ludfdhiori\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"meuukk\",\"parameters\":{\"wzgb\":\"datatucmhpjmnxlfkm\",\"mrpbmxmxshfh\":\"databwmiap\",\"qimjnxpfvxyt\":\"datap\",\"zyr\":\"dataoqnytuc\"}}],\"arguments\":[\"dataebsinsoybegejr\"],\"getDebugInfo\":\"Failure\",\"mapper\":\"datahlhludimqjc\",\"reducer\":\"datagxrozcfcxks\",\"input\":\"dataj\",\"output\":\"databteakdrh\",\"filePaths\":[\"dataepmnxv\",\"datahqvcnlphlkxd\"],\"fileLinkedService\":{\"referenceName\":\"lyccdmkpohgatq\",\"parameters\":{\"wndnsggfzpst\":\"datadizd\"}},\"combiner\":\"datadmwsflrdyr\",\"commandEnvironment\":[\"datax\",\"datay\",\"dataxnmxkmdl\",\"datanlhsdtcgflevndl\"],\"defines\":{\"wf\":\"datarfcfl\",\"izloyqjrkted\":\"datajyuhuthqdfi\",\"jsogesrmah\":\"datauqve\"}},\"linkedServiceName\":{\"referenceName\":\"ow\",\"parameters\":{\"aeisboeapsrayd\":\"datawvupndnhq\",\"rkaymdgzbklioku\":\"datapuokmakkw\"}},\"policy\":{\"timeout\":\"datapamavxorldubbba\",\"retry\":\"dataxlirolaooldwd\",\"retryIntervalInSeconds\":391806464,\"secureInput\":false,\"secureOutput\":false,\"\":{\"kcczb\":\"datagpoeqgku\",\"kwjhkjvsvywnz\":\"dataobe\",\"aquke\":\"dataiqvqbvfihna\"}},\"name\":\"qzf\",\"description\":\"qa\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"t\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"uufenpzt\":\"dataywtgilhxaaf\",\"exqyroqklgvyce\":\"dataktnfeghcmxi\",\"twhyznlhak\":\"dataywuioi\"}},{\"activity\":\"fskgxfmdpsreqor\",\"dependencyConditions\":[\"Failed\"],\"\":{\"akdsmwajalsens\":\"dataqjqbwjiqrukbj\",\"vbslrhcceyobjs\":\"dataoslvf\",\"bbyvxk\":\"dataunz\"}},{\"activity\":\"fukuht\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Completed\",\"Completed\"],\"\":{\"ftmpjinrq\":\"datamjccnjv\",\"scngdu\":\"dataliqxahpy\",\"vhcwt\":\"dataw\",\"xigpmc\":\"dataqires\"}},{\"activity\":\"equocawcb\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Skipped\"],\"\":{\"aaav\":\"datapwhybtx\",\"dxdxrkrvmhhgvrxv\":\"dataiadygoad\"}}],\"userProperties\":[{\"name\":\"bvrbwafwix\",\"value\":\"datatnc\"}],\"\":{\"ftbwmuxc\":\"datafgv\"}}") .toObject(HDInsightStreamingActivity.class); - Assertions.assertEquals("ax", model.name()); - Assertions.assertEquals("bihgcduj", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("qzf", model.name()); + Assertions.assertEquals("qa", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("rbtrmif", model.dependsOn().get(0).activity()); + Assertions.assertEquals("t", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("tgmmfdfdqoepw", model.userProperties().get(0).name()); - Assertions.assertEquals("zpmwxvfrmvtwwb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1536315462, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("bvrbwafwix", model.userProperties().get(0).name()); + Assertions.assertEquals("ow", model.linkedServiceName().referenceName()); + Assertions.assertEquals(391806464, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("zydyl", 
model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("fnhej", model.fileLinkedService().referenceName()); + Assertions.assertEquals("meuukk", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("lyccdmkpohgatq", model.fileLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HDInsightStreamingActivity model = new HDInsightStreamingActivity().withName("ax").withDescription("bihgcduj") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + HDInsightStreamingActivity model = new HDInsightStreamingActivity().withName("qzf") + .withDescription("qa") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("rbtrmif") + new ActivityDependency().withActivity("t") .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) + DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bcinuby") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("fskgxfmdpsreqor") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fukuht") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("equocawcb") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED)) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("tgmmfdfdqoepw").withValue("datayeupkpyzaenar"), - new UserProperty().withName("yrlqiykhoygfgch").withValue("datacbt"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zpmwxvfrmvtwwb") - .withParameters(mapOf("lqydhhypuvhucawm", "datavfosbrqeyw"))) - .withPolicy(new ActivityPolicy().withTimeout("datajll").withRetry("databqvnbxgk") - .withRetryIntervalInSeconds(1536315462).withSecureInput(false).withSecureOutput(false) + .withUserProperties(Arrays.asList(new UserProperty().withName("bvrbwafwix").withValue("datatnc"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ow") + .withParameters(mapOf("aeisboeapsrayd", "datawvupndnhq", "rkaymdgzbklioku", "datapuokmakkw"))) + .withPolicy(new ActivityPolicy().withTimeout("datapamavxorldubbba") + .withRetry("dataxlirolaooldwd") + .withRetryIntervalInSeconds(391806464) + .withSecureInput(false) + .withSecureOutput(false) .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("zydyl").withParameters(mapOf("e", "dataisddmfom")), - new LinkedServiceReference().withReferenceName("vfnxxkmrf") - .withParameters(mapOf("rfhfjwikva", "datakprbmca", "uhqilvajctpw", "datajx")), - new LinkedServiceReference().withReferenceName("frfardjqwdr") - .withParameters(mapOf("syd", "dataoo", 
"lsxewocwmadyelw", "datavdqcmegwajjzxcq")), - new LinkedServiceReference().withReferenceName("ll") - .withParameters(mapOf("zqldakbijcxctn", "datahanfjrdcaw", "avoyhnollnuhoc", "datajxyczzwhwsidnq", - "nhujcunyuasstok", "databeoxoewprtzofcu")))) - .withArguments(Arrays.asList("datay", "datayblmcen", "datactc")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS).withMapper("datauplxksphzyu") - .withReducer("datayts").withInput("datafmajswr").withOutput("datadst") - .withFilePaths(Arrays.asList("datakqzulosl")) - .withFileLinkedService(new LinkedServiceReference().withReferenceName("fnhej") - .withParameters(mapOf("lvi", "dataugyuxcffbs", "zwtzdyz", "datas", "sojdmesoxj", "datafgn", "biouuqox", - "datapwbgfhjwchvu"))) - .withCombiner("datatws") - .withCommandEnvironment( - Arrays.asList("datadghlokvisqzmhei", "datasioonnfjgrtkeg", "datatvwffvbvuxpyveav", "datatzbzykks")) - .withDefines(mapOf("zdcj", "dataan")); + .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("meuukk") + .withParameters(mapOf("wzgb", "datatucmhpjmnxlfkm", "mrpbmxmxshfh", "databwmiap", "qimjnxpfvxyt", + "datap", "zyr", "dataoqnytuc")))) + .withArguments(Arrays.asList("dataebsinsoybegejr")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withMapper("datahlhludimqjc") + .withReducer("datagxrozcfcxks") + .withInput("dataj") + .withOutput("databteakdrh") + .withFilePaths(Arrays.asList("dataepmnxv", "datahqvcnlphlkxd")) + .withFileLinkedService(new LinkedServiceReference().withReferenceName("lyccdmkpohgatq") + .withParameters(mapOf("wndnsggfzpst", "datadizd"))) + .withCombiner("datadmwsflrdyr") + .withCommandEnvironment(Arrays.asList("datax", "datay", "dataxnmxkmdl", "datanlhsdtcgflevndl")) + .withDefines(mapOf("wf", "datarfcfl", "izloyqjrkted", "datajyuhuthqdfi", "jsogesrmah", "datauqve")); model = BinaryData.fromObject(model).toObject(HDInsightStreamingActivity.class); - Assertions.assertEquals("ax", model.name()); - Assertions.assertEquals("bihgcduj", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("qzf", model.name()); + Assertions.assertEquals("qa", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("rbtrmif", model.dependsOn().get(0).activity()); + Assertions.assertEquals("t", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("tgmmfdfdqoepw", model.userProperties().get(0).name()); - Assertions.assertEquals("zpmwxvfrmvtwwb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1536315462, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("bvrbwafwix", model.userProperties().get(0).name()); + Assertions.assertEquals("ow", model.linkedServiceName().referenceName()); + Assertions.assertEquals(391806464, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("zydyl", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("fnhej", model.fileLinkedService().referenceName()); + Assertions.assertEquals("meuukk", 
model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("lyccdmkpohgatq", model.fileLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java index 1a4bf47a1f673..acf6ad060d3d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java @@ -17,36 +17,34 @@ public final class HDInsightStreamingActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HDInsightStreamingActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"fawfeeatvnmm\",\"parameters\":{\"gwtcll\":\"datawfqrykikh\",\"fohtsmkf\":\"datawaz\"}},{\"referenceName\":\"oxbavfseh\",\"parameters\":{\"qszn\":\"dataqion\",\"o\":\"databi\",\"dteyvpv\":\"dataexjcrw\"}},{\"referenceName\":\"cqjgwtiasfbp\",\"parameters\":{\"seh\":\"dataxxxhbrysns\",\"wqg\":\"dataegvwbykrndxbkv\",\"qyedhyfncwiyfzu\":\"datafhl\",\"ccgtujiwzbzed\":\"databsaaxstnziv\"}}],\"arguments\":[\"dataygjhclny\",\"dataktc\",\"datamct\"],\"getDebugInfo\":\"Always\",\"mapper\":\"databrxmxqskemtajjf\",\"reducer\":\"datak\",\"input\":\"dataqpgbticn\",\"output\":\"datadubocmjiib\",\"filePaths\":[\"datawalhaw\",\"dataptiqfu\",\"dataavtapcxsmap\"],\"fileLinkedService\":{\"referenceName\":\"mmwylrv\",\"parameters\":{\"mcbtumt\":\"dataelpuxhku\",\"hyqexujll\":\"datarcvo\"}},\"combiner\":\"dataeegvqbsythycdck\",\"commandEnvironment\":[\"dataommgfwx\",\"datahrcmg\"],\"defines\":{\"senyehmwzgfanke\":\"datagosclhjgckkbn\",\"g\":\"datalorosa\"}}") + "{\"storageLinkedServices\":[{\"referenceName\":\"bvf\",\"parameters\":{\"gfdsrngy\":\"datahhzmxptkbehpyw\"}}],\"arguments\":[\"datazqwcmmolpfcvvk\",\"datannyksskuscdnn\"],\"getDebugInfo\":\"Failure\",\"mapper\":\"datata\",\"reducer\":\"datayrhttjtqwwlaxh\",\"input\":\"datajwpc\",\"output\":\"datatwywhrzntmzzzavx\",\"filePaths\":[\"datakexspoiq\",\"dataukytteai\"],\"fileLinkedService\":{\"referenceName\":\"wopkov\",\"parameters\":{\"yqmgvqthlim\":\"dataaigdvcbylds\"}},\"combiner\":\"datazr\",\"commandEnvironment\":[\"datagyon\"],\"defines\":{\"sijcjnbtgfitpx\":\"dataxwldktphn\",\"ngj\":\"datakb\",\"fzkujgeppxiyo\":\"databbyqvbd\"}}") .toObject(HDInsightStreamingActivityTypeProperties.class); - Assertions.assertEquals("fawfeeatvnmm", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("mmwylrv", model.fileLinkedService().referenceName()); + Assertions.assertEquals("bvf", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("wopkov", model.fileLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { 
HDInsightStreamingActivityTypeProperties model = new HDInsightStreamingActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("fawfeeatvnmm") - .withParameters(mapOf("gwtcll", "datawfqrykikh", "fohtsmkf", "datawaz")), - new LinkedServiceReference().withReferenceName("oxbavfseh") - .withParameters(mapOf("qszn", "dataqion", "o", "databi", "dteyvpv", "dataexjcrw")), - new LinkedServiceReference().withReferenceName("cqjgwtiasfbp") - .withParameters(mapOf("seh", "dataxxxhbrysns", "wqg", "dataegvwbykrndxbkv", "qyedhyfncwiyfzu", - "datafhl", "ccgtujiwzbzed", "databsaaxstnziv")))) - .withArguments(Arrays.asList("dataygjhclny", "dataktc", "datamct")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS).withMapper("databrxmxqskemtajjf") - .withReducer("datak").withInput("dataqpgbticn").withOutput("datadubocmjiib") - .withFilePaths(Arrays.asList("datawalhaw", "dataptiqfu", "dataavtapcxsmap")) - .withFileLinkedService(new LinkedServiceReference().withReferenceName("mmwylrv") - .withParameters(mapOf("mcbtumt", "dataelpuxhku", "hyqexujll", "datarcvo"))) - .withCombiner("dataeegvqbsythycdck").withCommandEnvironment(Arrays.asList("dataommgfwx", "datahrcmg")) - .withDefines(mapOf("senyehmwzgfanke", "datagosclhjgckkbn", "g", "datalorosa")); + .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("bvf") + .withParameters(mapOf("gfdsrngy", "datahhzmxptkbehpyw")))) + .withArguments(Arrays.asList("datazqwcmmolpfcvvk", "datannyksskuscdnn")) + .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) + .withMapper("datata") + .withReducer("datayrhttjtqwwlaxh") + .withInput("datajwpc") + .withOutput("datatwywhrzntmzzzavx") + .withFilePaths(Arrays.asList("datakexspoiq", "dataukytteai")) + .withFileLinkedService(new LinkedServiceReference().withReferenceName("wopkov") + .withParameters(mapOf("yqmgvqthlim", "dataaigdvcbylds"))) + .withCombiner("datazr") + .withCommandEnvironment(Arrays.asList("datagyon")) + .withDefines(mapOf("sijcjnbtgfitpx", "dataxwldktphn", "ngj", "datakb", "fzkujgeppxiyo", "databbyqvbd")); model = BinaryData.fromObject(model).toObject(HDInsightStreamingActivityTypeProperties.class); - Assertions.assertEquals("fawfeeatvnmm", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("mmwylrv", model.fileLinkedService().referenceName()); + Assertions.assertEquals("bvf", model.storageLinkedServices().get(0).referenceName()); + Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); + Assertions.assertEquals("wopkov", model.fileLinkedService().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java index 28ee742b87004..d6057922068df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java @@ -11,13 +11,13 @@ public final class HdfsLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HdfsLocation model = 
BinaryData.fromString( - "{\"type\":\"HdfsLocation\",\"folderPath\":\"dataoxdjxldnaryy\",\"fileName\":\"datazkdolrndwdbvxvza\",\"\":{\"hmcxqqxmyzkl\":\"dataoyqxlunkf\",\"rqra\":\"dataoanpohrvm\"}}") + "{\"type\":\"fm\",\"folderPath\":\"datask\",\"fileName\":\"dataxvlzjxplhpevasyn\",\"\":{\"elbqrvvbqvkn\":\"datajy\",\"nstlpwqpjn\":\"datapecqxgiqasifubn\",\"psvyouweuiyxfw\":\"datajkht\"}}") .toObject(HdfsLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HdfsLocation model = new HdfsLocation().withFolderPath("dataoxdjxldnaryy").withFileName("datazkdolrndwdbvxvza"); + HdfsLocation model = new HdfsLocation().withFolderPath("datask").withFileName("dataxvlzjxplhpevasyn"); model = BinaryData.fromObject(model).toObject(HdfsLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java index a1baafd1764c0..4ae02799af122 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java @@ -12,21 +12,26 @@ public final class HdfsReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HdfsReadSettings model = BinaryData.fromString( - "{\"type\":\"HdfsReadSettings\",\"recursive\":\"dataerxfe\",\"wildcardFolderPath\":\"datamzsekvsuzyowr\",\"wildcardFileName\":\"datavofrenuvp\",\"fileListPath\":\"dataltnyyeyj\",\"enablePartitionDiscovery\":\"datafpbxnretpg\",\"partitionRootPath\":\"datatohruqtximrxeyz\",\"modifiedDatetimeStart\":\"datanxb\",\"modifiedDatetimeEnd\":\"datayglfyfcsbkjhoxtb\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"databpef\",\"tempScriptPath\":\"datajpnixd\",\"distcpOptions\":\"datajkfvmrnwgeaj\"},\"deleteFilesAfterCompletion\":\"datafeiiri\",\"maxConcurrentConnections\":\"datajdnknbt\",\"disableMetricsCollection\":\"datardep\",\"\":{\"onovfnob\":\"datazhkp\",\"mmzxpsrlbppjq\":\"datawhutvcdtgxsyfuh\"}}") + "{\"type\":\"rmficqrdervtru\",\"recursive\":\"dataetdruugimviefbje\",\"wildcardFolderPath\":\"dataiyjkhjuuep\",\"wildcardFileName\":\"datauqwnajb\",\"fileListPath\":\"dataxqsvax\",\"enablePartitionDiscovery\":\"datawisdwtugadxupipg\",\"partitionRootPath\":\"datawiwhtjoxxllh\",\"modifiedDatetimeStart\":\"dataunnwm\",\"modifiedDatetimeEnd\":\"dataxyawxkd\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"datavhyu\",\"tempScriptPath\":\"dataihapcqmcvur\",\"distcpOptions\":\"dataubljnizwztlcrxfi\"},\"deleteFilesAfterCompletion\":\"datafgxn\",\"maxConcurrentConnections\":\"datal\",\"disableMetricsCollection\":\"datartcsmpm\",\"\":{\"xcgcdfel\":\"dataxb\"}}") .toObject(HdfsReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HdfsReadSettings model = new HdfsReadSettings().withMaxConcurrentConnections("datajdnknbt") - .withDisableMetricsCollection("datardep").withRecursive("dataerxfe") - .withWildcardFolderPath("datamzsekvsuzyowr").withWildcardFileName("datavofrenuvp") - .withFileListPath("dataltnyyeyj").withEnablePartitionDiscovery("datafpbxnretpg") - .withPartitionRootPath("datatohruqtximrxeyz").withModifiedDatetimeStart("datanxb") - 
.withModifiedDatetimeEnd("datayglfyfcsbkjhoxtb") - .withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("databpef") - .withTempScriptPath("datajpnixd").withDistcpOptions("datajkfvmrnwgeaj")) - .withDeleteFilesAfterCompletion("datafeiiri"); + HdfsReadSettings model = new HdfsReadSettings().withMaxConcurrentConnections("datal") + .withDisableMetricsCollection("datartcsmpm") + .withRecursive("dataetdruugimviefbje") + .withWildcardFolderPath("dataiyjkhjuuep") + .withWildcardFileName("datauqwnajb") + .withFileListPath("dataxqsvax") + .withEnablePartitionDiscovery("datawisdwtugadxupipg") + .withPartitionRootPath("datawiwhtjoxxllh") + .withModifiedDatetimeStart("dataunnwm") + .withModifiedDatetimeEnd("dataxyawxkd") + .withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("datavhyu") + .withTempScriptPath("dataihapcqmcvur") + .withDistcpOptions("dataubljnizwztlcrxfi")) + .withDeleteFilesAfterCompletion("datafgxn"); model = BinaryData.fromObject(model).toObject(HdfsReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java index ee4b65fcdfb2c..4fb6b5867b107 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java @@ -12,16 +12,20 @@ public final class HdfsSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HdfsSource model = BinaryData.fromString( - "{\"type\":\"HdfsSource\",\"recursive\":\"dataskzw\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"datah\",\"tempScriptPath\":\"datavhzrxcae\",\"distcpOptions\":\"dataxoavlwwpvjrnjwvc\"},\"sourceRetryCount\":\"datarqlceflgsndur\",\"sourceRetryWait\":\"dataozjwm\",\"maxConcurrentConnections\":\"datadehjloz\",\"disableMetricsCollection\":\"datawokuxedpqwzzzi\",\"\":{\"ve\":\"dataxjgxrhajrubcvu\",\"uclkbw\":\"datadfmdjnfeealp\",\"vkhfzldzz\":\"datamwdrvkbcsvn\"}}") + "{\"type\":\"f\",\"recursive\":\"datalaxjmnbmfmloq\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"dataswvtd\",\"tempScriptPath\":\"datapicwnbtvlrs\",\"distcpOptions\":\"datatrmodknxerkaiik\"},\"sourceRetryCount\":\"dataxpqrkeyhj\",\"sourceRetryWait\":\"dataezkbrvtaul\",\"maxConcurrentConnections\":\"dataqvtpkodijcn\",\"disableMetricsCollection\":\"datao\",\"\":{\"hijbfiyuho\":\"datacyqjj\",\"phyacdhjm\":\"dataulevpdbirhgj\",\"jtkuyvytfuq\":\"datanvgkxshh\",\"kf\":\"datastqbxpyfawkjei\"}}") .toObject(HdfsSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HdfsSource model = new HdfsSource().withSourceRetryCount("datarqlceflgsndur").withSourceRetryWait("dataozjwm") - .withMaxConcurrentConnections("datadehjloz").withDisableMetricsCollection("datawokuxedpqwzzzi") - .withRecursive("dataskzw").withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("datah") - .withTempScriptPath("datavhzrxcae").withDistcpOptions("dataxoavlwwpvjrnjwvc")); + HdfsSource model = new HdfsSource().withSourceRetryCount("dataxpqrkeyhj") + .withSourceRetryWait("dataezkbrvtaul") + .withMaxConcurrentConnections("dataqvtpkodijcn") + .withDisableMetricsCollection("datao") + .withRecursive("datalaxjmnbmfmloq") + 
.withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("dataswvtd") + .withTempScriptPath("datapicwnbtvlrs") + .withDistcpOptions("datatrmodknxerkaiik")); model = BinaryData.fromObject(model).toObject(HdfsSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java index 49ba7e7615268..e29ca1d0c4e3a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class HiveDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HiveDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datanjqhdheosx\",\"table\":\"datafudmpfhwyp\",\"schema\":\"datatjtntc\"}") + .fromString("{\"tableName\":\"dataphceeivk\",\"table\":\"dataxlepghcnu\",\"schema\":\"dataqpvtwttf\"}") .toObject(HiveDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HiveDatasetTypeProperties model = new HiveDatasetTypeProperties().withTableName("datanjqhdheosx") - .withTable("datafudmpfhwyp").withSchema("datatjtntc"); + HiveDatasetTypeProperties model = new HiveDatasetTypeProperties().withTableName("dataphceeivk") + .withTable("dataxlepghcnu") + .withSchema("dataqpvtwttf"); model = BinaryData.fromObject(model).toObject(HiveDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java index 849c01bf1cccc..e9907cb06e208 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java @@ -19,32 +19,37 @@ public final class HiveObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HiveObjectDataset model = BinaryData.fromString( - "{\"type\":\"HiveObject\",\"typeProperties\":{\"tableName\":\"datazsggd\",\"table\":\"datatfcbrtsrdplqdyza\",\"schema\":\"dataasfzrguzliyvsb\"},\"description\":\"inv\",\"structure\":\"datadjuljgxotuda\",\"schema\":\"datai\",\"linkedServiceName\":{\"referenceName\":\"a\",\"parameters\":{\"l\":\"datahulzugifgsp\"}},\"parameters\":{\"bwmgksrlmspp\":{\"type\":\"Float\",\"defaultValue\":\"datascygimizluk\"},\"zuqix\":{\"type\":\"Int\",\"defaultValue\":\"dataszthjtryjskdiylg\"},\"koe\":{\"type\":\"Int\",\"defaultValue\":\"databqowgvmxwbohxd\"}},\"annotations\":[\"datah\",\"datanakaj\",\"datascmne\",\"datavlumqeumz\"],\"folder\":{\"name\":\"mgqaeivjq\"},\"\":{\"dzahktxvcbic\":\"datarbgbzgfh\",\"tpxjvtwkyjdpayx\":\"dataecthotbkjwh\",\"qztjfkgb\":\"datapqiwuzr\",\"en\":\"dataqqjobsyn\"}}") + 
"{\"type\":\"ybivywotjnjuv\",\"typeProperties\":{\"tableName\":\"datays\",\"table\":\"dataqqjhdfhfaob\",\"schema\":\"datajcsbozvcdqwssydv\"},\"description\":\"ijrdl\",\"structure\":\"dataaeyocpkvlt\",\"schema\":\"datadzf\",\"linkedServiceName\":{\"referenceName\":\"npbdrcibjxnnnoz\",\"parameters\":{\"pshtisy\":\"datavdtuoamqobqeh\"}},\"parameters\":{\"vwdxgyypmxqzlm\":{\"type\":\"SecureString\",\"defaultValue\":\"datatrzjwnzwckzebm\"},\"ilyeshox\":{\"type\":\"String\",\"defaultValue\":\"datarcatkuhskegdkvv\"},\"busxyugozwp\":{\"type\":\"Int\",\"defaultValue\":\"datajdmu\"}},\"annotations\":[\"datagzumnotii\",\"datakkbyg\",\"datagiq\",\"datawyshybbnhtt\"],\"folder\":{\"name\":\"onzsurqcoj\"},\"\":{\"hjfvnhwsgn\":\"datazhzzcarciuoxyipd\",\"ssjgbfbb\":\"dataputfelfchnu\",\"zunf\":\"datachxxc\"}}") .toObject(HiveObjectDataset.class); - Assertions.assertEquals("inv", model.description()); - Assertions.assertEquals("a", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bwmgksrlmspp").type()); - Assertions.assertEquals("mgqaeivjq", model.folder().name()); + Assertions.assertEquals("ijrdl", model.description()); + Assertions.assertEquals("npbdrcibjxnnnoz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("vwdxgyypmxqzlm").type()); + Assertions.assertEquals("onzsurqcoj", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HiveObjectDataset model = new HiveObjectDataset().withDescription("inv").withStructure("datadjuljgxotuda") - .withSchema("datai") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("a").withParameters(mapOf("l", "datahulzugifgsp"))) - .withParameters(mapOf("bwmgksrlmspp", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datascygimizluk"), "zuqix", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataszthjtryjskdiylg"), - "koe", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databqowgvmxwbohxd"))) - .withAnnotations(Arrays.asList("datah", "datanakaj", "datascmne", "datavlumqeumz")) - .withFolder(new DatasetFolder().withName("mgqaeivjq")).withTableName("datazsggd") - .withTable("datatfcbrtsrdplqdyza").withSchemaTypePropertiesSchema("dataasfzrguzliyvsb"); + HiveObjectDataset model = new HiveObjectDataset().withDescription("ijrdl") + .withStructure("dataaeyocpkvlt") + .withSchema("datadzf") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("npbdrcibjxnnnoz") + .withParameters(mapOf("pshtisy", "datavdtuoamqobqeh"))) + .withParameters(mapOf("vwdxgyypmxqzlm", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datatrzjwnzwckzebm"), + "ilyeshox", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datarcatkuhskegdkvv"), + "busxyugozwp", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datajdmu"))) + .withAnnotations(Arrays.asList("datagzumnotii", "datakkbyg", "datagiq", "datawyshybbnhtt")) + .withFolder(new DatasetFolder().withName("onzsurqcoj")) + .withTableName("datays") + .withTable("dataqqjhdfhfaob") + .withSchemaTypePropertiesSchema("datajcsbozvcdqwssydv"); model = BinaryData.fromObject(model).toObject(HiveObjectDataset.class); - Assertions.assertEquals("inv", model.description()); - Assertions.assertEquals("a", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bwmgksrlmspp").type()); - Assertions.assertEquals("mgqaeivjq", model.folder().name()); + Assertions.assertEquals("ijrdl", model.description()); + Assertions.assertEquals("npbdrcibjxnnnoz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("vwdxgyypmxqzlm").type()); + Assertions.assertEquals("onzsurqcoj", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java index 9d756b64c5969..6f93e8631db35 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java @@ -11,16 +11,19 @@ public final class HiveSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HiveSource model = BinaryData.fromString( - "{\"type\":\"HiveSource\",\"query\":\"dataeydmeuimlhyze\",\"queryTimeout\":\"dataivkzrvya\",\"additionalColumns\":\"dataqgyui\",\"sourceRetryCount\":\"dataelyjduzapnopoto\",\"sourceRetryWait\":\"datarrqcaglyt\",\"maxConcurrentConnections\":\"datacbdpczmzuwr\",\"disableMetricsCollection\":\"datahfwce\",\"\":{\"cyfccnwmdpbso\":\"dataaqaviqskylwpq\",\"fxpveruuckrzw\":\"datakn\"}}") + "{\"type\":\"xmnoa\",\"query\":\"datahoaqj\",\"queryTimeout\":\"datazqz\",\"additionalColumns\":\"datadipnhbsvrlr\",\"sourceRetryCount\":\"datayadyfnxtlln\",\"sourceRetryWait\":\"datamdgsvaekuovwi\",\"maxConcurrentConnections\":\"dataykprrddbenf\",\"disableMetricsCollection\":\"datafszmxpos\",\"\":{\"j\":\"datacvyuldkpdleeslj\"}}") .toObject(HiveSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HiveSource model - = new HiveSource().withSourceRetryCount("dataelyjduzapnopoto").withSourceRetryWait("datarrqcaglyt") - .withMaxConcurrentConnections("datacbdpczmzuwr").withDisableMetricsCollection("datahfwce") - .withQueryTimeout("dataivkzrvya").withAdditionalColumns("dataqgyui").withQuery("dataeydmeuimlhyze"); + HiveSource model = new HiveSource().withSourceRetryCount("datayadyfnxtlln") + .withSourceRetryWait("datamdgsvaekuovwi") + .withMaxConcurrentConnections("dataykprrddbenf") + .withDisableMetricsCollection("datafszmxpos") + .withQueryTimeout("datazqz") + .withAdditionalColumns("datadipnhbsvrlr") + .withQuery("datahoaqj"); model = BinaryData.fromObject(model).toObject(HiveSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java index 614286e8abcc7..5310f0de23094 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java @@ -21,37 +21,41 @@ public final class HttpDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { 
HttpDataset model = BinaryData.fromString( - "{\"type\":\"HttpFile\",\"typeProperties\":{\"relativeUrl\":\"dataxooi\",\"requestMethod\":\"datahiebruptls\",\"requestBody\":\"dataqzgaqsosrnjlvgrg\",\"additionalHeaders\":\"datahuoxrqhjninpesw\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataqkgebz\",\"deserializer\":\"datam\",\"\":{\"ojzdvmsnao\":\"dataiu\",\"hdbitq\":\"dataxsxoxvimdvet\",\"omr\":\"databyujs\"}},\"compression\":{\"type\":\"datau\",\"level\":\"datarmsdbvqxgfyg\",\"\":{\"sesboynpytporr\":\"dataxbdpbcehwbd\",\"anetinqxdhnpjne\":\"datakxtfc\",\"mltpmr\":\"datajighd\",\"qhngqqxjbsot\":\"datawvwetqffuxvfh\"}}},\"description\":\"lmr\",\"structure\":\"datau\",\"schema\":\"datadeatwxpx\",\"linkedServiceName\":{\"referenceName\":\"xedhxbboceksra\",\"parameters\":{\"wurxdqhv\":\"datahlugfnlvvk\"}},\"parameters\":{\"ivlqcwyzhndqkzst\":{\"type\":\"Float\",\"defaultValue\":\"datanntfkqpwqcnbn\"},\"eirta\":{\"type\":\"Object\",\"defaultValue\":\"dataecdl\"},\"viudzpsj\":{\"type\":\"Int\",\"defaultValue\":\"datawcimtcau\"},\"unlofwuzebfq\":{\"type\":\"Int\",\"defaultValue\":\"datalujm\"}},\"annotations\":[\"datajbhzyenf\",\"datapetxeudwkh\",\"datalckdoxocjcdevzp\"],\"folder\":{\"name\":\"ortwwyjm\"},\"\":{\"jnnhbcjywkdywks\":\"datalhfxmr\",\"ptplkossjbzv\":\"dataavuafanefic\"}}") + "{\"type\":\"gwaakktbjortz\",\"typeProperties\":{\"relativeUrl\":\"dataejexfdlhuhd\",\"requestMethod\":\"databgywadrklpdye\",\"requestBody\":\"datarwcflvxbocayw\",\"additionalHeaders\":\"datavuhzmolhveoln\",\"format\":{\"type\":\"mhwdxqupyml\",\"serializer\":\"datalmnjqzm\",\"deserializer\":\"datanhitrnwqgqrbth\",\"\":{\"qeiaddp\":\"dataiqnrjocogwfvkywz\",\"mizunzbqvioync\":\"datahuvnlmdcnutie\"}},\"compression\":{\"type\":\"dataqhhvvwz\",\"level\":\"datajaaaiaibtvavly\",\"\":{\"rl\":\"datalocnwmefzvzuzq\",\"v\":\"datao\",\"sfyqncowm\":\"datazgyhen\",\"ywjiaaosla\":\"datanozf\"}}},\"description\":\"ajqhsnsejplis\",\"structure\":\"datayljzbk\",\"schema\":\"datalfjwxgvtkjctv\",\"linkedServiceName\":{\"referenceName\":\"peawzzkvfccozv\",\"parameters\":{\"raitrms\":\"dataph\",\"ptctxpoegyckmemf\":\"datakxtuytg\",\"mwrv\":\"datarcclclfkfvyjo\",\"x\":\"dataoipjy\"}},\"parameters\":{\"qwh\":{\"type\":\"String\",\"defaultValue\":\"dataewfzvvpay\"}},\"annotations\":[\"datayandblk\",\"datantcv\"],\"folder\":{\"name\":\"fmo\"},\"\":{\"pff\":\"datatfvxuos\",\"wyjzuakkiubeqk\":\"datapjpjmsbzzjsnyf\"}}") .toObject(HttpDataset.class); - Assertions.assertEquals("lmr", model.description()); - Assertions.assertEquals("xedhxbboceksra", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ivlqcwyzhndqkzst").type()); - Assertions.assertEquals("ortwwyjm", model.folder().name()); + Assertions.assertEquals("ajqhsnsejplis", model.description()); + Assertions.assertEquals("peawzzkvfccozv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("qwh").type()); + Assertions.assertEquals("fmo", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HttpDataset model = new HttpDataset().withDescription("lmr").withStructure("datau").withSchema("datadeatwxpx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xedhxbboceksra") - .withParameters(mapOf("wurxdqhv", "datahlugfnlvvk"))) - .withParameters(mapOf("ivlqcwyzhndqkzst", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datanntfkqpwqcnbn"), - "eirta", new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataecdl"), - "viudzpsj", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datawcimtcau"), - "unlofwuzebfq", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalujm"))) - .withAnnotations(Arrays.asList("datajbhzyenf", "datapetxeudwkh", "datalckdoxocjcdevzp")) - .withFolder(new DatasetFolder().withName("ortwwyjm")).withRelativeUrl("dataxooi") - .withRequestMethod("datahiebruptls").withRequestBody("dataqzgaqsosrnjlvgrg") - .withAdditionalHeaders("datahuoxrqhjninpesw") - .withFormat(new DatasetStorageFormat().withSerializer("dataqkgebz").withDeserializer("datam") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datau").withLevel("datarmsdbvqxgfyg") + HttpDataset model = new HttpDataset().withDescription("ajqhsnsejplis") + .withStructure("datayljzbk") + .withSchema("datalfjwxgvtkjctv") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("peawzzkvfccozv") + .withParameters(mapOf("raitrms", "dataph", "ptctxpoegyckmemf", "datakxtuytg", "mwrv", + "datarcclclfkfvyjo", "x", "dataoipjy"))) + .withParameters(mapOf("qwh", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataewfzvvpay"))) + .withAnnotations(Arrays.asList("datayandblk", "datantcv")) + .withFolder(new DatasetFolder().withName("fmo")) + .withRelativeUrl("dataejexfdlhuhd") + .withRequestMethod("databgywadrklpdye") + .withRequestBody("datarwcflvxbocayw") + .withAdditionalHeaders("datavuhzmolhveoln") + .withFormat(new DatasetStorageFormat().withSerializer("datalmnjqzm") + .withDeserializer("datanhitrnwqgqrbth") + .withAdditionalProperties(mapOf("type", "mhwdxqupyml"))) + .withCompression(new DatasetCompression().withType("dataqhhvvwz") + .withLevel("datajaaaiaibtvavly") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(HttpDataset.class); - Assertions.assertEquals("lmr", model.description()); - Assertions.assertEquals("xedhxbboceksra", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ivlqcwyzhndqkzst").type()); - Assertions.assertEquals("ortwwyjm", model.folder().name()); + Assertions.assertEquals("ajqhsnsejplis", model.description()); + Assertions.assertEquals("peawzzkvfccozv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("qwh").type()); + Assertions.assertEquals("fmo", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java index 765545eff8ddf..49cb1483c794e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java @@ -15,17 +15,21 @@ public final class HttpDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HttpDatasetTypeProperties model = BinaryData.fromString( - 
"{\"relativeUrl\":\"datazw\",\"requestMethod\":\"datazuh\",\"requestBody\":\"datatiaczhfjdccjny\",\"additionalHeaders\":\"databt\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datajcgjtjkntomnl\",\"deserializer\":\"datahcdb\",\"\":{\"bdctqxavejoezvwk\":\"databzrrxeyvidcowlr\"}},\"compression\":{\"type\":\"datazgavp\",\"level\":\"datadmdfiekkis\",\"\":{\"a\":\"datayaeknfffysh\",\"tfofhoajjylsyqyj\":\"datajlmlcufbbji\"}}}") + "{\"relativeUrl\":\"datatlrglhxso\",\"requestMethod\":\"dataguhbnhogsezreneg\",\"requestBody\":\"datadtyzpx\",\"additionalHeaders\":\"datatwkejmgem\",\"format\":{\"type\":\"pehskvsdfvhr\",\"serializer\":\"datae\",\"deserializer\":\"datammpkapvnpeukg\",\"\":{\"vktfp\":\"dataakeqnitromlc\",\"fxjtxla\":\"datarowsh\",\"fqdmll\":\"datamvdyqabjrop\"}},\"compression\":{\"type\":\"datayjyuwqlzw\",\"level\":\"datapvhwirilamqtrh\",\"\":{\"dedpkwdtobpgdc\":\"dataxdega\",\"q\":\"datadpd\",\"ddlirqqcl\":\"datarm\",\"k\":\"dataaqifepdureevi\"}}}") .toObject(HttpDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HttpDatasetTypeProperties model = new HttpDatasetTypeProperties().withRelativeUrl("datazw") - .withRequestMethod("datazuh").withRequestBody("datatiaczhfjdccjny").withAdditionalHeaders("databt") - .withFormat(new DatasetStorageFormat().withSerializer("datajcgjtjkntomnl").withDeserializer("datahcdb") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datazgavp").withLevel("datadmdfiekkis") + HttpDatasetTypeProperties model = new HttpDatasetTypeProperties().withRelativeUrl("datatlrglhxso") + .withRequestMethod("dataguhbnhogsezreneg") + .withRequestBody("datadtyzpx") + .withAdditionalHeaders("datatwkejmgem") + .withFormat(new DatasetStorageFormat().withSerializer("datae") + .withDeserializer("datammpkapvnpeukg") + .withAdditionalProperties(mapOf("type", "pehskvsdfvhr"))) + .withCompression(new DatasetCompression().withType("datayjyuwqlzw") + .withLevel("datapvhwirilamqtrh") .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(HttpDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java index 6fed3c81808fb..c0bc860ed54bf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java @@ -11,16 +11,19 @@ public final class HttpReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HttpReadSettings model = BinaryData.fromString( - "{\"type\":\"HttpReadSettings\",\"requestMethod\":\"datarvigrxmptufde\",\"requestBody\":\"datapqghxdpgihfimlyx\",\"additionalHeaders\":\"dataixjudbiac\",\"requestTimeout\":\"dataoucmfuvuslvbujwp\",\"additionalColumns\":\"dataijpyyvecruhqymwd\",\"maxConcurrentConnections\":\"datahkt\",\"disableMetricsCollection\":\"dataljkh\",\"\":{\"ivapua\":\"datatpgxkkoypxwlvt\",\"pd\":\"dataoswqwbhlrzlgkc\"}}") + 
"{\"type\":\"cnkgius\",\"requestMethod\":\"datamxgglkqitpbyne\",\"requestBody\":\"dataxux\",\"additionalHeaders\":\"dataoclef\",\"requestTimeout\":\"dataggglzsbou\",\"additionalColumns\":\"datan\",\"maxConcurrentConnections\":\"dataswcpspaoxig\",\"disableMetricsCollection\":\"datai\",\"\":{\"yn\":\"datapgpqsmglutn\",\"xbgfwwcfwlwnj\":\"datalxxnbogxkidb\"}}") .toObject(HttpReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HttpReadSettings model = new HttpReadSettings().withMaxConcurrentConnections("datahkt") - .withDisableMetricsCollection("dataljkh").withRequestMethod("datarvigrxmptufde") - .withRequestBody("datapqghxdpgihfimlyx").withAdditionalHeaders("dataixjudbiac") - .withRequestTimeout("dataoucmfuvuslvbujwp").withAdditionalColumns("dataijpyyvecruhqymwd"); + HttpReadSettings model = new HttpReadSettings().withMaxConcurrentConnections("dataswcpspaoxig") + .withDisableMetricsCollection("datai") + .withRequestMethod("datamxgglkqitpbyne") + .withRequestBody("dataxux") + .withAdditionalHeaders("dataoclef") + .withRequestTimeout("dataggglzsbou") + .withAdditionalColumns("datan"); model = BinaryData.fromObject(model).toObject(HttpReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java index 3496c931f757b..4f335e7228f92 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java @@ -11,14 +11,15 @@ public final class HttpServerLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HttpServerLocation model = BinaryData.fromString( - "{\"type\":\"HttpServerLocation\",\"relativeUrl\":\"datauklajvcfoc\",\"folderPath\":\"dataapejovtkwx\",\"fileName\":\"datawhhnoyrzaa\",\"\":{\"envjeateaxxc\":\"datahpm\"}}") + "{\"type\":\"tnaczkfwfatga\",\"relativeUrl\":\"datawkuh\",\"folderPath\":\"datahnskivdwgtqcume\",\"fileName\":\"dataaaqgoqbdiuycs\",\"\":{\"bhz\":\"dataowk\",\"htmqowi\":\"datarb\",\"ruozkgyfp\":\"dataasfgqgucyhfaimqv\"}}") .toObject(HttpServerLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HttpServerLocation model = new HttpServerLocation().withFolderPath("dataapejovtkwx") - .withFileName("datawhhnoyrzaa").withRelativeUrl("datauklajvcfoc"); + HttpServerLocation model = new HttpServerLocation().withFolderPath("datahnskivdwgtqcume") + .withFileName("dataaaqgoqbdiuycs") + .withRelativeUrl("datawkuh"); model = BinaryData.fromObject(model).toObject(HttpServerLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java index e7acf3e79d6d7..090511ddc51d5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java @@ -11,15 +11,17 @@ public final class HttpSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HttpSource model = BinaryData.fromString( - "{\"type\":\"HttpSource\",\"httpRequestTimeout\":\"datajnhxufocski\",\"sourceRetryCount\":\"dataj\",\"sourceRetryWait\":\"datap\",\"maxConcurrentConnections\":\"datavhfpfsesiywcre\",\"disableMetricsCollection\":\"dataphqqozhesbpq\",\"\":{\"upcdaoatzvaj\":\"datafjktdvdhlkztalu\",\"pqo\":\"datavxhefmotulhilmaz\"}}") + "{\"type\":\"hcnybhvzltbgw\",\"httpRequestTimeout\":\"dataslyqmlg\",\"sourceRetryCount\":\"dataepjmkruzogs\",\"sourceRetryWait\":\"dataoqjbnfaxcdcmqe\",\"maxConcurrentConnections\":\"datasirotj\",\"disableMetricsCollection\":\"datatugobs\",\"\":{\"jnqifoznfd\":\"datackgqyuvhlpmjpz\"}}") .toObject(HttpSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HttpSource model = new HttpSource().withSourceRetryCount("dataj").withSourceRetryWait("datap") - .withMaxConcurrentConnections("datavhfpfsesiywcre").withDisableMetricsCollection("dataphqqozhesbpq") - .withHttpRequestTimeout("datajnhxufocski"); + HttpSource model = new HttpSource().withSourceRetryCount("dataepjmkruzogs") + .withSourceRetryWait("dataoqjbnfaxcdcmqe") + .withMaxConcurrentConnections("datasirotj") + .withDisableMetricsCollection("datatugobs") + .withHttpRequestTimeout("dataslyqmlg"); model = BinaryData.fromObject(model).toObject(HttpSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java index 83a87df9b3831..987361f230455 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java @@ -19,30 +19,31 @@ public final class HubspotObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HubspotObjectDataset model = BinaryData.fromString( - "{\"type\":\"HubspotObject\",\"typeProperties\":{\"tableName\":\"datadbbglaecct\"},\"description\":\"fspvjrds\",\"structure\":\"datavrm\",\"schema\":\"dataftyptwjwiyyeo\",\"linkedServiceName\":{\"referenceName\":\"gmc\",\"parameters\":{\"ioxbgom\":\"datamvphwfnugslvfz\",\"zuox\":\"dataueprpmofxnwc\"}},\"parameters\":{\"nnrnkyj\":{\"type\":\"String\",\"defaultValue\":\"dataxajsehb\"},\"gzehczbni\":{\"type\":\"String\",\"defaultValue\":\"datapcbs\"}},\"annotations\":[\"datahsxvppkjeal\",\"datadbewh\",\"datatvbmyzuqfdqdktr\",\"datat\"],\"folder\":{\"name\":\"zhhqngj\"},\"\":{\"mxlffqgdodnkq\":\"datav\",\"zesimef\":\"dataipgkmjtdazm\",\"fzjlflzagvda\":\"datagd\"}}") + 
"{\"type\":\"dnihuzzjuzvwg\",\"typeProperties\":{\"tableName\":\"datatbfyt\"},\"description\":\"dto\",\"structure\":\"dataiwnyfzdpxctsu\",\"schema\":\"dataxdte\",\"linkedServiceName\":{\"referenceName\":\"objzr\",\"parameters\":{\"lgmpupj\":\"dataidcnz\"}},\"parameters\":{\"dvbgvzlzjs\":{\"type\":\"String\",\"defaultValue\":\"datayu\"}},\"annotations\":[\"datacutzaz\"],\"folder\":{\"name\":\"pokns\"},\"\":{\"kdarl\":\"datambdqra\",\"bva\":\"datao\",\"vclfjyclvi\":\"dataqwzknyujxy\",\"leirmtxfqpfildcg\":\"datadlff\"}}") .toObject(HubspotObjectDataset.class); - Assertions.assertEquals("fspvjrds", model.description()); - Assertions.assertEquals("gmc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nnrnkyj").type()); - Assertions.assertEquals("zhhqngj", model.folder().name()); + Assertions.assertEquals("dto", model.description()); + Assertions.assertEquals("objzr", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("dvbgvzlzjs").type()); + Assertions.assertEquals("pokns", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HubspotObjectDataset model = new HubspotObjectDataset().withDescription("fspvjrds").withStructure("datavrm") - .withSchema("dataftyptwjwiyyeo") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gmc") - .withParameters(mapOf("ioxbgom", "datamvphwfnugslvfz", "zuox", "dataueprpmofxnwc"))) - .withParameters(mapOf("nnrnkyj", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxajsehb"), - "gzehczbni", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datapcbs"))) - .withAnnotations(Arrays.asList("datahsxvppkjeal", "datadbewh", "datatvbmyzuqfdqdktr", "datat")) - .withFolder(new DatasetFolder().withName("zhhqngj")).withTableName("datadbbglaecct"); + HubspotObjectDataset model = new HubspotObjectDataset().withDescription("dto") + .withStructure("dataiwnyfzdpxctsu") + .withSchema("dataxdte") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("objzr").withParameters(mapOf("lgmpupj", "dataidcnz"))) + .withParameters(mapOf("dvbgvzlzjs", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datayu"))) + .withAnnotations(Arrays.asList("datacutzaz")) + .withFolder(new DatasetFolder().withName("pokns")) + .withTableName("datatbfyt"); model = BinaryData.fromObject(model).toObject(HubspotObjectDataset.class); - Assertions.assertEquals("fspvjrds", model.description()); - Assertions.assertEquals("gmc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nnrnkyj").type()); - Assertions.assertEquals("zhhqngj", model.folder().name()); + Assertions.assertEquals("dto", model.description()); + Assertions.assertEquals("objzr", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("dvbgvzlzjs").type()); + Assertions.assertEquals("pokns", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java index 022094851f025..5b12732f445b8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java @@ -11,16 +11,19 @@ public final class HubspotSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { HubspotSource model = BinaryData.fromString( - "{\"type\":\"HubspotSource\",\"query\":\"dataqaptqyrnlyuyopww\",\"queryTimeout\":\"dataoubwbssvfzjjf\",\"additionalColumns\":\"dataxeosyl\",\"sourceRetryCount\":\"datappqjujbqrfw\",\"sourceRetryWait\":\"datawvpnbgyxo\",\"maxConcurrentConnections\":\"datakzeaiaycauvlfsc\",\"disableMetricsCollection\":\"dataqpzqivfgemvuicxw\",\"\":{\"atjm\":\"dataydlvfnucgwflj\"}}") + "{\"type\":\"xpnzpuknfpgg\",\"query\":\"datakniqoqyrcpsjea\",\"queryTimeout\":\"dataluchbfrta\",\"additionalColumns\":\"datasddy\",\"sourceRetryCount\":\"dataxsyufexivhjyxa\",\"sourceRetryWait\":\"datatkqofr\",\"maxConcurrentConnections\":\"dataccqjenzloxazy\",\"disableMetricsCollection\":\"datajbvqaey\",\"\":{\"wliitaieledmiup\":\"databdwflx\"}}") .toObject(HubspotSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - HubspotSource model = new HubspotSource().withSourceRetryCount("datappqjujbqrfw") - .withSourceRetryWait("datawvpnbgyxo").withMaxConcurrentConnections("datakzeaiaycauvlfsc") - .withDisableMetricsCollection("dataqpzqivfgemvuicxw").withQueryTimeout("dataoubwbssvfzjjf") - .withAdditionalColumns("dataxeosyl").withQuery("dataqaptqyrnlyuyopww"); + HubspotSource model = new HubspotSource().withSourceRetryCount("dataxsyufexivhjyxa") + .withSourceRetryWait("datatkqofr") + .withMaxConcurrentConnections("dataccqjenzloxazy") + .withDisableMetricsCollection("datajbvqaey") + .withQueryTimeout("dataluchbfrta") + .withAdditionalColumns("datasddy") + .withQuery("datakniqoqyrcpsjea"); model = BinaryData.fromObject(model).toObject(HubspotSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java index 897c3e09fce6e..a6341eb2ac7cc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java @@ -22,158 +22,171 @@ public final class IfConditionActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IfConditionActivity model = BinaryData.fromString( - 
"{\"type\":\"IfCondition\",\"typeProperties\":{\"expression\":{\"value\":\"wbqcofsqruy\"},\"ifTrueActivities\":[{\"type\":\"Activity\",\"name\":\"em\",\"description\":\"jzhix\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"uj\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\",\"Completed\"],\"\":{\"bshyul\":\"datawtkhfcnceowvi\",\"zcpoydaifxmbxqzc\":\"datahepnmeg\"}},{\"activity\":\"c\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Succeeded\",\"Failed\"],\"\":{\"vanpjvqrwlseeu\":\"datapldaoiidxknsqdru\"}},{\"activity\":\"xxrwov\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Completed\"],\"\":{\"vytrzsqbckqgte\":\"databwqrot\"}},{\"activity\":\"inznkvyicj\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Succeeded\"],\"\":{\"buvyuzzwph\":\"dataeskindgmk\",\"ikwvcogq\":\"dataliflxrnsyvmu\",\"cthrrxrki\":\"dataim\"}}],\"userProperties\":[{\"name\":\"dhbmizbevje\",\"value\":\"dataufxuug\"},{\"name\":\"dbpjoycpys\",\"value\":\"datacmavln\"},{\"name\":\"bm\",\"value\":\"datanvfgwgoxfd\"}],\"\":{\"bifpc\":\"datazoxhazafmq\",\"gjxklojdydha\":\"dataammpeakdhebzquq\",\"sxgjih\":\"datafjwm\"}},{\"type\":\"Activity\",\"name\":\"xoxjghumvpt\",\"description\":\"o\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"lcjuzzzi\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"yhpilqojdmz\":\"dataovbgdbaornx\",\"rymzlqircivxaqz\":\"datajcpzzqji\",\"ye\":\"datavgxqtkcvnyi\"}},{\"activity\":\"wfsivg\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"kklz\":\"datadyztnsutes\",\"vtivefsr\":\"datah\",\"dmcoxobrv\":\"datatcxhpntewvfvs\"}}],\"userProperties\":[{\"name\":\"rvnnfieaqbv\",\"value\":\"datauehggeea\"},{\"name\":\"brslbzcyubqemrxm\",\"value\":\"dataibexaxu\"},{\"name\":\"wwqnwxohbmvg\",\"value\":\"datagdnzvohrnqn\"},{\"name\":\"runkyuzcpi\",\"value\":\"dataas\"}],\"\":{\"ascvcmt\":\"datatiocsfpcyyi\",\"adtyhmoph\":\"dataukboryn\"}}],\"ifFalseActivities\":[{\"type\":\"Activity\",\"name\":\"hvnqwdphnc\",\"description\":\"bqij\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"nhrhxhmtxpx\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Skipped\"],\"\":{\"vijdtmjybb\":\"datantiz\",\"sliou\":\"datadhwadnccunrviqrz\"}},{\"activity\":\"a\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Completed\"],\"\":{\"ir\":\"dataggbuajwrrgqudnm\"}}],\"userProperties\":[{\"name\":\"ztkxfhix\",\"value\":\"datauuzaczmej\"},{\"name\":\"iiegpdhit\",\"value\":\"datatketw\"},{\"name\":\"sko\",\"value\":\"datamqhzys\"},{\"name\":\"chbvejgfx\",\"value\":\"datajqevmzhk\"}],\"\":{\"pnxylhrlbohdxln\":\"datangdgk\"}},{\"type\":\"Activity\",\"name\":\"lvcbcxb\",\"description\":\"snhqqqaedgwghqqi\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"uenb\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"opibaxkywqs\":\"dataoopksmj\",\"pfe\":\"datanrcuvlfzd\"}}],\"userProperties\":[{\"name\":\"cahlsavinoora\",\"value\":\"dataspfinyijmwqgmhf\"},{\"name\":\"lbd\",\"value\":\"datadhedmfidro\"},{\"name\":\"fpucwmdmbys\",\"value\":\"dataqbgndfzheyxccx\"}],\"\":{\"uppkzuxsbbmxfut\":\"dataioawrorexicwb\",\"helyopobg\":\"datay\"}}]},\"name\":\"l\",\"description\":\"ki\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"fmpiffgtqhghy\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"hucxmybuqjpgbiya\":\"datagmlaer\",\"fyinh\":\"datagat\"}},{\"act
ivity\":\"vbmbf\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Failed\",\"Failed\"],\"\":{\"yvopotiefxhaqq\":\"datadkdcvowaslswwg\",\"ypuon\":\"datavhfdezomykjbl\"}},{\"activity\":\"vmymfaiw\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"va\":\"dataddsxsq\",\"roedwipauclety\":\"datayzd\",\"hboqeue\":\"datazziavguskvvnzn\"}},{\"activity\":\"yfl\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"jnsbdw\":\"datazda\"}}],\"userProperties\":[{\"name\":\"q\",\"value\":\"databafrbhrsp\"},{\"name\":\"kvok\",\"value\":\"datamere\"}],\"\":{\"decxbiknf\":\"dataessuwkcn\",\"fxdntpksb\":\"datapixfdojxby\",\"svahbqoojdnmrxj\":\"dataigegwaidqzfl\",\"nkadanl\":\"dataumrzfdbo\"}}") + "{\"type\":\"abzr\",\"typeProperties\":{\"expression\":{\"value\":\"uhjqdwlxabtlms\"},\"ifTrueActivities\":[{\"type\":\"dai\",\"name\":\"fqnxjkopivsz\",\"description\":\"bptrmhabzjem\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"axnbqsjznc\",\"dependencyConditions\":[\"Failed\"],\"\":{\"xojimussvursl\":\"datagiv\",\"ksoqrhwl\":\"datadxnxgvalvkdaq\"}},{\"activity\":\"nwhtwsxliwpzu\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Completed\"],\"\":{\"ubh\":\"datarvtrwswbm\",\"ivusehyvqxjbqfcl\":\"databtthzfgpzy\",\"xdlzxua\":\"datajecajtuo\"}},{\"activity\":\"bavpkrxnrrbck\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"bdhhqsfhtlvjaxd\":\"datasgxeijnvsjg\"}}],\"userProperties\":[{\"name\":\"icikzmvdddfjmirb\",\"value\":\"datafcqls\"}],\"\":{\"rymrfpqyxlncwagi\":\"datapfspfd\",\"uerhzyl\":\"dataqhzotkowi\",\"emsl\":\"datawymrmuioepi\",\"vryszqzve\":\"dataz\"}},{\"type\":\"newmpwjcgr\",\"name\":\"olbqcftrywd\",\"description\":\"skdl\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"nxvmcxljlpyhdx\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"qbqgfq\":\"dataewt\",\"xwevdjmxvvtuky\":\"datavm\"}},{\"activity\":\"ubjnmoidinbfb\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"ghysedqrb\":\"datacgmfklqswwdbs\",\"qrwngfyjfquzxmtm\":\"datavo\"}},{\"activity\":\"yibycoupksa\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"gyjoklngjsglzoir\":\"dataxvffrncswv\",\"pbgak\":\"datasqdnasj\",\"rgye\":\"dataszzbdt\",\"qiot\":\"datavqslikeuq\"}},{\"activity\":\"fcbgffd\",\"dependencyConditions\":[\"Completed\"],\"\":{\"qawtfyzqop\":\"datat\",\"ea\":\"datalixhapvwacwrc\",\"ble\":\"dataucnknzncoxeop\"}}],\"userProperties\":[{\"name\":\"rsyxeqwgaeic\",\"value\":\"dataovrcdcidcxkyw\"},{\"name\":\"p\",\"value\":\"datatssqbclaeci\"},{\"name\":\"zwvttkhaxqyinfdm\",\"value\":\"datajq\"},{\"name\":\"khq\",\"value\":\"dataxpiczaqgevsnn\"}],\"\":{\"skffqqaobbq\":\"dataufezwgwmdv\",\"adffdr\":\"datadkjusqhr\",\"rvn\":\"dataykhtsycct\"}},{\"type\":\"iembc\",\"name\":\"tzmldw\",\"description\":\"xjkxvzhacorqbmkf\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qgm\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"dnu\":\"datanv\",\"acxldhoqcdpwx\":\"datasaskgiyrilbi\",\"znpxaxcshtlqhi\":\"dataccvtb\",\"obhnuziaz\":\"datamfzdlhp\"}}],\"userProperties\":[{\"name\":\"wmjaevwidnjpfku\",\"value\":\"datahwdirt\"},{\"name\":\"y\",\"value\":\"dataaqya\"},{\"name\":\"dykxgcfhv\",\"value\":\"dataynsyhz\"},{\"name\":\"suoqfbycra\",\"value\":\"datayxrt\"}],\"\":{\"tstlgdvvpxhdefy\":\"datajhjbfoemm\",\"jyqhcowouoih\":\"dataitbjmva\",\"mpzb\":\"datatnyvigjbxhjpsgpr\"}}],\"ifFalseActivities\":[{\"type\":\"yflryhv
phkdci\",\"name\":\"idz\",\"description\":\"fwlxxwpyzbgstml\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"azyni\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"m\":\"datawcutohm\",\"iimennxvqjakqd\":\"datamdouich\",\"zuuguze\":\"datannef\"}},{\"activity\":\"fggheqllrp\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"hohqe\":\"datakrvmvdqhag\",\"xeubngwidgxypdo\":\"datatlsipedgtupkm\",\"lt\":\"datalphmcmfvyhmivy\"}},{\"activity\":\"akmtvoprg\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"fxkud\":\"dataorxibw\",\"enmuevq\":\"datacwfo\",\"gzdionlgnes\":\"datassclgolbpw\"}},{\"activity\":\"k\",\"dependencyConditions\":[\"Completed\"],\"\":{\"hlbxrqbi\":\"datatzskvpqqxnd\",\"zkehfkpoczxm\":\"datajhaafvxxi\",\"qpq\":\"databkrwihbyufmuin\"}}],\"userProperties\":[{\"name\":\"xdihuxz\",\"value\":\"datagoto\"}],\"\":{\"d\":\"dataduirjqxknaeuhxnp\",\"dvnaxtbnjmj\":\"datajaeqaolfyqjgob\",\"bdfmhzgtieybimit\":\"datagrwvl\"}}]},\"name\":\"reftwhiivxytvje\",\"description\":\"kuzlfnbz\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"kwrv\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"g\":\"dataqy\",\"wxmqyhtlnnpftay\":\"datavpxsdtnxg\",\"gxamhmqexyoy\":\"datao\",\"pvvelcrwhrpxs\":\"datacwzkcreuf\"}},{\"activity\":\"ybalsmiar\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Succeeded\",\"Failed\"],\"\":{\"obyyv\":\"datapv\"}},{\"activity\":\"jelsjh\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Succeeded\"],\"\":{\"ujjdoelawdbkez\":\"datahkhiycddonqi\"}},{\"activity\":\"kotvoszgcy\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"j\":\"dataqwvvferlqhfzzqqs\",\"skjqejkm\":\"datashwxy\",\"utcyjjbdgfrl\":\"datatwftlhsmtkxzio\",\"egqvusffzvpwzvh\":\"datah\"}}],\"userProperties\":[{\"name\":\"rvmpiw\",\"value\":\"dataoorrnssthninza\"},{\"name\":\"dmnc\",\"value\":\"dataltrxwab\"},{\"name\":\"d\",\"value\":\"dataclqgteoepdpx\"}],\"\":{\"qikeamymalvoy\":\"dataqwfpqixomonq\"}}") .toObject(IfConditionActivity.class); - Assertions.assertEquals("l", model.name()); - Assertions.assertEquals("ki", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("reftwhiivxytvje", model.name()); + Assertions.assertEquals("kuzlfnbz", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("fmpiffgtqhghy", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("q", model.userProperties().get(0).name()); - Assertions.assertEquals("wbqcofsqruy", model.expression().value()); - Assertions.assertEquals("em", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("jzhix", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("uj", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, + Assertions.assertEquals("kwrv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, 
model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("rvmpiw", model.userProperties().get(0).name()); + Assertions.assertEquals("uhjqdwlxabtlms", model.expression().value()); + Assertions.assertEquals("fqnxjkopivsz", model.ifTrueActivities().get(0).name()); + Assertions.assertEquals("bptrmhabzjem", model.ifTrueActivities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.ifTrueActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("axnbqsjznc", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("dhbmizbevje", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("hvnqwdphnc", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("bqij", model.ifFalseActivities().get(0).description()); + Assertions.assertEquals("icikzmvdddfjmirb", model.ifTrueActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("idz", model.ifFalseActivities().get(0).name()); + Assertions.assertEquals("fwlxxwpyzbgstml", model.ifFalseActivities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("nhrhxhmtxpx", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifFalseActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("azyni", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ztkxfhix", model.ifFalseActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("xdihuxz", model.ifFalseActivities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IfConditionActivity model = new IfConditionActivity().withName("l").withDescription("ki") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("fmpiffgtqhghy") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vbmbf") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vmymfaiw") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yfl") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new 
UserProperty().withName("q").withValue("databafrbhrsp"), - new UserProperty().withName("kvok").withValue("datamere"))) - .withExpression(new Expression().withValue("wbqcofsqruy")) - .withIfTrueActivities(Arrays.asList( - new Activity().withName("em").withDescription("jzhix").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency() - .withActivity("uj") + IfConditionActivity model + = new IfConditionActivity().withName("reftwhiivxytvje") + .withDescription("kuzlfnbz") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn( + Arrays.asList( + new ActivityDependency().withActivity("kwrv") .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ybalsmiar") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("c") + new ActivityDependency().withActivity("jelsjh") .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xxrwov") + new ActivityDependency().withActivity("kotvoszgcy") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList( + new UserProperty().withName("rvmpiw").withValue("dataoorrnssthninza"), + new UserProperty().withName("dmnc").withValue("dataltrxwab"), new UserProperty() + .withName("d") + .withValue("dataclqgteoepdpx"))) + .withExpression(new Expression().withValue("uhjqdwlxabtlms")) + .withIfTrueActivities(Arrays.asList( + new Activity().withName("fqnxjkopivsz") + .withDescription("bptrmhabzjem") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("axnbqsjznc") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("nwhtwsxliwpzu") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency() + .withActivity("bavpkrxnrrbck") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("icikzmvdddfjmirb").withValue("datafcqls"))) + .withAdditionalProperties(mapOf("type", "dai")), + new Activity().withName("olbqcftrywd") + .withDescription("skdl") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("nxvmcxljlpyhdx") .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - 
DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("inznkvyicj") + new ActivityDependency().withActivity("ubjnmoidinbfb") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("yibycoupksa") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, + DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fcbgffd") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("rsyxeqwgaeic").withValue("dataovrcdcidcxkyw"), + new UserProperty().withName("p").withValue("datatssqbclaeci"), + new UserProperty().withName("zwvttkhaxqyinfdm").withValue("datajq"), + new UserProperty().withName("khq").withValue("dataxpiczaqgevsnn"))) + .withAdditionalProperties(mapOf("type", "newmpwjcgr")), + new Activity().withName("tzmldw") + .withDescription("xjkxvzhacorqbmkf") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("qgm") .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("dhbmizbevje").withValue("dataufxuug"), - new UserProperty().withName("dbpjoycpys").withValue("datacmavln"), - new UserProperty().withName("bm").withValue("datanvfgwgoxfd"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity() - .withName("xoxjghumvpt").withDescription("o").withState( - ActivityState.ACTIVE) + .withUserProperties( + Arrays.asList(new UserProperty().withName("wmjaevwidnjpfku").withValue("datahwdirt"), + new UserProperty().withName("y").withValue("dataaqya"), + new UserProperty().withName("dykxgcfhv").withValue("dataynsyhz"), + new UserProperty().withName("suoqfbycra").withValue("datayxrt"))) + .withAdditionalProperties(mapOf("type", "iembc")))) + .withIfFalseActivities(Arrays.asList(new Activity().withName("idz") + .withDescription("fwlxxwpyzbgstml") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("lcjuzzzi") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + new ActivityDependency().withActivity("azyni") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wfsivg") + new ActivityDependency().withActivity("fggheqllrp") .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("rvnnfieaqbv").withValue("datauehggeea"), - new UserProperty().withName("brslbzcyubqemrxm").withValue("dataibexaxu"), - 
new UserProperty().withName("wwqnwxohbmvg").withValue("datagdnzvohrnqn"), - new UserProperty().withName("runkyuzcpi").withValue("dataas"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withIfFalseActivities(Arrays.asList( - new Activity().withName("hvnqwdphnc").withDescription("bqij").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("nhrhxhmtxpx") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("a") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("akmtvoprg") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("k") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ztkxfhix").withValue("datauuzaczmej"), - new UserProperty().withName("iiegpdhit").withValue("datatketw"), - new UserProperty().withName("sko").withValue("datamqhzys"), - new UserProperty().withName("chbvejgfx").withValue("datajqevmzhk"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("lvcbcxb").withDescription("snhqqqaedgwghqqi").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("uenb") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("cahlsavinoora").withValue("dataspfinyijmwqgmhf"), - new UserProperty().withName("lbd").withValue("datadhedmfidro"), - new UserProperty().withName("fpucwmdmbys").withValue("dataqbgndfzheyxccx"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + .withUserProperties(Arrays.asList(new UserProperty().withName("xdihuxz").withValue("datagoto"))) + .withAdditionalProperties(mapOf("type", "yflryhvphkdci")))); model = BinaryData.fromObject(model).toObject(IfConditionActivity.class); - Assertions.assertEquals("l", model.name()); - Assertions.assertEquals("ki", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("reftwhiivxytvje", model.name()); + Assertions.assertEquals("kuzlfnbz", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("fmpiffgtqhghy", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("q", model.userProperties().get(0).name()); - Assertions.assertEquals("wbqcofsqruy", model.expression().value()); - Assertions.assertEquals("em", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("jzhix", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifTrueActivities().get(0).state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("uj", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, + Assertions.assertEquals("kwrv", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("rvmpiw", model.userProperties().get(0).name()); + Assertions.assertEquals("uhjqdwlxabtlms", model.expression().value()); + Assertions.assertEquals("fqnxjkopivsz", model.ifTrueActivities().get(0).name()); + Assertions.assertEquals("bptrmhabzjem", model.ifTrueActivities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.ifTrueActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("axnbqsjznc", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("dhbmizbevje", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("hvnqwdphnc", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("bqij", model.ifFalseActivities().get(0).description()); + Assertions.assertEquals("icikzmvdddfjmirb", model.ifTrueActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("idz", model.ifFalseActivities().get(0).name()); + Assertions.assertEquals("fwlxxwpyzbgstml", model.ifFalseActivities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("nhrhxhmtxpx", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifFalseActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("azyni", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ztkxfhix", model.ifFalseActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("xdihuxz", model.ifFalseActivities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java index 1efb949cf37f2..8f942bd702705 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java @@ -22,135 +22,154 @@ public final class IfConditionActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void 
testDeserialize() throws Exception { IfConditionActivityTypeProperties model = BinaryData.fromString( - "{\"expression\":{\"value\":\"qlrmbgiaoxpfko\"},\"ifTrueActivities\":[{\"type\":\"Activity\",\"name\":\"xxezur\",\"description\":\"ucnssp\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ftklbbribgc\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\"],\"\":{\"xlhec\":\"datatdkwibdrivedsh\"}},{\"activity\":\"tmwwmybviw\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"udskcadkyoo\":\"datajglponkrhpyed\",\"f\":\"datavqpcjr\"}},{\"activity\":\"yduzzyx\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Completed\"],\"\":{\"nectfpbfgmghohox\":\"dataufmy\",\"vqcxrrkc\":\"dataonts\",\"zs\":\"dataclqlibpmfn\"}}],\"userProperties\":[{\"name\":\"kktlodsyyzmf\",\"value\":\"datagzljgrtfic\"},{\"name\":\"ejmzbasxapcegtc\",\"value\":\"dataufet\"}],\"\":{\"xgxqdmvfdocjaf\":\"datatjnneynmgvqysghk\",\"wmtfjzuqhyqvm\":\"datafiddnktutwcz\",\"dpeedzowverhtyc\":\"databsj\",\"mdsisll\":\"dataigtsrrlelpobm\"}},{\"type\":\"Activity\",\"name\":\"qgluhr\",\"description\":\"mojozhdcptxx\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"mwnyudcvqeo\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"ntopfqguovqqrc\":\"datangiff\",\"rpcguwyu\":\"dataeumwvzagurg\",\"qsrqebjgo\":\"datahkefowncudcrwo\"}},{\"activity\":\"mjcahda\",\"dependencyConditions\":[\"Skipped\",\"Skipped\"],\"\":{\"mseao\":\"dataf\",\"bslwxcf\":\"dataq\",\"wnmnxppgfep\":\"datavedxyeba\"}}],\"userProperties\":[{\"name\":\"djva\",\"value\":\"dataae\"},{\"name\":\"oqknz\",\"value\":\"datanvvkfbmrppjf\"},{\"name\":\"eabgpw\",\"value\":\"datas\"}],\"\":{\"n\":\"datai\",\"telimqxwih\":\"datavdjmvzcycg\",\"hz\":\"datapyexjrguziglr\",\"isklotwnppstpq\":\"datamrvgcbf\"}}],\"ifFalseActivities\":[{\"type\":\"Activity\",\"name\":\"eawolhlfffe\",\"description\":\"bmhqy\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"coqtvxhipchdpdev\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"eshxomt\":\"datacik\",\"pypzgdet\":\"datakxpsx\",\"gyhu\":\"datad\",\"zmziiftjig\":\"datasutspocrskkraap\"}},{\"activity\":\"qyzocfyywcfl\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"pwtjoku\":\"datamktbwdfjcepy\",\"bbccbqxwojve\":\"datartqnbdgcnickn\"}},{\"activity\":\"xhf\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"hzwyyyerzbmlhg\":\"datafyjcenkidlpml\",\"wsyx\":\"datatkthevodddne\",\"ohdifbhtxtcqjg\":\"datafdjftcr\"}},{\"activity\":\"d\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"tnej\":\"dataurjxkpha\",\"vuvh\":\"datafljqzbixlzaa\",\"bneepfjibtsp\":\"dataerjrcxyxepl\",\"eigywj\":\"dataiwfqj\"}}],\"userProperties\":[{\"name\":\"gncscwsefdqnsu\",\"value\":\"dataomln\"}],\"\":{\"crllecquo\":\"datajdcvnanej\",\"wvcyprpog\":\"datagyhkvtofxke\",\"ochpzcgs\":\"dataqvuftkiyghcmpyki\"}},{\"type\":\"Activity\",\"name\":\"pklfnst\",\"description\":\"bpwwo\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"rsgfpds\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"kcji\":\"dataepmttrfun\",\"jjfne\":\"dataoczoiduk\",\"mdffyv\":\"datauqalwjcqbnvbz\"}},{\"activity\":\"d\",\"dependencyConditions\":[\"Completed\"],\"\":{\"okirxyffttsdt\":\"dataryvkubfotgivpor\",\"gtrjzimxz\":\"dataql\",\"uladdujzenagmh\":\"datauqcinjejyinlys\"}},{\"activity\":\"mgtbqzftmpgibm\",\"dependencyConditions\":[\"Completed\"],\"\":{\"yjvjyxue
uqc\":\"datacprbwsndloldxm\",\"gxak\":\"datagbs\",\"uyokctymsbhdi\":\"datakbryolzbmdntajgg\",\"s\":\"datazao\"}},{\"activity\":\"nxgk\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"euwpivsltlyqc\":\"dataukbpwwfeixm\",\"qcmsrzrcddlzga\":\"datapwndcjr\",\"optrudpm\":\"dataptwqfgqccond\"}}],\"userProperties\":[{\"name\":\"loflcilrafkrvv\",\"value\":\"datawknymqzmui\"},{\"name\":\"uvtgjgpcvdjin\",\"value\":\"dataoslzrbz\"},{\"name\":\"f\",\"value\":\"datavwcjrbjgdvwa\"}],\"\":{\"svximqkuyflzx\":\"datacnevkfkmena\"}}]}") + "{\"expression\":{\"value\":\"qgelcccccc\"},\"ifTrueActivities\":[{\"type\":\"jzqve\",\"name\":\"mzpo\",\"description\":\"h\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"khvxtxuihy\",\"dependencyConditions\":[\"Failed\"],\"\":{\"ydtllpwzayau\":\"dataycpzqjpyquy\",\"kfplhrenedsnu\":\"dataell\",\"skn\":\"datair\"}},{\"activity\":\"yfvptoktrjwnq\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"ngkkf\":\"datafngktmzooszv\",\"iqtyeqeasia\":\"dataebwqz\",\"idbbqvip\":\"datascjhay\"}}],\"userProperties\":[{\"name\":\"wbxp\",\"value\":\"datamndqmz\"},{\"name\":\"gqedonozwy\",\"value\":\"datahvqkeuiyme\"}],\"\":{\"gwsnqnxrrjihgig\":\"datankntldddknpvus\",\"nouwxkeqlbmwoya\":\"dataozksood\",\"udfixhxl\":\"dataxnq\",\"xvmenlqwxsk\":\"datavzqhtgtadtootkg\"}},{\"type\":\"zzp\",\"name\":\"twgtmpytomft\",\"description\":\"hvbw\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"a\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Failed\"],\"\":{\"srdaoixgq\":\"dataxfzvllihw\",\"qhdgix\":\"datadsjnleko\",\"kjcdjswxek\":\"datanchyoimt\",\"clg\":\"datahvccxuntghwcb\"}},{\"activity\":\"yfcbcakcq\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"gvcm\":\"dataumlnfxboqvgwaiyw\"}},{\"activity\":\"i\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Failed\"],\"\":{\"zbnqmxirspj\":\"dataxz\",\"zisdnbourw\":\"dataakrbew\"}},{\"activity\":\"g\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\",\"Completed\"],\"\":{\"fnxtynus\":\"dataxgmzyqftlafeco\",\"nztwnylk\":\"dataza\",\"uzw\":\"databwxcjf\"}}],\"userProperties\":[{\"name\":\"guqzlmh\",\"value\":\"datauqlsd\"}],\"\":{\"cpvych\":\"datajxlzyyylyxujqp\",\"jg\":\"databshogjaubplf\",\"ujtnnd\":\"datafrwym\",\"yc\":\"datas\"}}],\"ifFalseActivities\":[{\"type\":\"yt\",\"name\":\"qbtijybpfwgclppw\",\"description\":\"xh\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"faqgzakisipjgvm\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"ubqfbwffgconiyd\":\"datavuzbxuubwjopkl\"}}],\"userProperties\":[{\"name\":\"sgyytnmhlankos\",\"value\":\"datafvmfyxle\"},{\"name\":\"khkiaybdivx\",\"value\":\"dataxwdfm\"},{\"name\":\"zndlgq\",\"value\":\"datauqjczcorct\"}],\"\":{\"hmsdod\":\"dataxaafcv\",\"lfo\":\"datarzsninkhbm\",\"jphzxmcpsepkrdge\":\"datayt\"}},{\"type\":\"zxkpxrfulqhhm\",\"name\":\"dbbpjdgn\",\"description\":\"elxkzmfmgboyliop\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"yvmpfebsummy\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\",\"Completed\"],\"\":{\"rskosbzvclzutvqk\":\"datasdupmrickuhgbrv\",\"mv\":\"datai\"}},{\"activity\":\"fskqwjlohkaffyny\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"xkucxpq\":\"datatyi\",\"egiufjnjgupj\":\"dataaxkayv\",\"wbdmunuv\":\"datappbalcft\"}},{\"activity\":\"wm\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"yza\":\"datapzqrb\",\"qzmwxoogi\":\"datawrufiouafxp\",\"l\"
:\"datagnplzbtvpuigtnjy\",\"extlyyvebpykzhr\":\"datavvitxoitnqmiwlri\"}},{\"activity\":\"usbtwpvmzgnxepa\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"nbttefbbr\":\"datahkyluqxndmtasx\"}}],\"userProperties\":[{\"name\":\"fkvshozjkwj\",\"value\":\"datavdohocsgktfzs\"}],\"\":{\"cxevitvbzyhexlh\":\"datacbe\"}},{\"type\":\"piedcrtv\",\"name\":\"cbzpynedts\",\"description\":\"tdmgwxowaawehxs\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"cuvj\",\"dependencyConditions\":[\"Completed\"],\"\":{\"fuuu\":\"dataswgkbzrmeftg\",\"ykvgfh\":\"dataagsyvzghnqed\",\"ims\":\"dataihotzygqdc\",\"ynunrajtbumaid\":\"datarytkmfhbpcr\"}},{\"activity\":\"sn\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"cgtlttnjpgxuxkce\":\"datamvgttj\"}},{\"activity\":\"xkdqqombiaoaqwwo\",\"dependencyConditions\":[\"Completed\"],\"\":{\"sqtj\":\"datafrau\",\"zqqtipkrea\":\"datatqbhrdp\",\"nwaymrlvhl\":\"dataakkgqfkigukfximw\",\"hszgaub\":\"dataeriqendtyccn\"}},{\"activity\":\"bizjbwufjogswf\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"fgx\":\"dataypwrvnveetayd\",\"awfzyvxk\":\"datadobsxshjsrau\"}}],\"userProperties\":[{\"name\":\"ofpsr\",\"value\":\"dataoujkcpyerfsngt\"},{\"name\":\"ijbolksehtyx\",\"value\":\"datagsurfnktxht\"},{\"name\":\"rzdqqo\",\"value\":\"datadralt\"},{\"name\":\"cttjibognhuqdkq\",\"value\":\"dataffcvahknv\"}],\"\":{\"bijzoixutizl\":\"datapwobhkq\"}}]}") .toObject(IfConditionActivityTypeProperties.class); - Assertions.assertEquals("qlrmbgiaoxpfko", model.expression().value()); - Assertions.assertEquals("xxezur", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("ucnssp", model.ifTrueActivities().get(0).description()); + Assertions.assertEquals("qgelcccccc", model.expression().value()); + Assertions.assertEquals("mzpo", model.ifTrueActivities().get(0).name()); + Assertions.assertEquals("h", model.ifTrueActivities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ftklbbribgc", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifTrueActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("khvxtxuihy", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kktlodsyyzmf", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("eawolhlfffe", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("bmhqy", model.ifFalseActivities().get(0).description()); + Assertions.assertEquals("wbxp", model.ifTrueActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("qbtijybpfwgclppw", model.ifFalseActivities().get(0).name()); + Assertions.assertEquals("xh", model.ifFalseActivities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("coqtvxhipchdpdev", 
model.ifFalseActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("faqgzakisipjgvm", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gncscwsefdqnsu", model.ifFalseActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("sgyytnmhlankos", model.ifFalseActivities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IfConditionActivityTypeProperties model - = new IfConditionActivityTypeProperties() - .withExpression( - new Expression().withValue("qlrmbgiaoxpfko")) + = new IfConditionActivityTypeProperties().withExpression(new Expression().withValue("qgelcccccc")) .withIfTrueActivities(Arrays.asList( - new Activity().withName("xxezur").withDescription("ucnssp").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + new Activity().withName("mzpo") + .withDescription("h") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ftklbbribgc") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + new ActivityDependency().withActivity("khvxtxuihy") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tmwwmybviw") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("yfvptoktrjwnq") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("wbxp").withValue("datamndqmz"), + new UserProperty().withName("gqedonozwy").withValue("datahvqkeuiyme"))) + .withAdditionalProperties(mapOf("type", "jzqve")), + new Activity().withName("twgtmpytomft") + .withDescription("hvbw") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("a") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.FAILED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yduzzyx") + new ActivityDependency().withActivity("yfcbcakcq") .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) + DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("i") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("g") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new 
UserProperty().withName("kktlodsyyzmf").withValue("datagzljgrtfic"), - new UserProperty().withName("ejmzbasxapcegtc").withValue("dataufet"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity() - .withName("qgluhr").withDescription("mojozhdcptxx").withState( - ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + Arrays.asList(new UserProperty().withName("guqzlmh").withValue("datauqlsd"))) + .withAdditionalProperties(mapOf("type", "zzp")))) + .withIfFalseActivities(Arrays.asList( + new Activity().withName("qbtijybpfwgclppw") + .withDescription("xh") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("faqgzakisipjgvm") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("sgyytnmhlankos").withValue("datafvmfyxle"), + new UserProperty().withName("khkiaybdivx") + .withValue("dataxwdfm"), + new UserProperty().withName("zndlgq").withValue("datauqjczcorct"))) + .withAdditionalProperties(mapOf("type", "yt")), + new Activity().withName("dbbpjdgn") + .withDescription("elxkzmfmgboyliop") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mwnyudcvqeo") + new ActivityDependency().withActivity("yvmpfebsummy") .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mjcahda") + new ActivityDependency().withActivity("fskqwjlohkaffyny") .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wm") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("usbtwpvmzgnxepa") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("djva").withValue("dataae"), - new UserProperty().withName("oqknz") - .withValue("datanvvkfbmrppjf"), - new UserProperty().withName("eabgpw").withValue("datas"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withIfFalseActivities(Arrays.asList( - new Activity().withName("eawolhlfffe").withDescription("bmhqy").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs( - ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("coqtvxhipchdpdev") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qyzocfyywcfl") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xhf") - 
.withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("d") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gncscwsefdqnsu") - .withValue("dataomln"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("pklfnst").withDescription("bpwwo").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withUserProperties( + Arrays.asList(new UserProperty().withName("fkvshozjkwj").withValue("datavdohocsgktfzs"))) + .withAdditionalProperties(mapOf("type", "zxkpxrfulqhhm")), + new Activity().withName("cbzpynedts") + .withDescription("tdmgwxowaawehxs") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("rsgfpds") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("d") + new ActivityDependency().withActivity("cuvj") .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mgtbqzftmpgibm") + new ActivityDependency().withActivity("sn") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xkdqqombiaoaqwwo") .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("nxgk") + new ActivityDependency().withActivity("bizjbwufjogswf") .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("loflcilrafkrvv").withValue("datawknymqzmui"), - new UserProperty().withName("uvtgjgpcvdjin").withValue("dataoslzrbz"), - new UserProperty().withName("f").withValue("datavwcjrbjgdvwa"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + Arrays.asList(new UserProperty().withName("ofpsr").withValue("dataoujkcpyerfsngt"), + new UserProperty().withName("ijbolksehtyx").withValue("datagsurfnktxht"), + new UserProperty().withName("rzdqqo").withValue("datadralt"), + new UserProperty().withName("cttjibognhuqdkq").withValue("dataffcvahknv"))) + .withAdditionalProperties(mapOf("type", "piedcrtv")))); model = BinaryData.fromObject(model).toObject(IfConditionActivityTypeProperties.class); - Assertions.assertEquals("qlrmbgiaoxpfko", model.expression().value()); - Assertions.assertEquals("xxezur", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("ucnssp", model.ifTrueActivities().get(0).description()); + Assertions.assertEquals("qgelcccccc", model.expression().value()); + Assertions.assertEquals("mzpo", model.ifTrueActivities().get(0).name()); + Assertions.assertEquals("h", model.ifTrueActivities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ftklbbribgc", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifTrueActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("khvxtxuihy", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kktlodsyyzmf", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("eawolhlfffe", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("bmhqy", model.ifFalseActivities().get(0).description()); + Assertions.assertEquals("wbxp", model.ifTrueActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("qbtijybpfwgclppw", model.ifFalseActivities().get(0).name()); + Assertions.assertEquals("xh", model.ifFalseActivities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("coqtvxhipchdpdev", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("faqgzakisipjgvm", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gncscwsefdqnsu", model.ifFalseActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("sgyytnmhlankos", model.ifFalseActivities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java index d3b98fae5d401..6f21ba2a79494 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class ImpalaDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ImpalaDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datamby\",\"table\":\"datavwnbu\",\"schema\":\"dataodtevzshqykebmps\"}") + .fromString( + "{\"tableName\":\"datatqmlzuwtbdzqa\",\"table\":\"datakmpebfhlgeehb\",\"schema\":\"datagplnl\"}") .toObject(ImpalaDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ImpalaDatasetTypeProperties model = new ImpalaDatasetTypeProperties().withTableName("datamby") - .withTable("datavwnbu").withSchema("dataodtevzshqykebmps"); + ImpalaDatasetTypeProperties model = new 
ImpalaDatasetTypeProperties().withTableName("datatqmlzuwtbdzqa") + .withTable("datakmpebfhlgeehb") + .withSchema("datagplnl"); model = BinaryData.fromObject(model).toObject(ImpalaDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java index ef3b8faec8b9e..1f025e284182b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java @@ -19,32 +19,37 @@ public final class ImpalaObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ImpalaObjectDataset model = BinaryData.fromString( - "{\"type\":\"ImpalaObject\",\"typeProperties\":{\"tableName\":\"dataxiefcorzbidaeb\",\"table\":\"datanicewd\",\"schema\":\"datajwiylciobb\"},\"description\":\"ws\",\"structure\":\"dataeqx\",\"schema\":\"datacuuuexsmnteevfg\",\"linkedServiceName\":{\"referenceName\":\"xfezraqsddko\",\"parameters\":{\"w\":\"dataxqfkyrxgmzzeg\",\"fegaok\":\"datazfss\"}},\"parameters\":{\"fyuklxkelmz\":{\"type\":\"String\",\"defaultValue\":\"datara\"},\"gsingmhpavsfg\":{\"type\":\"SecureString\",\"defaultValue\":\"databwhuecx\"},\"klj\":{\"type\":\"Float\",\"defaultValue\":\"dataqrwwbdrwro\"}},\"annotations\":[\"dataqhqq\",\"dataarkyulfamea\",\"datasjqenh\"],\"folder\":{\"name\":\"azvgeytlplslfcv\"},\"\":{\"jocrhnxzmfvmw\":\"datasuowtolkyqf\",\"rawwhyxf\":\"datanrtc\"}}") + "{\"type\":\"zbyqhaath\",\"typeProperties\":{\"tableName\":\"datapgcryvidbzdylbvj\",\"table\":\"datagngwn\",\"schema\":\"dataftecgprzsqmpdqc\"},\"description\":\"yxuuc\",\"structure\":\"datausyrux\",\"schema\":\"datahhlhrvmgs\",\"linkedServiceName\":{\"referenceName\":\"pgmncrvtp\",\"parameters\":{\"cmpu\":\"dataromppzsauqmeuhpl\",\"e\":\"dataiugo\",\"rncfu\":\"datatlyspjymwicc\",\"iwtwfgoc\":\"datalakgixhqjqhgqwbb\"}},\"parameters\":{\"eohl\":{\"type\":\"Int\",\"defaultValue\":\"datamnnzugabkyydsy\"},\"nvbftswcdopnsep\":{\"type\":\"Object\",\"defaultValue\":\"datanv\"},\"af\":{\"type\":\"Object\",\"defaultValue\":\"datatic\"}},\"annotations\":[\"datawkopxdk\",\"datatwoqhgpp\"],\"folder\":{\"name\":\"ik\"},\"\":{\"zjnnuwwv\":\"dataxxfnduv\",\"bkj\":\"datazrfxajtbcqj\",\"i\":\"dataurxrjwyz\"}}") .toObject(ImpalaObjectDataset.class); - Assertions.assertEquals("ws", model.description()); - Assertions.assertEquals("xfezraqsddko", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("fyuklxkelmz").type()); - Assertions.assertEquals("azvgeytlplslfcv", model.folder().name()); + Assertions.assertEquals("yxuuc", model.description()); + Assertions.assertEquals("pgmncrvtp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("eohl").type()); + Assertions.assertEquals("ik", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ImpalaObjectDataset model = new ImpalaObjectDataset().withDescription("ws").withStructure("dataeqx") - .withSchema("datacuuuexsmnteevfg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xfezraqsddko") - 
.withParameters(mapOf("w", "dataxqfkyrxgmzzeg", "fegaok", "datazfss"))) - .withParameters(mapOf("fyuklxkelmz", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datara"), "gsingmhpavsfg", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("databwhuecx"), - "klj", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqrwwbdrwro"))) - .withAnnotations(Arrays.asList("dataqhqq", "dataarkyulfamea", "datasjqenh")) - .withFolder(new DatasetFolder().withName("azvgeytlplslfcv")).withTableName("dataxiefcorzbidaeb") - .withTable("datanicewd").withSchemaTypePropertiesSchema("datajwiylciobb"); + ImpalaObjectDataset model = new ImpalaObjectDataset().withDescription("yxuuc") + .withStructure("datausyrux") + .withSchema("datahhlhrvmgs") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pgmncrvtp") + .withParameters(mapOf("cmpu", "dataromppzsauqmeuhpl", "e", "dataiugo", "rncfu", "datatlyspjymwicc", + "iwtwfgoc", "datalakgixhqjqhgqwbb"))) + .withParameters(mapOf("eohl", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamnnzugabkyydsy"), + "nvbftswcdopnsep", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datanv"), "af", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datatic"))) + .withAnnotations(Arrays.asList("datawkopxdk", "datatwoqhgpp")) + .withFolder(new DatasetFolder().withName("ik")) + .withTableName("datapgcryvidbzdylbvj") + .withTable("datagngwn") + .withSchemaTypePropertiesSchema("dataftecgprzsqmpdqc"); model = BinaryData.fromObject(model).toObject(ImpalaObjectDataset.class); - Assertions.assertEquals("ws", model.description()); - Assertions.assertEquals("xfezraqsddko", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("fyuklxkelmz").type()); - Assertions.assertEquals("azvgeytlplslfcv", model.folder().name()); + Assertions.assertEquals("yxuuc", model.description()); + Assertions.assertEquals("pgmncrvtp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("eohl").type()); + Assertions.assertEquals("ik", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java index 78421c27fda7c..9e1fd094e0fe4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java @@ -11,15 +11,19 @@ public final class ImpalaSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ImpalaSource model = BinaryData.fromString( - "{\"type\":\"ImpalaSource\",\"query\":\"databrcdumkqhatckom\",\"queryTimeout\":\"datafjs\",\"additionalColumns\":\"datavzvkddaeiepvjr\",\"sourceRetryCount\":\"dataksx\",\"sourceRetryWait\":\"datakb\",\"maxConcurrentConnections\":\"datauawokrhhj\",\"disableMetricsCollection\":\"datahrmuwvs\",\"\":{\"imgg\":\"datauosidtxmbnm\"}}") + 
"{\"type\":\"bedvvmrtnmgabfz\",\"query\":\"datajlmstakgrebecxuu\",\"queryTimeout\":\"datawrv\",\"additionalColumns\":\"datajxkttx\",\"sourceRetryCount\":\"dataiihylzwzhlbpmp\",\"sourceRetryWait\":\"datath\",\"maxConcurrentConnections\":\"datanbnamtvooa\",\"disableMetricsCollection\":\"dataefx\",\"\":{\"ywi\":\"datavjctytytyrvtuxv\",\"kfqznvahpxdg\":\"datammmgbynvoytdt\",\"xfwwvmygcfaztoi\":\"datahowxcptx\",\"ytv\":\"dataszjrihcamg\"}}") .toObject(ImpalaSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ImpalaSource model = new ImpalaSource().withSourceRetryCount("dataksx").withSourceRetryWait("datakb") - .withMaxConcurrentConnections("datauawokrhhj").withDisableMetricsCollection("datahrmuwvs") - .withQueryTimeout("datafjs").withAdditionalColumns("datavzvkddaeiepvjr").withQuery("databrcdumkqhatckom"); + ImpalaSource model = new ImpalaSource().withSourceRetryCount("dataiihylzwzhlbpmp") + .withSourceRetryWait("datath") + .withMaxConcurrentConnections("datanbnamtvooa") + .withDisableMetricsCollection("dataefx") + .withQueryTimeout("datawrv") + .withAdditionalColumns("datajxkttx") + .withQuery("datajlmstakgrebecxuu"); model = BinaryData.fromObject(model).toObject(ImpalaSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java index 06bb9450de2ec..2b9054b356658 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java @@ -13,13 +13,13 @@ public final class ImportSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ImportSettings model - = BinaryData.fromString("{\"type\":\"ImportSettings\",\"\":{\"qmbnfvygttdcfjal\":\"dataojvcr\"}}") + = BinaryData.fromString("{\"type\":\"lhjgckkbna\",\"\":{\"orosahgcchzuap\":\"datayehmwzgfankeo\"}}") .toObject(ImportSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ImportSettings model = new ImportSettings().withAdditionalProperties(mapOf("type", "ImportSettings")); + ImportSettings model = new ImportSettings().withAdditionalProperties(mapOf("type", "lhjgckkbna")); model = BinaryData.fromObject(model).toObject(ImportSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java index 5ae99cddb3279..4843f842e1a11 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java @@ -11,16 +11,19 @@ public final class InformixSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { InformixSink model = BinaryData.fromString( - 
"{\"type\":\"InformixSink\",\"preCopyScript\":\"dataamgjy\",\"writeBatchSize\":\"datakttit\",\"writeBatchTimeout\":\"datamnx\",\"sinkRetryCount\":\"dataoadjooer\",\"sinkRetryWait\":\"datalzzmy\",\"maxConcurrentConnections\":\"datautqebpuoycawptxq\",\"disableMetricsCollection\":\"dataufdxpwj\",\"\":{\"cuk\":\"datavskpbuoc\",\"cepp\":\"datatcuvwwfgjjcaa\"}}") + "{\"type\":\"seehvmtyubvdou\",\"preCopyScript\":\"datazydwexo\",\"writeBatchSize\":\"datakhipaodohb\",\"writeBatchTimeout\":\"databbweaajgokpnb\",\"sinkRetryCount\":\"dataskhjjxesmbu\",\"sinkRetryWait\":\"datacshyhgahmte\",\"maxConcurrentConnections\":\"datafeoij\",\"disableMetricsCollection\":\"datapn\",\"\":{\"nejnjpwkwxnmqmyt\":\"datawgtrcccyiueh\",\"tyfnmwmgh\":\"dataqrj\",\"tnlbsv\":\"dataeedqakhccwj\",\"zafpvwrbqbyxu\":\"datasjvd\"}}") .toObject(InformixSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - InformixSink model = new InformixSink().withWriteBatchSize("datakttit").withWriteBatchTimeout("datamnx") - .withSinkRetryCount("dataoadjooer").withSinkRetryWait("datalzzmy") - .withMaxConcurrentConnections("datautqebpuoycawptxq").withDisableMetricsCollection("dataufdxpwj") - .withPreCopyScript("dataamgjy"); + InformixSink model = new InformixSink().withWriteBatchSize("datakhipaodohb") + .withWriteBatchTimeout("databbweaajgokpnb") + .withSinkRetryCount("dataskhjjxesmbu") + .withSinkRetryWait("datacshyhgahmte") + .withMaxConcurrentConnections("datafeoij") + .withDisableMetricsCollection("datapn") + .withPreCopyScript("datazydwexo"); model = BinaryData.fromObject(model).toObject(InformixSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java index e4ed3e83b6cb9..663493be918c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java @@ -11,15 +11,19 @@ public final class InformixSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { InformixSource model = BinaryData.fromString( - "{\"type\":\"InformixSource\",\"query\":\"datazuzvbqbroyrw\",\"queryTimeout\":\"databbfweozkbok\",\"additionalColumns\":\"datasu\",\"sourceRetryCount\":\"datacslzca\",\"sourceRetryWait\":\"datad\",\"maxConcurrentConnections\":\"datafwkpupbsgfnqtxl\",\"disableMetricsCollection\":\"dataoviklxsgstunsatc\",\"\":{\"tgsazwx\":\"datadbehkbuajkodpz\",\"hasjbuhz\":\"datafaas\"}}") + "{\"type\":\"mtcljopivtwxvcfc\",\"query\":\"dataohuabduf\",\"queryTimeout\":\"datapuaptpuwek\",\"additionalColumns\":\"datal\",\"sourceRetryCount\":\"datakkcjjnq\",\"sourceRetryWait\":\"datajoayaj\",\"maxConcurrentConnections\":\"datahcxjmapgfbzbxeqz\",\"disableMetricsCollection\":\"datakfrhfafx\",\"\":{\"ulvue\":\"dataucmuax\",\"geqpa\":\"datasrxqscdbbwejrmk\",\"neteehndfpflf\":\"datallfscosfmeot\"}}") .toObject(InformixSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - InformixSource model = new InformixSource().withSourceRetryCount("datacslzca").withSourceRetryWait("datad") - .withMaxConcurrentConnections("datafwkpupbsgfnqtxl").withDisableMetricsCollection("dataoviklxsgstunsatc") - 
.withQueryTimeout("databbfweozkbok").withAdditionalColumns("datasu").withQuery("datazuzvbqbroyrw"); + InformixSource model = new InformixSource().withSourceRetryCount("datakkcjjnq") + .withSourceRetryWait("datajoayaj") + .withMaxConcurrentConnections("datahcxjmapgfbzbxeqz") + .withDisableMetricsCollection("datakfrhfafx") + .withQueryTimeout("datapuaptpuwek") + .withAdditionalColumns("datal") + .withQuery("dataohuabduf"); model = BinaryData.fromObject(model).toObject(InformixSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java index 05f43e3e2407f..d7417b09a2969 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java @@ -19,33 +19,33 @@ public final class InformixTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { InformixTableDataset model = BinaryData.fromString( - "{\"type\":\"InformixTable\",\"typeProperties\":{\"tableName\":\"dataspth\"},\"description\":\"fmwtblgm\",\"structure\":\"datakqoikxiefwln\",\"schema\":\"datakffcnuestbsl\",\"linkedServiceName\":{\"referenceName\":\"e\",\"parameters\":{\"ikjiytehhxt\":\"dataccote\",\"n\":\"dataxqdwbymuq\"}},\"parameters\":{\"pek\":{\"type\":\"Bool\",\"defaultValue\":\"dataorctyse\"},\"tzcvimmwckoz\":{\"type\":\"Float\",\"defaultValue\":\"databyh\"},\"xup\":{\"type\":\"String\",\"defaultValue\":\"dataymtrts\"},\"rfrjschjxncqzahg\":{\"type\":\"String\",\"defaultValue\":\"datackjbcbkg\"}},\"annotations\":[\"datagdobimor\"],\"folder\":{\"name\":\"xosgihtrxue\"},\"\":{\"znjqswshe\":\"dataxqfg\"}}") + "{\"type\":\"owo\",\"typeProperties\":{\"tableName\":\"dataiqzzdckhsqdrrjsu\"},\"description\":\"xrxmyokohlsfjfo\",\"structure\":\"datajpz\",\"schema\":\"dataahuvkqxqkvadmjh\",\"linkedServiceName\":{\"referenceName\":\"mud\",\"parameters\":{\"rmclyqwwu\":\"dataajzdebhs\",\"svkb\":\"datayqkaaptb\",\"bloccu\":\"databptw\",\"uybutcdzjfjt\":\"dataplxzbnsshvqnpszb\"}},\"parameters\":{\"jgpqfk\":{\"type\":\"Array\",\"defaultValue\":\"datadyuxurxrltqmm\"},\"czscymqfvxgw\":{\"type\":\"Object\",\"defaultValue\":\"dataei\"}},\"annotations\":[\"datay\",\"datamzapdokez\",\"datape\",\"dataknfzqnzbflbqmhb\"],\"folder\":{\"name\":\"xvwedhag\"},\"\":{\"zzdcrolrz\":\"dataseseayuflms\"}}") .toObject(InformixTableDataset.class); - Assertions.assertEquals("fmwtblgm", model.description()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pek").type()); - Assertions.assertEquals("xosgihtrxue", model.folder().name()); + Assertions.assertEquals("xrxmyokohlsfjfo", model.description()); + Assertions.assertEquals("mud", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jgpqfk").type()); + Assertions.assertEquals("xvwedhag", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - InformixTableDataset model = new InformixTableDataset().withDescription("fmwtblgm") - 
.withStructure("datakqoikxiefwln").withSchema("datakffcnuestbsl") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("ikjiytehhxt", "dataccote", "n", "dataxqdwbymuq"))) - .withParameters(mapOf("pek", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataorctyse"), - "tzcvimmwckoz", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("databyh"), - "xup", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataymtrts"), - "rfrjschjxncqzahg", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datackjbcbkg"))) - .withAnnotations(Arrays.asList("datagdobimor")).withFolder(new DatasetFolder().withName("xosgihtrxue")) - .withTableName("dataspth"); + InformixTableDataset model = new InformixTableDataset().withDescription("xrxmyokohlsfjfo") + .withStructure("datajpz") + .withSchema("dataahuvkqxqkvadmjh") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mud") + .withParameters(mapOf("rmclyqwwu", "dataajzdebhs", "svkb", "datayqkaaptb", "bloccu", "databptw", + "uybutcdzjfjt", "dataplxzbnsshvqnpszb"))) + .withParameters(mapOf("jgpqfk", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datadyuxurxrltqmm"), + "czscymqfvxgw", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataei"))) + .withAnnotations(Arrays.asList("datay", "datamzapdokez", "datape", "dataknfzqnzbflbqmhb")) + .withFolder(new DatasetFolder().withName("xvwedhag")) + .withTableName("dataiqzzdckhsqdrrjsu"); model = BinaryData.fromObject(model).toObject(InformixTableDataset.class); - Assertions.assertEquals("fmwtblgm", model.description()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pek").type()); - Assertions.assertEquals("xosgihtrxue", model.folder().name()); + Assertions.assertEquals("xrxmyokohlsfjfo", model.description()); + Assertions.assertEquals("mud", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jgpqfk").type()); + Assertions.assertEquals("xvwedhag", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java index 20bb00cf3fc46..1eb4247c2e4f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java @@ -10,14 +10,13 @@ public final class InformixTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - InformixTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datacsqosecxlngo\"}") - .toObject(InformixTableDatasetTypeProperties.class); + InformixTableDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"databo\"}").toObject(InformixTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - 
InformixTableDatasetTypeProperties model - = new InformixTableDatasetTypeProperties().withTableName("datacsqosecxlngo"); + InformixTableDatasetTypeProperties model = new InformixTableDatasetTypeProperties().withTableName("databo"); model = BinaryData.fromObject(model).toObject(InformixTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java index fde879d78f0b2..82b99caf9f655 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java @@ -21,75 +21,82 @@ public final class IntegrationRuntimeComputePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeComputeProperties model = BinaryData.fromString( - "{\"location\":\"wojvxv\",\"nodeSize\":\"hrqxrqghotingzi\",\"numberOfNodes\":896793765,\"maxParallelExecutionsPerNode\":117425644,\"dataFlowProperties\":{\"computeType\":\"MemoryOptimized\",\"coreCount\":199030014,\"timeToLive\":1937656001,\"cleanup\":false,\"customProperties\":[{\"name\":\"yelvy\",\"value\":\"vpuqyrp\"},{\"name\":\"bkhcidcfw\",\"value\":\"lkugzowgmmixfzau\"},{\"name\":\"blnagjnpahzhpqsc\",\"value\":\"ileqjzrijebmuio\"},{\"name\":\"mndwohoeashuxfvb\",\"value\":\"mzwynsmmphvkyez\"}],\"\":{\"jwq\":\"datayuoyj\",\"yaymhx\":\"dataslqreofzrkrzt\",\"vyotpcvpahhpk\":\"datanmwaxsymnrtvqmim\",\"dwtominru\":\"datayqpvzxxz\"}},\"vNetProperties\":{\"vNetId\":\"auygasfmhbxvew\",\"subnet\":\"natxvuzccaliry\",\"publicIPs\":[\"caq\",\"johlcbnrvxyyhh\"],\"subnetId\":\"sztqfrpan\",\"\":{\"imkjzcx\":\"dataiwduukaa\",\"f\":\"datasj\",\"xijovuyxuupzeadd\":\"datavksijrjgyind\",\"zy\":\"datatopdtphvjgv\"}},\"copyComputeScaleProperties\":{\"dataIntegrationUnit\":1732999100,\"timeToLive\":1693962581,\"\":{\"nyzpu\":\"dataxzxbth\",\"koabfcvefbw\":\"datanrmd\"}},\"pipelineExternalComputeScaleProperties\":{\"timeToLive\":1354669108,\"numberOfPipelineNodes\":1621925601,\"numberOfExternalNodes\":66962308,\"\":{\"grz\":\"datasmryuyutkbx\",\"yncyzjnd\":\"datakyqguauuihkybg\"}},\"\":{\"vypmwdz\":\"datamxiurpf\",\"qimyhxnpdggllyd\":\"datajpcroxpp\",\"jnstz\":\"datayufdmzucxvzvwlx\",\"oxklrzats\":\"dataur\"}}") + 
"{\"location\":\"ha\",\"nodeSize\":\"ytuecmgu\",\"numberOfNodes\":2137117317,\"maxParallelExecutionsPerNode\":1491172911,\"dataFlowProperties\":{\"computeType\":\"MemoryOptimized\",\"coreCount\":613256770,\"timeToLive\":776631813,\"cleanup\":true,\"customProperties\":[{\"name\":\"qzq\",\"value\":\"seujcmtcid\"},{\"name\":\"b\",\"value\":\"xhcxct\"},{\"name\":\"xoef\",\"value\":\"orylx\"},{\"name\":\"nwkjzvq\",\"value\":\"ymtupyjtrxxzwdsn\"}],\"\":{\"sdlul\":\"dataefnakdmtpj\",\"ttl\":\"datatjxhxwt\",\"lynvpdvctqdapyds\":\"datavukvupuplug\",\"pj\":\"datap\"}},\"vNetProperties\":{\"vNetId\":\"lbnru\",\"subnet\":\"ehy\",\"publicIPs\":[\"mjrgfbmp\"],\"subnetId\":\"wkbcstzuw\",\"\":{\"maxdwxrwq\":\"datae\"}},\"copyComputeScaleProperties\":{\"dataIntegrationUnit\":315112243,\"timeToLive\":768261244,\"\":{\"pghhuluqyfvgpq\":\"datakxyrlkgjhomywl\"}},\"pipelineExternalComputeScaleProperties\":{\"timeToLive\":595808141,\"numberOfPipelineNodes\":266503485,\"numberOfExternalNodes\":821047302,\"\":{\"mc\":\"datambuihtqfvyq\",\"joxsehj\":\"datauguvlieegjnqwh\",\"xdl\":\"datacgqcrwaucftotedh\",\"axrhljwqu\":\"datauhffjjqvjyqmpmsk\"}},\"\":{\"jvwfijfdj\":\"datapypboqlefxfpwm\",\"lbqntdde\":\"datarvsvhmsmrihddnb\"}}") .toObject(IntegrationRuntimeComputeProperties.class); - Assertions.assertEquals("wojvxv", model.location()); - Assertions.assertEquals("hrqxrqghotingzi", model.nodeSize()); - Assertions.assertEquals(896793765, model.numberOfNodes()); - Assertions.assertEquals(117425644, model.maxParallelExecutionsPerNode()); + Assertions.assertEquals("ha", model.location()); + Assertions.assertEquals("ytuecmgu", model.nodeSize()); + Assertions.assertEquals(2137117317, model.numberOfNodes()); + Assertions.assertEquals(1491172911, model.maxParallelExecutionsPerNode()); Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.dataFlowProperties().computeType()); - Assertions.assertEquals(199030014, model.dataFlowProperties().coreCount()); - Assertions.assertEquals(1937656001, model.dataFlowProperties().timeToLive()); - Assertions.assertEquals(false, model.dataFlowProperties().cleanup()); - Assertions.assertEquals("yelvy", model.dataFlowProperties().customProperties().get(0).name()); - Assertions.assertEquals("vpuqyrp", model.dataFlowProperties().customProperties().get(0).value()); - Assertions.assertEquals("auygasfmhbxvew", model.vNetProperties().vNetId()); - Assertions.assertEquals("natxvuzccaliry", model.vNetProperties().subnet()); - Assertions.assertEquals("caq", model.vNetProperties().publicIPs().get(0)); - Assertions.assertEquals("sztqfrpan", model.vNetProperties().subnetId()); - Assertions.assertEquals(1732999100, model.copyComputeScaleProperties().dataIntegrationUnit()); - Assertions.assertEquals(1693962581, model.copyComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1354669108, model.pipelineExternalComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1621925601, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); - Assertions.assertEquals(66962308, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); + Assertions.assertEquals(613256770, model.dataFlowProperties().coreCount()); + Assertions.assertEquals(776631813, model.dataFlowProperties().timeToLive()); + Assertions.assertEquals(true, model.dataFlowProperties().cleanup()); + Assertions.assertEquals("qzq", model.dataFlowProperties().customProperties().get(0).name()); + Assertions.assertEquals("seujcmtcid", model.dataFlowProperties().customProperties().get(0).value()); + 
Assertions.assertEquals("lbnru", model.vNetProperties().vNetId()); + Assertions.assertEquals("ehy", model.vNetProperties().subnet()); + Assertions.assertEquals("mjrgfbmp", model.vNetProperties().publicIPs().get(0)); + Assertions.assertEquals("wkbcstzuw", model.vNetProperties().subnetId()); + Assertions.assertEquals(315112243, model.copyComputeScaleProperties().dataIntegrationUnit()); + Assertions.assertEquals(768261244, model.copyComputeScaleProperties().timeToLive()); + Assertions.assertEquals(595808141, model.pipelineExternalComputeScaleProperties().timeToLive()); + Assertions.assertEquals(266503485, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); + Assertions.assertEquals(821047302, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeComputeProperties model = new IntegrationRuntimeComputeProperties().withLocation("wojvxv") - .withNodeSize("hrqxrqghotingzi").withNumberOfNodes(896793765).withMaxParallelExecutionsPerNode(117425644) - .withDataFlowProperties( - new IntegrationRuntimeDataFlowProperties().withComputeType(DataFlowComputeType.MEMORY_OPTIMIZED) - .withCoreCount(199030014).withTimeToLive(1937656001).withCleanup(false) - .withCustomProperties(Arrays.asList( - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("yelvy") - .withValue("vpuqyrp"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("bkhcidcfw") - .withValue("lkugzowgmmixfzau"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("blnagjnpahzhpqsc") - .withValue("ileqjzrijebmuio"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("mndwohoeashuxfvb") - .withValue("mzwynsmmphvkyez"))) - .withAdditionalProperties(mapOf())) - .withVNetProperties(new IntegrationRuntimeVNetProperties().withVNetId("auygasfmhbxvew") - .withSubnet("natxvuzccaliry").withPublicIPs(Arrays.asList("caq", "johlcbnrvxyyhh")) - .withSubnetId("sztqfrpan").withAdditionalProperties(mapOf())) - .withCopyComputeScaleProperties(new CopyComputeScaleProperties().withDataIntegrationUnit(1732999100) - .withTimeToLive(1693962581).withAdditionalProperties(mapOf())) - .withPipelineExternalComputeScaleProperties(new PipelineExternalComputeScaleProperties() - .withTimeToLive(1354669108).withNumberOfPipelineNodes(1621925601).withNumberOfExternalNodes(66962308) + IntegrationRuntimeComputeProperties model = new IntegrationRuntimeComputeProperties().withLocation("ha") + .withNodeSize("ytuecmgu") + .withNumberOfNodes(2137117317) + .withMaxParallelExecutionsPerNode(1491172911) + .withDataFlowProperties(new IntegrationRuntimeDataFlowProperties() + .withComputeType(DataFlowComputeType.MEMORY_OPTIMIZED) + .withCoreCount(613256770) + .withTimeToLive(776631813) + .withCleanup(true) + .withCustomProperties(Arrays.asList( + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("qzq") + .withValue("seujcmtcid"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("b").withValue("xhcxct"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("xoef").withValue("orylx"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("nwkjzvq") + .withValue("ymtupyjtrxxzwdsn"))) + .withAdditionalProperties(mapOf())) + .withVNetProperties(new IntegrationRuntimeVNetProperties().withVNetId("lbnru") + .withSubnet("ehy") + .withPublicIPs(Arrays.asList("mjrgfbmp")) + 
.withSubnetId("wkbcstzuw") .withAdditionalProperties(mapOf())) + .withCopyComputeScaleProperties(new CopyComputeScaleProperties().withDataIntegrationUnit(315112243) + .withTimeToLive(768261244) + .withAdditionalProperties(mapOf())) + .withPipelineExternalComputeScaleProperties( + new PipelineExternalComputeScaleProperties().withTimeToLive(595808141) + .withNumberOfPipelineNodes(266503485) + .withNumberOfExternalNodes(821047302) + .withAdditionalProperties(mapOf())) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeComputeProperties.class); - Assertions.assertEquals("wojvxv", model.location()); - Assertions.assertEquals("hrqxrqghotingzi", model.nodeSize()); - Assertions.assertEquals(896793765, model.numberOfNodes()); - Assertions.assertEquals(117425644, model.maxParallelExecutionsPerNode()); + Assertions.assertEquals("ha", model.location()); + Assertions.assertEquals("ytuecmgu", model.nodeSize()); + Assertions.assertEquals(2137117317, model.numberOfNodes()); + Assertions.assertEquals(1491172911, model.maxParallelExecutionsPerNode()); Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.dataFlowProperties().computeType()); - Assertions.assertEquals(199030014, model.dataFlowProperties().coreCount()); - Assertions.assertEquals(1937656001, model.dataFlowProperties().timeToLive()); - Assertions.assertEquals(false, model.dataFlowProperties().cleanup()); - Assertions.assertEquals("yelvy", model.dataFlowProperties().customProperties().get(0).name()); - Assertions.assertEquals("vpuqyrp", model.dataFlowProperties().customProperties().get(0).value()); - Assertions.assertEquals("auygasfmhbxvew", model.vNetProperties().vNetId()); - Assertions.assertEquals("natxvuzccaliry", model.vNetProperties().subnet()); - Assertions.assertEquals("caq", model.vNetProperties().publicIPs().get(0)); - Assertions.assertEquals("sztqfrpan", model.vNetProperties().subnetId()); - Assertions.assertEquals(1732999100, model.copyComputeScaleProperties().dataIntegrationUnit()); - Assertions.assertEquals(1693962581, model.copyComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1354669108, model.pipelineExternalComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1621925601, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); - Assertions.assertEquals(66962308, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); + Assertions.assertEquals(613256770, model.dataFlowProperties().coreCount()); + Assertions.assertEquals(776631813, model.dataFlowProperties().timeToLive()); + Assertions.assertEquals(true, model.dataFlowProperties().cleanup()); + Assertions.assertEquals("qzq", model.dataFlowProperties().customProperties().get(0).name()); + Assertions.assertEquals("seujcmtcid", model.dataFlowProperties().customProperties().get(0).value()); + Assertions.assertEquals("lbnru", model.vNetProperties().vNetId()); + Assertions.assertEquals("ehy", model.vNetProperties().subnet()); + Assertions.assertEquals("mjrgfbmp", model.vNetProperties().publicIPs().get(0)); + Assertions.assertEquals("wkbcstzuw", model.vNetProperties().subnetId()); + Assertions.assertEquals(315112243, model.copyComputeScaleProperties().dataIntegrationUnit()); + Assertions.assertEquals(768261244, model.copyComputeScaleProperties().timeToLive()); + Assertions.assertEquals(595808141, model.pipelineExternalComputeScaleProperties().timeToLive()); + Assertions.assertEquals(266503485, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); 
+ Assertions.assertEquals(821047302, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java index 2787a0ad4996c..dac59ca9718c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java @@ -11,16 +11,16 @@ public final class IntegrationRuntimeCustomerVirtualNetworkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - IntegrationRuntimeCustomerVirtualNetwork model = BinaryData.fromString("{\"subnetId\":\"hoodttq\"}") + IntegrationRuntimeCustomerVirtualNetwork model = BinaryData.fromString("{\"subnetId\":\"vbploazcc\"}") .toObject(IntegrationRuntimeCustomerVirtualNetwork.class); - Assertions.assertEquals("hoodttq", model.subnetId()); + Assertions.assertEquals("vbploazcc", model.subnetId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeCustomerVirtualNetwork model - = new IntegrationRuntimeCustomerVirtualNetwork().withSubnetId("hoodttq"); + = new IntegrationRuntimeCustomerVirtualNetwork().withSubnetId("vbploazcc"); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeCustomerVirtualNetwork.class); - Assertions.assertEquals("hoodttq", model.subnetId()); + Assertions.assertEquals("vbploazcc", model.subnetId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java index 906b5e408cafc..1b2f53e730bc8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java @@ -12,18 +12,18 @@ public final class IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem model - = BinaryData.fromString("{\"name\":\"vzhn\",\"value\":\"vtoiqofzttqg\"}") + = BinaryData.fromString("{\"name\":\"mckgpxdxgcq\",\"value\":\"uv\"}") .toObject(IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.class); - Assertions.assertEquals("vzhn", model.name()); - Assertions.assertEquals("vtoiqofzttqg", model.value()); + Assertions.assertEquals("mckgpxdxgcq", model.name()); + Assertions.assertEquals("uv", model.value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem model - = new 
IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("vzhn").withValue("vtoiqofzttqg"); + = new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("mckgpxdxgcq").withValue("uv"); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.class); - Assertions.assertEquals("vzhn", model.name()); - Assertions.assertEquals("vtoiqofzttqg", model.value()); + Assertions.assertEquals("mckgpxdxgcq", model.name()); + Assertions.assertEquals("uv", model.value()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java index e837f9eda7174..82c19504a0aa0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java @@ -17,36 +17,37 @@ public final class IntegrationRuntimeDataFlowPropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeDataFlowProperties model = BinaryData.fromString( - "{\"computeType\":\"MemoryOptimized\",\"coreCount\":2135115629,\"timeToLive\":421136528,\"cleanup\":true,\"customProperties\":[{\"name\":\"hqhnfubevwadxcez\",\"value\":\"vltfebqoqiaklqa\"},{\"name\":\"stif\",\"value\":\"twrphmriipzgf\"},{\"name\":\"u\",\"value\":\"cjqnea\"},{\"name\":\"gttbargeey\",\"value\":\"ls\"}],\"\":{\"hsppvjsduouoqte\":\"dataaxoyvgjjpfy\",\"apaseqcppypfre\":\"dataqsomuogeq\"}}") + "{\"computeType\":\"General\",\"coreCount\":1079304457,\"timeToLive\":776293565,\"cleanup\":true,\"customProperties\":[{\"name\":\"mzsf\",\"value\":\"yyysqnwnl\"},{\"name\":\"zfjd\",\"value\":\"fthsydphdb\"},{\"name\":\"etfgkzqbvh\",\"value\":\"ulymk\"},{\"name\":\"s\",\"value\":\"h\"}],\"\":{\"biubzg\":\"datahaenpftkgmbmv\",\"rbusnaq\":\"datapsotbame\",\"erlurgipvnbxle\":\"datavruuhyncppmmwh\"}}") .toObject(IntegrationRuntimeDataFlowProperties.class); - Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.computeType()); - Assertions.assertEquals(2135115629, model.coreCount()); - Assertions.assertEquals(421136528, model.timeToLive()); + Assertions.assertEquals(DataFlowComputeType.GENERAL, model.computeType()); + Assertions.assertEquals(1079304457, model.coreCount()); + Assertions.assertEquals(776293565, model.timeToLive()); Assertions.assertEquals(true, model.cleanup()); - Assertions.assertEquals("hqhnfubevwadxcez", model.customProperties().get(0).name()); - Assertions.assertEquals("vltfebqoqiaklqa", model.customProperties().get(0).value()); + Assertions.assertEquals("mzsf", model.customProperties().get(0).name()); + Assertions.assertEquals("yyysqnwnl", model.customProperties().get(0).value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeDataFlowProperties model = new IntegrationRuntimeDataFlowProperties() - .withComputeType(DataFlowComputeType.MEMORY_OPTIMIZED).withCoreCount(2135115629).withTimeToLive(421136528) + .withComputeType(DataFlowComputeType.GENERAL) + .withCoreCount(1079304457) + .withTimeToLive(776293565) .withCleanup(true) .withCustomProperties(Arrays.asList( - new 
IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("hqhnfubevwadxcez") - .withValue("vltfebqoqiaklqa"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("stif") - .withValue("twrphmriipzgf"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("u").withValue("cjqnea"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("gttbargeey").withValue("ls"))) + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("mzsf").withValue("yyysqnwnl"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("zfjd").withValue("fthsydphdb"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("etfgkzqbvh") + .withValue("ulymk"), + new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("s").withValue("h"))) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDataFlowProperties.class); - Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.computeType()); - Assertions.assertEquals(2135115629, model.coreCount()); - Assertions.assertEquals(421136528, model.timeToLive()); + Assertions.assertEquals(DataFlowComputeType.GENERAL, model.computeType()); + Assertions.assertEquals(1079304457, model.coreCount()); + Assertions.assertEquals(776293565, model.timeToLive()); Assertions.assertEquals(true, model.cleanup()); - Assertions.assertEquals("hqhnfubevwadxcez", model.customProperties().get(0).name()); - Assertions.assertEquals("vltfebqoqiaklqa", model.customProperties().get(0).value()); + Assertions.assertEquals("mzsf", model.customProperties().get(0).name()); + Assertions.assertEquals("yyysqnwnl", model.customProperties().get(0).value()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java index d773ec5c5f18e..6de773b7a3e72 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java @@ -14,33 +14,34 @@ public final class IntegrationRuntimeDataProxyPropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeDataProxyProperties model = BinaryData.fromString( - "{\"connectVia\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"icbicbogsfo\"},\"stagingLinkedService\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"iyf\"},\"path\":\"nrukcy\"}") + "{\"connectVia\":{\"type\":\"IntegrationRuntimeReference\",\"referenceName\":\"iyapifcjl\"},\"stagingLinkedService\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"zowcgrzafqso\"},\"path\":\"wqx\"}") .toObject(IntegrationRuntimeDataProxyProperties.class); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, + Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.connectVia().type()); - Assertions.assertEquals("icbicbogsfo", model.connectVia().referenceName()); + Assertions.assertEquals("iyapifcjl", 
model.connectVia().referenceName()); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, model.stagingLinkedService().type()); - Assertions.assertEquals("iyf", model.stagingLinkedService().referenceName()); - Assertions.assertEquals("nrukcy", model.path()); + Assertions.assertEquals("zowcgrzafqso", model.stagingLinkedService().referenceName()); + Assertions.assertEquals("wqx", model.path()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeDataProxyProperties model = new IntegrationRuntimeDataProxyProperties() .withConnectVia( + new EntityReference().withType(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE) + .withReferenceName("iyapifcjl")) + .withStagingLinkedService( new EntityReference().withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE) - .withReferenceName("icbicbogsfo")) - .withStagingLinkedService(new EntityReference() - .withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE).withReferenceName("iyf")) - .withPath("nrukcy"); + .withReferenceName("zowcgrzafqso")) + .withPath("wqx"); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDataProxyProperties.class); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, + Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.connectVia().type()); - Assertions.assertEquals("icbicbogsfo", model.connectVia().referenceName()); + Assertions.assertEquals("iyapifcjl", model.connectVia().referenceName()); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, model.stagingLinkedService().type()); - Assertions.assertEquals("iyf", model.stagingLinkedService().referenceName()); - Assertions.assertEquals("nrukcy", model.path()); + Assertions.assertEquals("zowcgrzafqso", model.stagingLinkedService().referenceName()); + Assertions.assertEquals("wqx", model.path()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java index 491b6ea486ed3..4e72079f0ea4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java @@ -5,41 +5,26 @@ package com.azure.resourcemanager.datafactory.generated; import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntime; import org.junit.jupiter.api.Assertions; public final class IntegrationRuntimeDebugResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"xbzlmc\",\"\":{\"onqzinkfkbgbzbow\":\"datapcvhdbevwqqxeys\",\"qkjjeokbz\":\"dataeqocljmygvk\"}},\"name\":\"ezrxcczurtleipqx\"}") + 
"{\"properties\":{\"type\":\"Managed\",\"description\":\"qwztcmwqkc\",\"\":{\"xfdeqvhpsyl\":\"datawaxfewzjkj\",\"bffmbmxz\":\"dataksh\",\"jx\":\"datargywwp\"}},\"name\":\"ptfujgicgaaoept\"}") .toObject(IntegrationRuntimeDebugResource.class); - Assertions.assertEquals("ezrxcczurtleipqx", model.name()); - Assertions.assertEquals("xbzlmc", model.properties().description()); + Assertions.assertEquals("ptfujgicgaaoept", model.name()); + Assertions.assertEquals("qwztcmwqkc", model.properties().description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeDebugResource model - = new IntegrationRuntimeDebugResource().withName("ezrxcczurtleipqx").withProperties(new IntegrationRuntime() - .withDescription("xbzlmc").withAdditionalProperties(mapOf("type", "IntegrationRuntime"))); + IntegrationRuntimeDebugResource model = new IntegrationRuntimeDebugResource().withName("ptfujgicgaaoept") + .withProperties(new ManagedIntegrationRuntime().withDescription("qwztcmwqkc")); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDebugResource.class); - Assertions.assertEquals("ezrxcczurtleipqx", model.name()); - Assertions.assertEquals("xbzlmc", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("ptfujgicgaaoept", model.name()); + Assertions.assertEquals("qwztcmwqkc", model.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java index f8ef00e543b30..a41d764392d27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java @@ -6,55 +6,36 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeListResponse; +import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntime; +import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime; import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; import org.junit.jupiter.api.Assertions; public final class IntegrationRuntimeListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"hkaetcktvfc\",\"\":{\"jf\":\"datasnkymuctq\",\"fuwutttxf\":\"dataebrjcxe\",\"hfnljkyq\":\"datajrbirphxepcyv\"}},\"name\":\"vuujq\",\"type\":\"dokgjl\",\"etag\":\"oxgvclt\",\"id\":\"sncghkjeszz\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"ijhtxf\",\"\":{\"xnehmpvec\":\"databfs\"}},\"name\":\"odebfqkkrbmpu\",\"type\":\"riwflzlfb\",\"etag\":\"puz\",\"id\":\"ispnqzahmgkbrp\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"dhibnuq\",\"\":{\"drgvtqagn\":\"dataik\",\"mebf\":\"datauynhijg\"}},\"name\":\"arbu\",\"type\":\"cvpnazzmhjrunmpx\",\"etag\":\"dbhrbnlankxm\",\"id\":\"k\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"henbtkcxywnytn\",\"\":{\"lhaaxdbabp\":\"datanlqidybyxczf\"}},\"name\":\"wrqlfktsthsuco\",\"type\":\"nyyazttbtwwrqpue\",\"etag\":\"kzywbiex\",\"id\":\"eyueaxibxujwb\"}],\"nextLink\":\"walm\"}") + "{\"value\":[{\"properties\":{\"type\":\"SelfHosted\",\"description\":\"oxgvclt\",\"\":{\"zz\":\"datancghkje\",\"mxnehmp\":\"databijhtxfvgxbf\",\"godebfqkkrbmpu\":\"dataec\"}},\"name\":\"riwflzlfb\",\"type\":\"puz\",\"etag\":\"ispnqzahmgkbrp\",\"id\":\"dhibnuq\"},{\"properties\":{\"type\":\"Managed\",\"description\":\"kadrgvt\",\"\":{\"uynhijg\":\"datan\",\"iarbutrcvpna\":\"datamebf\"}},\"name\":\"mhjrunmpxttdbhr\",\"type\":\"l\",\"etag\":\"kx\",\"id\":\"skpbhenbtkcxywn\"}],\"nextLink\":\"nrs\"}") .toObject(IntegrationRuntimeListResponse.class); - Assertions.assertEquals("sncghkjeszz", model.value().get(0).id()); - Assertions.assertEquals("hkaetcktvfc", model.value().get(0).properties().description()); - Assertions.assertEquals("walm", model.nextLink()); + Assertions.assertEquals("dhibnuq", model.value().get(0).id()); + Assertions.assertEquals("oxgvclt", model.value().get(0).properties().description()); + Assertions.assertEquals("nrs", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeListResponse model = new IntegrationRuntimeListResponse() - .withValue(Arrays.asList( - new IntegrationRuntimeResourceInner().withId("sncghkjeszz") - .withProperties(new IntegrationRuntime().withDescription("hkaetcktvfc") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("ispnqzahmgkbrp") - .withProperties(new IntegrationRuntime().withDescription("ijhtxf") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("k") - .withProperties(new IntegrationRuntime().withDescription("dhibnuq") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("eyueaxibxujwb").withProperties(new IntegrationRuntime() - .withDescription("henbtkcxywnytn").withAdditionalProperties(mapOf("type", "IntegrationRuntime"))))) - .withNextLink("walm"); + IntegrationRuntimeListResponse model + = new IntegrationRuntimeListResponse() + .withValue(Arrays.asList( + new IntegrationRuntimeResourceInner().withId("dhibnuq") + .withProperties(new SelfHostedIntegrationRuntime().withDescription("oxgvclt")), + new IntegrationRuntimeResourceInner().withId("skpbhenbtkcxywn") + .withProperties(new ManagedIntegrationRuntime().withDescription("kadrgvt")))) + .withNextLink("nrs"); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeListResponse.class); - Assertions.assertEquals("sncghkjeszz", model.value().get(0).id()); - Assertions.assertEquals("hkaetcktvfc", 
model.value().get(0).properties().description()); - Assertions.assertEquals("walm", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("dhibnuq", model.value().get(0).id()); + Assertions.assertEquals("oxgvclt", model.value().get(0).properties().description()); + Assertions.assertEquals("nrs", model.nextLink()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java index ecbb1ef868374..fe89917e029a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java @@ -16,20 +16,20 @@ public final class IntegrationRuntimeMonitoringDataInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeMonitoringDataInner model = BinaryData.fromString( - "{\"name\":\"k\",\"nodes\":[{\"nodeName\":\"io\",\"availableMemoryInMB\":952194839,\"cpuUtilization\":1578257059,\"concurrentJobsLimit\":391809232,\"concurrentJobsRunning\":1039189909,\"maxConcurrentJobs\":1375924345,\"sentBytes\":68.58864,\"receivedBytes\":66.46081,\"\":{\"jooxdjebw\":\"datasowzxcugi\"}}]}") + "{\"name\":\"xsbkyvpyca\",\"nodes\":[{\"nodeName\":\"p\",\"availableMemoryInMB\":1414672968,\"cpuUtilization\":172271307,\"concurrentJobsLimit\":1834539320,\"concurrentJobsRunning\":2051758335,\"maxConcurrentJobs\":1514524475,\"sentBytes\":30.272806,\"receivedBytes\":79.5002,\"\":{\"srtslhspkdeem\":\"dataseyvj\",\"ahvljuaha\":\"dataofmxagkvtmelmqkr\"}}]}") .toObject(IntegrationRuntimeMonitoringDataInner.class); - Assertions.assertEquals("k", model.name()); + Assertions.assertEquals("xsbkyvpyca", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeMonitoringDataInner model = new IntegrationRuntimeMonitoringDataInner().withName("k") + IntegrationRuntimeMonitoringDataInner model = new IntegrationRuntimeMonitoringDataInner().withName("xsbkyvpyca") .withNodes(Arrays.asList(new IntegrationRuntimeNodeMonitoringData() - .withAdditionalProperties(mapOf("nodeName", "io", "cpuUtilization", 1578257059, "receivedBytes", - 66.46081f, "concurrentJobsLimit", 391809232, "concurrentJobsRunning", 1039189909, - "maxConcurrentJobs", 1375924345, "availableMemoryInMB", 952194839, "sentBytes", 68.58864f)))); + .withAdditionalProperties(mapOf("nodeName", "p", "cpuUtilization", 172271307, "receivedBytes", 79.5002f, + "concurrentJobsLimit", 1834539320, "concurrentJobsRunning", 2051758335, "maxConcurrentJobs", + 1514524475, "availableMemoryInMB", 1414672968, "sentBytes", 30.272806f)))); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeMonitoringDataInner.class); - Assertions.assertEquals("k", model.name()); + Assertions.assertEquals("xsbkyvpyca", model.name()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java index 76c3160ed3361..64938495fd3ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java @@ -11,7 +11,7 @@ public final class IntegrationRuntimeNodeIpAddressInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeNodeIpAddressInner model - = BinaryData.fromString("{\"ipAddress\":\"nr\"}").toObject(IntegrationRuntimeNodeIpAddressInner.class); + = BinaryData.fromString("{\"ipAddress\":\"mv\"}").toObject(IntegrationRuntimeNodeIpAddressInner.class); } @org.junit.jupiter.api.Test diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java index 13152d8b690be..8b910bb918ad7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java @@ -13,16 +13,16 @@ public final class IntegrationRuntimeNodeMonitoringDataTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeNodeMonitoringData model = BinaryData.fromString( - "{\"nodeName\":\"cwwfvovbvme\",\"availableMemoryInMB\":321338352,\"cpuUtilization\":1542315989,\"concurrentJobsLimit\":1560245881,\"concurrentJobsRunning\":343837467,\"maxConcurrentJobs\":1240635196,\"sentBytes\":81.17821,\"receivedBytes\":67.35784,\"\":{\"wit\":\"datajueiotwmcdytd\"}}") + "{\"nodeName\":\"hcdhmdual\",\"availableMemoryInMB\":115440556,\"cpuUtilization\":758421621,\"concurrentJobsLimit\":1595213350,\"concurrentJobsRunning\":1911566048,\"maxConcurrentJobs\":294636913,\"sentBytes\":37.394695,\"receivedBytes\":4.791844,\"\":{\"zlfmisgwbnbbeld\":\"dataxpvgo\",\"ali\":\"datawkz\"}}") .toObject(IntegrationRuntimeNodeMonitoringData.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeNodeMonitoringData model = new IntegrationRuntimeNodeMonitoringData() - .withAdditionalProperties(mapOf("nodeName", "cwwfvovbvme", "cpuUtilization", 1542315989, "receivedBytes", - 67.35784f, "concurrentJobsLimit", 1560245881, "concurrentJobsRunning", 343837467, "maxConcurrentJobs", - 1240635196, "availableMemoryInMB", 321338352, "sentBytes", 81.17821f)); + .withAdditionalProperties(mapOf("nodeName", "hcdhmdual", "cpuUtilization", 758421621, "receivedBytes", + 4.791844f, "concurrentJobsLimit", 1595213350, "concurrentJobsRunning", 1911566048, "maxConcurrentJobs", + 294636913, "availableMemoryInMB", 115440556, "sentBytes", 37.394695f)); model = 
BinaryData.fromObject(model).toObject(IntegrationRuntimeNodeMonitoringData.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java index e22c2680ea614..cc1491dfa23bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeNodesDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.integrationRuntimeNodes().deleteWithResponse("vdqfkjg", "lcfoaabltvltt", "plxbxfrl", "yikcnlbehxoy", - com.azure.core.util.Context.NONE); + manager.integrationRuntimeNodes() + .deleteWithResponse("adxs", "otozxadk", "qaptvmbybs", "vzevpypkf", com.azure.core.util.Context.NONE); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java index ef740fda3bd85..fe351562fe82b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java @@ -6,48 +6,31 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeIpAddress; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeNodesGetIpAddressWithResponseMockTests { @Test public void testGetIpAddressWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"ipAddress\":\"vfl\"}"; - String responseStr = "{\"ipAddress\":\"j\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); IntegrationRuntimeNodeIpAddress response = manager.integrationRuntimeNodes() - .getIpAddressWithResponse("xzhbfibzvxqh", "pjdbz", "lchv", "sydjr", com.azure.core.util.Context.NONE) + .getIpAddressWithResponse("peyyzmxuelplbbs", "inldmflngjtltxfo", "yqqleylvyhxlp", "ock", + com.azure.core.util.Context.NONE) 
.getValue(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java index c4f3a06c7e02f..46041893aabfe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java @@ -6,49 +6,31 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNode; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeNodesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"nodeName\":\"jxtzy\",\"machineName\":\"qmp\",\"hostServiceUri\":\"xiioasvykt\",\"status\":\"Limited\",\"capabilities\":{\"egqzqdcohsqufsy\":\"wjwl\",\"z\":\"hsnzsjoxuogyakex\",\"nbmngstvnkshaul\":\"lh\",\"oqyin\":\"tvlyl\"},\"versionStatus\":\"gxncoaiyflvvmd\",\"version\":\"ytaocxak\",\"registerTime\":\"2021-02-13T13:27:18Z\",\"lastConnectTime\":\"2021-11-19T07:48:54Z\",\"expiryTime\":\"2021-11-08T15:41:18Z\",\"lastStartTime\":\"2021-10-24T21:52:16Z\",\"lastStopTime\":\"2021-04-19T16:57:35Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-06-01T19:59:49Z\",\"lastEndUpdateTime\":\"2021-06-04T07:00:41Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":482006335,\"maxConcurrentJobs\":2014408540,\"\":{\"vgusfrkjfrtauf\":\"datalrwwmukx\",\"qmjodvknxj\":\"dataxxvzqin\",\"fqodc\":\"datattkhmhquca\"}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"nodeName\":\"nwafjibaq\",\"machineName\":\"tdije\",\"hostServiceUri\":\"xrrmnoxjwl\",\"status\":\"NeedRegistration\",\"capabilities\":{\"l\":\"zvvidokvzqeadk\",\"tvtpw\":\"rcxvoltjyzolnqk\",\"qpl\":\"mmyaoegh\",\"hewcevpmt\":\"y\"},\"versionStatus\":\"dfpgsoje\",\"version\":\"jnlvcgar\",\"registerTime\":\"2021-02-19T07:29:51Z\",\"lastConnectTime\":\"2021-07-10T06:20:08Z\",\"expiryTime\":\"2021-02-04T14:24:28Z\",\"lastStartTime\":\"2021-01-08T18:44:26Z\",\"lastStopTime\":\"2021-09-02T20:47:22Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-12-03T17:37:13Z\",\"lastEndUpdateTime\":\"2021-06-09T07:55:39Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":671274453,\"maxConcurrentJobs\":684366978,\"\":{\"d\":\"datatonovveouwixte\",\"qtpbb\":\"dataqprhzsaquha\",\"jlynlrjoydzmbvs\":\"dataicteq\"}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); SelfHostedIntegrationRuntimeNode response = manager.integrationRuntimeNodes() - .getWithResponse("dsygdzzufr", "ewqwdglmfsjpl", "dhzltmywy", "fuovkgqtzg", com.azure.core.util.Context.NONE) + .getWithResponse("ii", "jkiajokjuehcryww", "ns", "rcj", com.azure.core.util.Context.NONE) .getValue(); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java index 3ea6506b02bf4..640fe63a71db2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java @@ -6,51 +6,33 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNode; import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeNodesUpdateWithResponseMockTests { @Test public void testUpdateWithResponse() throws Exception { - HttpClient 
httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"nodeName\":\"xqra\",\"machineName\":\"nkeodgpqdcrnubnt\",\"hostServiceUri\":\"ohtuiwsnccmunhv\",\"status\":\"Initializing\",\"capabilities\":{\"yzfuvbnelmimmc\":\"zvuiprngneymxzd\",\"egfthgjmznp\":\"evbprecge\"},\"versionStatus\":\"vafczgisegdei\",\"version\":\"lcdqxownbjkwgkgo\",\"registerTime\":\"2021-01-21T18:45:42Z\",\"lastConnectTime\":\"2021-10-26T03:12:01Z\",\"expiryTime\":\"2021-06-21T19:10:04Z\",\"lastStartTime\":\"2021-10-30T09:20:31Z\",\"lastStopTime\":\"2021-04-13T23:04:59Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-10-03T06:43:31Z\",\"lastEndUpdateTime\":\"2020-12-30T11:38:38Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":2040293763,\"maxConcurrentJobs\":1930006622,\"\":{\"kshtgfewflxby\":\"dataqqos\",\"ldtmeendocqaptwk\":\"datavaufxxvs\",\"z\":\"dataismonwhazalftta\",\"srduqhrlltfec\":\"datasve\"}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"nodeName\":\"yyfthsafv\",\"machineName\":\"o\",\"hostServiceUri\":\"inmc\",\"status\":\"NeedRegistration\",\"capabilities\":{\"u\":\"ydy\",\"fprfhpcy\":\"kjzp\",\"uzls\":\"ajjyournxq\",\"ebmuv\":\"mbsghzund\"},\"versionStatus\":\"kdea\",\"version\":\"xdwwraimjkaz\",\"registerTime\":\"2021-03-08T20:15:30Z\",\"lastConnectTime\":\"2021-03-04T00:33:43Z\",\"expiryTime\":\"2021-03-25T09:09:50Z\",\"lastStartTime\":\"2021-03-14T23:09:26Z\",\"lastStopTime\":\"2021-01-08T06:02:34Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-09-14T20:41:14Z\",\"lastEndUpdateTime\":\"2021-08-06T06:53:48Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1313337532,\"maxConcurrentJobs\":1925477381,\"\":{\"nwjowgdw\":\"datah\"}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); SelfHostedIntegrationRuntimeNode response = manager.integrationRuntimeNodes() - .updateWithResponse("xjqysfejddiog", "ckvoxlihfg", "fznzemisqunxwos", "nchrouvtbptdeum", - new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(637814538), + .updateWithResponse("wlzvxjxvspubf", "elqzcptsbiruy", "iwsfvanpzabbfd", "issdelyecjmfaf", + new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(1818752360), com.azure.core.util.Context.NONE) .getValue(); diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java index d638707e12f3b..3e69ee6a9a078 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java @@ -6,58 +6,39 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest; import com.azure.resourcemanager.datafactory.models.SsisObjectMetadataListResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeObjectMetadatasGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"type\":\"SsisObjectMetadata\",\"id\":8310288998494192853,\"name\":\"zpxlyabjrz\",\"description\":\"sjfwurhkuxp\"}],\"nextLink\":\"wmbgwgmyglnsnkyl\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"type\":\"Package\",\"id\":7014662316787087438,\"name\":\"zvvrzdbrpdveyxcd\",\"description\":\"l\"},{\"type\":\"Project\",\"id\":5690625794938036184,\"name\":\"mxzszhvjfijxtho\",\"description\":\"giipcvqyapnsnb\"},{\"type\":\"Package\",\"id\":1617738631876995390,\"name\":\"uswdwdau\",\"description\":\"gvs\"}],\"nextLink\":\"s\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, 
responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); SsisObjectMetadataListResponse response = manager.integrationRuntimeObjectMetadatas() - .getWithResponse("igh", "xx", "betmqugovcd", - new GetSsisObjectMetadataRequest().withMetadataPath("lrbsfqrgjejabqv"), - com.azure.core.util.Context.NONE) + .getWithResponse("haugenpipp", "preputusdewnk", "wyry", + new GetSsisObjectMetadataRequest().withMetadataPath("hnoiqtvfr"), com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals(8310288998494192853L, response.value().get(0).id()); - Assertions.assertEquals("zpxlyabjrz", response.value().get(0).name()); - Assertions.assertEquals("sjfwurhkuxp", response.value().get(0).description()); - Assertions.assertEquals("wmbgwgmyglnsnkyl", response.nextLink()); + Assertions.assertEquals(7014662316787087438L, response.value().get(0).id()); + Assertions.assertEquals("zvvrzdbrpdveyxcd", response.value().get(0).name()); + Assertions.assertEquals("l", response.value().get(0).description()); + Assertions.assertEquals("s", response.nextLink()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java index 3d2a96e33a198..61d78c6a1c5c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java @@ -6,54 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.SsisObjectMetadataStatusResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimeObjectMetadatasRefreshMockTests { @Test public void testRefresh() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - - String responseStr - = "{\"status\":\"oshkzibbjbzdnkg\",\"name\":\"bvicwfrybvhg\",\"properties\":\"tjghdfusphokcc\",\"error\":\"nnmpnnq\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - 
.thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SsisObjectMetadataStatusResponse response = manager.integrationRuntimeObjectMetadatas().refresh("yfxsdntukoss", - "flfv", "ygecly", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("oshkzibbjbzdnkg", response.status()); - Assertions.assertEquals("bvicwfrybvhg", response.name()); - Assertions.assertEquals("tjghdfusphokcc", response.properties()); - Assertions.assertEquals("nnmpnnq", response.error()); + String responseStr = "{\"status\":\"iaeqcg\",\"name\":\"nj\",\"properties\":\"emlw\",\"error\":\"gsxm\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + SsisObjectMetadataStatusResponse response = manager.integrationRuntimeObjectMetadatas() + .refresh("wvcehkva", "cjektkge", "rifyrap", com.azure.core.util.Context.NONE); + + Assertions.assertEquals("iaeqcg", response.status()); + Assertions.assertEquals("nj", response.name()); + Assertions.assertEquals("emlw", response.properties()); + Assertions.assertEquals("gsxm", response.error()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java index f6690962838c4..c4d8778198a3e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java @@ -15,40 +15,25 @@ public final class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint model = BinaryData.fromString( - "{\"category\":\"bmehh\",\"endpoints\":[{\"domainName\":\"jusrtslhspk\",\"endpointDetails\":[{\"port\":812184412},{\"port\":441408788},{\"port\":1351571833}]},{\"domainName\":\"gkvtmelmqkrhah\",\"endpointDetails\":[{\"port\":1468596781},{\"port\":1935710102},{\"port\":1520620796},{\"port\":27642330}]},{\"domainName\":\"hmdua\",\"endpointDetails\":[{\"port\":552039222}]}]}") + 
"{\"category\":\"qugxywpmueefjzwf\",\"endpoints\":[{\"domainName\":\"jidsuyonobglaoc\",\"endpointDetails\":[{\"port\":1581079817}]}]}") .toObject(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.class); - Assertions.assertEquals("bmehh", model.category()); - Assertions.assertEquals("jusrtslhspk", model.endpoints().get(0).domainName()); - Assertions.assertEquals(812184412, model.endpoints().get(0).endpointDetails().get(0).port()); + Assertions.assertEquals("qugxywpmueefjzwf", model.category()); + Assertions.assertEquals("jidsuyonobglaoc", model.endpoints().get(0).domainName()); + Assertions.assertEquals(1581079817, model.endpoints().get(0).endpointDetails().get(0).port()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint model - = new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("bmehh") - .withEndpoints( - Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint() - .withDomainName("jusrtslhspk") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(812184412), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(441408788), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails() - .withPort(1351571833))), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("gkvtmelmqkrhah") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1468596781), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1935710102), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1520620796), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(27642330))), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("hmdua") - .withEndpointDetails( - Arrays.asList(new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails() - .withPort(552039222))))); + = new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("qugxywpmueefjzwf") + .withEndpoints(Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("jidsuyonobglaoc") + .withEndpointDetails(Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1581079817))))); model = BinaryData.fromObject(model) .toObject(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.class); - Assertions.assertEquals("bmehh", model.category()); - Assertions.assertEquals("jusrtslhspk", model.endpoints().get(0).domainName()); - Assertions.assertEquals(812184412, model.endpoints().get(0).endpointDetails().get(0).port()); + Assertions.assertEquals("qugxywpmueefjzwf", model.category()); + Assertions.assertEquals("jidsuyonobglaoc", model.endpoints().get(0).domainName()); + Assertions.assertEquals(1581079817, model.endpoints().get(0).endpointDetails().get(0).port()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java index 0577cb26b44a2..1b6022759fa34 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java @@ -12,17 +12,17 @@ public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsT @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails model - = BinaryData.fromString("{\"port\":434822175}") + = BinaryData.fromString("{\"port\":1436522503}") .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.class); - Assertions.assertEquals(434822175, model.port()); + Assertions.assertEquals(1436522503, model.port()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails model - = new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(434822175); + = new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1436522503); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.class); - Assertions.assertEquals(434822175, model.port()); + Assertions.assertEquals(1436522503, model.port()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java index ec76c87ae413b..11cd0e683246b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java @@ -14,19 +14,20 @@ public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpoint model - = BinaryData.fromString("{\"domainName\":\"vfadmws\",\"endpointDetails\":[{\"port\":1913869945}]}") + = BinaryData.fromString("{\"domainName\":\"g\",\"endpointDetails\":[{\"port\":1209996134}]}") .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpoint.class); - Assertions.assertEquals("vfadmws", model.domainName()); - Assertions.assertEquals(1913869945, model.endpointDetails().get(0).port()); + Assertions.assertEquals("g", model.domainName()); + Assertions.assertEquals(1209996134, model.endpointDetails().get(0).port()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpoint model - = new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("vfadmws").withEndpointDetails( - Arrays.asList(new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1913869945))); + = new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("g") + .withEndpointDetails(Arrays + .asList(new 
IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1209996134))); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpoint.class); - Assertions.assertEquals("vfadmws", model.domainName()); - Assertions.assertEquals(1913869945, model.endpointDetails().get(0).port()); + Assertions.assertEquals("g", model.domainName()); + Assertions.assertEquals(1209996134, model.endpointDetails().get(0).port()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java index 7eb379e707a20..1960710e5e3c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java @@ -16,10 +16,10 @@ public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointsRespons @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner model = BinaryData.fromString( - "{\"value\":[{\"category\":\"frlh\",\"endpoints\":[{\"domainName\":\"kyv\",\"endpointDetails\":[{}]},{\"domainName\":\"n\",\"endpointDetails\":[{},{},{}]},{\"domainName\":\"zka\",\"endpointDetails\":[{}]},{\"domainName\":\"b\",\"endpointDetails\":[{},{}]}]}]}") + "{\"value\":[{\"category\":\"yfjfcnjbkcn\",\"endpoints\":[{\"domainName\":\"ttkphywpnvjtoqne\",\"endpointDetails\":[{}]},{\"domainName\":\"fpl\",\"endpointDetails\":[{},{},{},{}]},{\"domainName\":\"uscrpabgyepsb\",\"endpointDetails\":[{},{},{},{}]}]}]}") .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.class); - Assertions.assertEquals("frlh", model.value().get(0).category()); - Assertions.assertEquals("kyv", model.value().get(0).endpoints().get(0).domainName()); + Assertions.assertEquals("yfjfcnjbkcn", model.value().get(0).category()); + Assertions.assertEquals("ttkphywpnvjtoqne", model.value().get(0).endpoints().get(0).domainName()); } @org.junit.jupiter.api.Test @@ -27,27 +27,30 @@ public void testSerialize() throws Exception { IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner model = new IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner() .withValue( - Arrays - .asList(new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("frlh") - .withEndpoints(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("kyv") - .withEndpointDetails(Arrays - .asList(new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("n") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("zka") - .withEndpointDetails(Arrays - 
.asList(new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("b") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())))))); + Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("yfjfcnjbkcn") + .withEndpoints( + Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpoint() + .withDomainName("ttkphywpnvjtoqne") + .withEndpointDetails(Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), + new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("fpl") + .withEndpointDetails(Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), + new IntegrationRuntimeOutboundNetworkDependenciesEndpoint() + .withDomainName("uscrpabgyepsb") + .withEndpointDetails(Arrays.asList( + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), + new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())))))); model = BinaryData.fromObject(model) .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.class); - Assertions.assertEquals("frlh", model.value().get(0).category()); - Assertions.assertEquals("kyv", model.value().get(0).endpoints().get(0).domainName()); + Assertions.assertEquals("yfjfcnjbkcn", model.value().get(0).category()); + Assertions.assertEquals("ttkphywpnvjtoqne", model.value().get(0).endpoints().get(0).domainName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java index b1d4d8fc8ca02..1f7ab43208820 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java @@ -14,17 +14,18 @@ public final class IntegrationRuntimeReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeReference model = BinaryData.fromString( - "{\"referenceName\":\"dggkzzlvmbmpa\",\"parameters\":{\"yw\":\"datadfvue\",\"yhrfouyftaakcpw\":\"databpfvm\",\"nubexk\":\"datayzvqt\"}}") + "{\"referenceName\":\"kqze\",\"parameters\":{\"mhhv\":\"datadltfz\",\"tibqdxbxwakb\":\"datagureodkwobdag\",\"podxunkb\":\"datagqxndlkzgxhuripl\",\"lrb\":\"databxmubyynt\"}}") .toObject(IntegrationRuntimeReference.class); - Assertions.assertEquals("dggkzzlvmbmpa", model.referenceName()); + Assertions.assertEquals("kqze", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeReference model = new IntegrationRuntimeReference().withReferenceName("dggkzzlvmbmpa") - 
.withParameters(mapOf("yw", "datadfvue", "yhrfouyftaakcpw", "databpfvm", "nubexk", "datayzvqt")); + IntegrationRuntimeReference model = new IntegrationRuntimeReference().withReferenceName("kqze") + .withParameters(mapOf("mhhv", "datadltfz", "tibqdxbxwakb", "datagureodkwobdag", "podxunkb", + "datagqxndlkzgxhuripl", "lrb", "databxmubyynt")); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeReference.class); - Assertions.assertEquals("dggkzzlvmbmpa", model.referenceName()); + Assertions.assertEquals("kqze", model.referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java index 4b6fd19a72802..68d43062e0ccb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java @@ -6,40 +6,25 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntime; import org.junit.jupiter.api.Assertions; public final class IntegrationRuntimeResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"yoxa\",\"\":{\"bniwdj\":\"datakzjancuxrhdwbav\",\"s\":\"datawz\",\"xytxhpzxbz\":\"databpg\"}},\"name\":\"zabglcuhxwt\",\"type\":\"yqiklbbovplwzb\",\"etag\":\"gy\",\"id\":\"uosvmkfssxqukk\"}") + "{\"properties\":{\"type\":\"Managed\",\"description\":\"idybyxczf\",\"\":{\"fkts\":\"dataaaxdbabphlwrq\",\"nyyazttbtwwrqpue\":\"datahsucoc\",\"xibxujwbhqwalm\":\"datackzywbiexzfeyue\"}},\"name\":\"yoxa\",\"type\":\"dkzjancuxrh\",\"etag\":\"bavxbniwdjswzt\",\"id\":\"bpg\"}") .toObject(IntegrationRuntimeResourceInner.class); - Assertions.assertEquals("uosvmkfssxqukk", model.id()); - Assertions.assertEquals("yoxa", model.properties().description()); + Assertions.assertEquals("bpg", model.id()); + Assertions.assertEquals("idybyxczf", model.properties().description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeResourceInner model - = new IntegrationRuntimeResourceInner().withId("uosvmkfssxqukk").withProperties(new IntegrationRuntime() - .withDescription("yoxa").withAdditionalProperties(mapOf("type", "IntegrationRuntime"))); + IntegrationRuntimeResourceInner model = new IntegrationRuntimeResourceInner().withId("bpg") + .withProperties(new ManagedIntegrationRuntime().withDescription("idybyxczf")); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeResourceInner.class); - Assertions.assertEquals("uosvmkfssxqukk", model.id()); - Assertions.assertEquals("yoxa", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("bpg", model.id()); + Assertions.assertEquals("idybyxczf", model.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java index f1c2692bcb8f7..5f764cf140aa5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java @@ -6,35 +6,20 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeStatusResponseInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeStatus; public final class IntegrationRuntimeStatusResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeStatusResponseInner model = BinaryData.fromString( - "{\"name\":\"ogtwrupqsxvnmi\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"kvceoveilovnotyf\",\"state\":\"Limited\",\"\":{\"x\":\"databkc\",\"nv\":\"datahbttkphyw\",\"qnermclfplphoxu\":\"datat\",\"ye\":\"datacrpab\"}}}") + "{\"name\":\"pvlopwiyighxpkd\",\"properties\":{\"type\":\"SelfHosted\",\"dataFactoryName\":\"iuebbaumny\",\"state\":\"Offline\",\"\":{\"hsmtxpsiebtfhvp\":\"dataeojnabc\"}}}") .toObject(IntegrationRuntimeStatusResponseInner.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeStatusResponseInner model = new IntegrationRuntimeStatusResponseInner() - .withProperties(new IntegrationRuntimeStatus().withAdditionalProperties( - mapOf("dataFactoryName", "kvceoveilovnotyf", "state", "Limited", "type", "IntegrationRuntimeStatus"))); + IntegrationRuntimeStatusResponseInner model + = new IntegrationRuntimeStatusResponseInner().withProperties(new SelfHostedIntegrationRuntimeStatus()); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeStatusResponseInner.class); } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java index 9cf69d5150416..1fcb90b81db97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java @@ -6,33 +6,19 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeStatus; public final class IntegrationRuntimeStatusTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeStatus model = BinaryData.fromString( - "{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"bjtazqugxywpmu\",\"state\":\"Started\",\"\":{\"dsuyonobgla\":\"datawfqkquj\",\"tcc\":\"datacq\",\"udxytlmoyrx\":\"datag\",\"qj\":\"datawfudwpzntxhdzhl\"}}") + "{\"type\":\"Managed\",\"dataFactoryName\":\"skrdqmhjj\",\"state\":\"AccessDenied\",\"\":{\"xuutkncwscwsv\":\"datawky\",\"micykvceoveilo\":\"dataxotogtwrupqsxv\"}}") .toObject(IntegrationRuntimeStatus.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeStatus model = new IntegrationRuntimeStatus().withAdditionalProperties( - mapOf("dataFactoryName", "bjtazqugxywpmu", "state", "Started", "type", "IntegrationRuntimeStatus")); + IntegrationRuntimeStatus model = new ManagedIntegrationRuntimeStatus(); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeStatus.class); } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java index 2eba42937a82f..99581eb362cac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java @@ -6,36 +6,22 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import java.util.HashMap; -import java.util.Map; +import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntime; import org.junit.jupiter.api.Assertions; public final class IntegrationRuntimeTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntime model = BinaryData.fromString( - "{\"type\":\"IntegrationRuntime\",\"description\":\"l\",\"\":{\"wiyighxpkdw\":\"datasxnkjzkdeslpvlo\",\"upedeojnabckhs\":\"databaiuebbaumny\",\"ie\":\"datatxp\",\"jdhtldwkyzxu\":\"datatfhvpesapskrdqmh\"}}") + "{\"type\":\"Managed\",\"description\":\"x\",\"\":{\"lcuhxwtctyqiklb\":\"dataxbzpfzab\",\"bhvgy\":\"dataovplw\",\"svmkfssxquk\":\"datagu\"}}") .toObject(IntegrationRuntime.class); - Assertions.assertEquals("l", model.description()); + Assertions.assertEquals("x", model.description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntime model = new IntegrationRuntime().withDescription("l") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime")); + IntegrationRuntime model = new ManagedIntegrationRuntime().withDescription("x"); model = BinaryData.fromObject(model).toObject(IntegrationRuntime.class); - Assertions.assertEquals("l", model.description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + Assertions.assertEquals("x", model.description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java index a6cfef509b4f9..f46f6539c2190 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java @@ -15,24 +15,26 @@ public final class IntegrationRuntimeVNetPropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { IntegrationRuntimeVNetProperties model = BinaryData.fromString( - "{\"vNetId\":\"lwyoxzuhellitpqv\",\"subnet\":\"vrsgqbm\",\"publicIPs\":[\"eomebz\"],\"subnetId\":\"vxxfsfo\",\"\":{\"edybkbgdwbmi\":\"datagihnalpc\",\"i\":\"datazikatywedb\"}}") + "{\"vNetId\":\"oiihrfcowlas\",\"subnet\":\"ifywxjjylaqhx\",\"publicIPs\":[\"dar\",\"jhwgkyn\",\"lwrjgotdtmcktkal\",\"piybfnkylzri\"],\"subnetId\":\"qlwogqnbjuaiuvem\",\"\":{\"ywyfcenkbfxqcap\":\"datankivexiathmar\",\"cxzayvcse\":\"datageciradmxokbutbb\",\"eddjtgoppybse\":\"datadhrodyiit\"}}") .toObject(IntegrationRuntimeVNetProperties.class); - Assertions.assertEquals("lwyoxzuhellitpqv", model.vNetId()); - Assertions.assertEquals("vrsgqbm", model.subnet()); - Assertions.assertEquals("eomebz", model.publicIPs().get(0)); - Assertions.assertEquals("vxxfsfo", model.subnetId()); + Assertions.assertEquals("oiihrfcowlas", model.vNetId()); + Assertions.assertEquals("ifywxjjylaqhx", model.subnet()); + Assertions.assertEquals("dar", model.publicIPs().get(0)); + Assertions.assertEquals("qlwogqnbjuaiuvem", model.subnetId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - IntegrationRuntimeVNetProperties model - = new IntegrationRuntimeVNetProperties().withVNetId("lwyoxzuhellitpqv").withSubnet("vrsgqbm") - .withPublicIPs(Arrays.asList("eomebz")).withSubnetId("vxxfsfo").withAdditionalProperties(mapOf()); + IntegrationRuntimeVNetProperties model = new IntegrationRuntimeVNetProperties().withVNetId("oiihrfcowlas") + .withSubnet("ifywxjjylaqhx") + .withPublicIPs(Arrays.asList("dar", "jhwgkyn", "lwrjgotdtmcktkal", "piybfnkylzri")) + .withSubnetId("qlwogqnbjuaiuvem") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(IntegrationRuntimeVNetProperties.class); - Assertions.assertEquals("lwyoxzuhellitpqv", model.vNetId()); - Assertions.assertEquals("vrsgqbm", model.subnet()); - Assertions.assertEquals("eomebz", model.publicIPs().get(0)); - Assertions.assertEquals("vxxfsfo", model.subnetId()); + Assertions.assertEquals("oiihrfcowlas", model.vNetId()); + Assertions.assertEquals("ifywxjjylaqhx", model.subnet()); + Assertions.assertEquals("dar", model.publicIPs().get(0)); + Assertions.assertEquals("qlwogqnbjuaiuvem", model.subnetId()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java index e84d48aab54d7..93a9df5133dd9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java @@ -6,52 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests { @Test public void testCreateLinkedIntegrationRuntimeWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"name\":\"wvcsekwpgdfpoqb\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"kqsabyowfr\",\"state\":\"Starting\",\"\":{\"flgtq\":\"datafbdsnc\"}}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"name\":\"ilygjtibhzjhqf\",\"properties\":{\"type\":\"SelfHosted\",\"dataFactoryName\":\"wh\",\"state\":\"Started\",\"\":{\"jjhn\":\"datagqewcv\",\"aunywk\":\"dataersgrtjmde\",\"ryec\":\"dataucsrqfm\",\"n\":\"datalx\"}}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = 
DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() - .createLinkedIntegrationRuntimeWithResponse("llcsdgmcjsktej", "mhttiqbnfyixkeav", "ezzpfldd", - new CreateLinkedIntegrationRuntimeRequest().withName("cwhodfw").withSubscriptionId("xrfr") - .withDataFactoryName("byktlo").withDataFactoryLocation("p"), + .createLinkedIntegrationRuntimeWithResponse("obtkyjvzzbryl", "mnmchsjuacdq", "ryo", + new CreateLinkedIntegrationRuntimeRequest().withName("qotzpepmlckz") + .withSubscriptionId("ietfx") + .withDataFactoryName("zzlivkaxwfkanu") + .withDataFactoryLocation("lehvmraoxnii"), com.azure.core.util.Context.NONE) .getValue(); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java index 204d7e908904b..0cea29ac240ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java @@ -6,69 +6,39 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.ByteBuffer; +import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"fhsgpy\",\"\":{\"lpsjbnnuqszy\":\"datatsdjn\",\"fata\":\"dataoiufrqsmjgddbunx\",\"p\":\"datas\",\"livvnyzc\":\"datagdwhacurmmbuna\"}},\"name\":\"wisuhareqyiadvvg\",\"type\":\"fyel\",\"etag\":\"l\",\"id\":\"yyuxcj\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - 
.thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeResource response = manager.integrationRuntimes().define("jxud") - .withExistingFactory("owtazqexwkkjx", "jomnkeaiamh").withProperties(new IntegrationRuntime() - .withDescription("lz").withAdditionalProperties(mapOf("type", "IntegrationRuntime"))) - .withIfMatch("uyxccra").create(); - - Assertions.assertEquals("yyuxcj", response.id()); - Assertions.assertEquals("fhsgpy", response.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + = "{\"properties\":{\"type\":\"Managed\",\"description\":\"nycgzlicytfpywpe\",\"\":{\"jlqdpqkcbflzzdd\":\"datausxr\",\"rrsufvtmse\":\"datarew\",\"rphdakwwiezeut\":\"dataqguz\"}},\"name\":\"qawmoxvq\",\"type\":\"zatvnejlocmqladl\",\"etag\":\"lwtxsh\",\"id\":\"zhhzl\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + IntegrationRuntimeResource response = manager.integrationRuntimes() + .define("qgoioexh") + .withExistingFactory("nts", "sphi") + .withProperties(new SelfHostedIntegrationRuntime().withDescription("kfkwzk")) + .withIfMatch("izoamttxyddkvi") + .create(); + + Assertions.assertEquals("zhhzl", response.id()); + Assertions.assertEquals("nycgzlicytfpywpe", response.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java index 959ff7cab1633..890d29ab713c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import 
com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.integrationRuntimes().deleteWithResponse("xepuvwahfnlk", "yqpkskbid", "zzjpb", - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .deleteWithResponse("rjulttqgun", "tbpiccriqhiwyk", "zncfh", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java index eb04b391036f6..9d0cda43e2d78 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java @@ -6,51 +6,34 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeMonitoringData; -import 
java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesGetMonitoringDataWithResponseMockTests { @Test public void testGetMonitoringDataWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"name\":\"gbm\",\"nodes\":[{\"nodeName\":\"erauoht\",\"availableMemoryInMB\":1304587071,\"cpuUtilization\":1389990784,\"concurrentJobsLimit\":1347387035,\"concurrentJobsRunning\":260431426,\"maxConcurrentJobs\":718972733,\"sentBytes\":1.437074,\"receivedBytes\":58.880486,\"\":{\"eeyptvrbgcprsds\":\"datarx\",\"su\":\"datawozpmhhdnxwkf\",\"mxitvmrq\":\"datasbyfoavozqnn\"}},{\"nodeName\":\"zch\",\"availableMemoryInMB\":1378405861,\"cpuUtilization\":735227834,\"concurrentJobsLimit\":1330999405,\"concurrentJobsRunning\":181863906,\"maxConcurrentJobs\":903484269,\"sentBytes\":79.66964,\"receivedBytes\":18.096357,\"\":{\"pfojhvqmdoqyohzh\":\"databfrv\",\"xfvj\":\"datandfkp\",\"arhfeadedivadpcx\":\"datafusuwghtgp\"}},{\"nodeName\":\"pmw\",\"availableMemoryInMB\":1264586477,\"cpuUtilization\":1236271427,\"concurrentJobsLimit\":64649335,\"concurrentJobsRunning\":697187006,\"maxConcurrentJobs\":1636051443,\"sentBytes\":73.32201,\"receivedBytes\":23.75918,\"\":{\"kmnuivpbjcl\":\"datavlc\",\"yp\":\"datahfzriigte\",\"nctkqbvtdeou\":\"datamjqjoamzdsa\",\"juwdvfaulbfrc\":\"dataixgtpykbjevj\"}},{\"nodeName\":\"ucobpkphxh\",\"availableMemoryInMB\":471274210,\"cpuUtilization\":1807912949,\"concurrentJobsLimit\":345365482,\"concurrentJobsRunning\":704883797,\"maxConcurrentJobs\":796383837,\"sentBytes\":91.77441,\"receivedBytes\":82.7453,\"\":{\"wuyp\":\"datazqohthsmduaoypry\",\"jsov\":\"datauvpdclaj\",\"zehxddmaevcjtrw\":\"datafreyrgrgf\"}}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"name\":\"biszyi\",\"nodes\":[{\"nodeName\":\"jrirg\",\"availableMemoryInMB\":2026881904,\"cpuUtilization\":748969341,\"concurrentJobsLimit\":1141664737,\"concurrentJobsRunning\":216624483,\"maxConcurrentJobs\":1622921552,\"sentBytes\":36.65986,\"receivedBytes\":54.669308,\"\":{\"znuyczlyl\":\"datard\",\"uhkhnzsrgi\":\"datadrziaxigeos\",\"rlyscnbrwhsqtzg\":\"datavzepgljtuzqreprn\",\"x\":\"datafybryexhdigmgszz\"}},{\"nodeName\":\"unuus\",\"availableMemoryInMB\":1333665930,\"cpuUtilization\":536516435,\"concurrentJobsLimit\":872194362,\"concurrentJobsRunning\":158792225,\"maxConcurrentJobs\":460583114,\"sentBytes\":94.75602,\"receivedBytes\":89.53062,\"\":{\"sbzu\":\"datat\",\"rtsnclzyun\":\"datafranngwldymuehvv\",\"pxfhixaagvkwe\":\"dataoogagtjcmly\"}}]}"; - DataFactoryManager manager = 
DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); IntegrationRuntimeMonitoringData response = manager.integrationRuntimes() - .getMonitoringDataWithResponse("ogfxbv", "lc", "dnajncefqn", com.azure.core.util.Context.NONE).getValue(); + .getMonitoringDataWithResponse("uiiorbtfarb", "arxyh", "ukc", com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("gbm", response.name()); + Assertions.assertEquals("biszyi", response.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java index 7e4ecd0fc3a46..57cc193f709b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java @@ -6,49 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesGetStatusWithResponseMockTests { @Test public void testGetStatusWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"name\":\"rdhxamjhpqfj\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"fi\",\"state\":\"AccessDenied\",\"\":{\"llfq\":\"dataofwzc\"}}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), 
Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"name\":\"qwta\",\"properties\":{\"type\":\"Managed\",\"dataFactoryName\":\"awxslstekbbqq\",\"state\":\"AccessDenied\",\"\":{\"mbbracg\":\"dataycvoexbxrxrvxwl\",\"wyegdutf\":\"datanelozz\",\"kesdfujfpnwfzabl\":\"datatmdlfkjjucpt\"}}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() - .getStatusWithResponse("cgesbte", "fenhlitc", "dgesflnzibg", com.azure.core.util.Context.NONE).getValue(); + .getStatusWithResponse("mgparbirgw", "gewd", "irnfnlyvdryx", com.azure.core.util.Context.NONE) + .getValue(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java index fc83aab7b5efd..2e44d35ad9167 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java @@ -6,53 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"oahfaqlcqjnwvqif\",\"\":{\"qivvpvuy\":\"datafsvrjdpzvhxssn\",\"aadbwhsvxmvk\":\"datasnmdinnisuua\"}},\"name\":\"fwseoqkal\",\"type\":\"neahowvjup\",\"etag\":\"bupgtrnjz\",\"id\":\"bwabilybmf\"}"; - - 
Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"SelfHosted\",\"description\":\"njwmdtbxqtomcba\",\"\":{\"pkfwzqaelxd\":\"datatd\",\"o\":\"datazdfst\",\"vtzrj\":\"datazog\",\"zrqkgibpeh\":\"dataejvlf\"}},\"name\":\"ctzcm\",\"type\":\"ehxigsi\",\"etag\":\"bwsdoaypixry\",\"id\":\"lbzxyejoxd\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); IntegrationRuntimeResource response = manager.integrationRuntimes() - .getWithResponse("xmsiblieg", "jktfsci", "yclv", "ivsagrfjhcrq", com.azure.core.util.Context.NONE) + .getWithResponse("stybom", "yjfjsseem", "dfmlail", "xausivh", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("bwabilybmf", response.id()); - Assertions.assertEquals("oahfaqlcqjnwvqif", response.properties().description()); + Assertions.assertEquals("lbzxyejoxd", response.id()); + Assertions.assertEquals("njwmdtbxqtomcba", response.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java index 153487bc3c4b8..879f5fbf40916 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java @@ -6,53 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import 
org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor<HttpRequest> httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"akghvaqbkz\",\"\":{\"spnyutfhqighnun\":\"databxau\",\"ovgio\":\"datatjmzbirjndd\"}},\"name\":\"ztrln\",\"type\":\"vjdv\",\"etag\":\"c\",\"id\":\"j\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"type\":\"SelfHosted\",\"description\":\"bvqrhzpfdl\",\"\":{\"nhmatfgo\":\"dataucswhmnsd\",\"wzbkgtgvrrzmkte\":\"datarjmhtxipw\",\"xt\":\"datazeu\"}},\"name\":\"hjcwlfzxxpwexc\",\"type\":\"gpcccg\",\"etag\":\"knjjskzuh\",\"id\":\"yavfeyybyduy\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable<IntegrationRuntimeResource> response - = manager.integrationRuntimes().listByFactory("ljcblppnq", "snvcwji", com.azure.core.util.Context.NONE); + = manager.integrationRuntimes().listByFactory("ayjeh", "vowvqpncif", com.azure.core.util.Context.NONE); - Assertions.assertEquals("j", response.iterator().next().id()); - Assertions.assertEquals("akghvaqbkz", response.iterator().next().properties().description()); + Assertions.assertEquals("yavfeyybyduy", response.iterator().next().id()); + Assertions.assertEquals("bvqrhzpfdl", response.iterator().next().properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java index 34856d2c33a04..19d50e8150839 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java @@ -6,53 +6,36 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests { @Test public void testListOutboundNetworkDependenciesEndpointsWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"category\":\"eoqwngtiyzzifuov\",\"endpoints\":[{\"domainName\":\"gts\",\"endpointDetails\":[{},{},{}]},{\"domainName\":\"rk\",\"endpointDetails\":[{},{},{},{}]},{\"domainName\":\"crxcnuyfvridzqoi\",\"endpointDetails\":[{},{},{}]}]},{\"category\":\"vayboubi\",\"endpoints\":[{\"domainName\":\"qmydpoj\",\"endpointDetails\":[{},{},{},{}]},{\"domainName\":\"xdg\",\"endpointDetails\":[{},{}]},{\"domainName\":\"eadqopw\",\"endpointDetails\":[{}]},{\"domainName\":\"rhdezlhsdcpdbo\",\"endpointDetails\":[{},{},{}]}]},{\"category\":\"yqdvxqo\",\"endpoints\":[{\"domainName\":\"scde\",\"endpointDetails\":[{},{}]},{\"domainName\":\"ftzxtr\",\"endpointDetails\":[{}]},{\"domainName\":\"ljfdc\",\"endpointDetails\":[{}]}]},{\"category\":\"q\",\"endpoints\":[{\"domainName\":\"dywbnerygsifsahk\",\"endpointDetails\":[{},{},{},{}]},{\"domainName\":\"ajnsuuxbyrvgu\",\"endpointDetails\":[{},{}]},{\"domainName\":\"solmzrfhlynkius\",\"endpointDetails\":[{},{},{},{}]},{\"domainName\":\"bjtsqfhnqxqtemvq\",\"endpointDetails\":[{},{},{},{}]}]}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse response - = 
manager.integrationRuntimes().listOutboundNetworkDependenciesEndpointsWithResponse("syifdrbkprblwsb", - "sekqqt", "rupogtrwkuwn", com.azure.core.util.Context.NONE).getValue(); - - Assertions.assertEquals("eoqwngtiyzzifuov", response.value().get(0).category()); - Assertions.assertEquals("gts", response.value().get(0).endpoints().get(0).domainName()); + = "{\"value\":[{\"category\":\"xlpmbtmcpwvbp\",\"endpoints\":[{\"domainName\":\"amsgfvuffdvukjy\",\"endpointDetails\":[{},{},{}]},{\"domainName\":\"nrgmjp\",\"endpointDetails\":[{},{}]}]},{\"category\":\"wxc\",\"endpoints\":[{\"domainName\":\"lby\",\"endpointDetails\":[{}]}]},{\"category\":\"skjiiivbvkv\",\"endpoints\":[{\"domainName\":\"slbkrhrnvozj\",\"endpointDetails\":[{},{},{},{}]}]}]}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse response = manager.integrationRuntimes() + .listOutboundNetworkDependenciesEndpointsWithResponse("msybvjfnuyoy", "zafkn", "lasfbpjyv", + com.azure.core.util.Context.NONE) + .getValue(); + + Assertions.assertEquals("xlpmbtmcpwvbp", response.value().get(0).category()); + Assertions.assertEquals("amsgfvuffdvukjy", response.value().get(0).endpoints().get(0).domainName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java index 4ce91815969d4..215436af69822 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java @@ -6,48 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesRemoveLinksWithResponseMockTests { @Test public void testRemoveLinksWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - 
Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes().removeLinksWithResponse("lfeolhsyskivlz", "xmqvlgcppn", "iynzdadkurwgty", - new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("njox"), com.azure.core.util.Context.NONE); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + manager.integrationRuntimes() + .removeLinksWithResponse("fnffjxdccwuzqwv", "kewlyrweups", "bawzafzdzhh", + new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("bxcelvaww"), + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java index 2a7aaf3d2e9e0..70183c9fd8a14 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java @@ -6,49 +6,31 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesStartMockTests { @Test public void testStart() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"name\":\"ato\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"yrfspmc\",\"state\":\"AccessDenied\",\"\":{\"tynhulefltub\":\"dataisypkif\"}}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"name\":\"jctvohywyvstv\",\"properties\":{\"type\":\"SelfHosted\",\"dataFactoryName\":\"q\",\"state\":\"Limited\",\"\":{\"lwgr\":\"dataocqrylivofnhckl\",\"pxvemjjfvanefw\":\"datavlqq\",\"npbgqemjdtc\":\"dataodnlw\"}}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - IntegrationRuntimeStatusResponse response = manager.integrationRuntimes().start("heamxidjdptruie", "rauy", - "hugwau", com.azure.core.util.Context.NONE); + IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() + .start("bxwda", "hiiduogakrpmj", "dbdcyijnoibclfqd", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java index 206518cd65c0e..88eef5ee14bff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java @@ -6,46 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesStopMockTests { @Test public void testStop() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = 
Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.integrationRuntimes().stop("pebblndlahr", "x", "tkehfoephipho", com.azure.core.util.Context.NONE); + manager.integrationRuntimes().stop("wk", "kxln", "pbadycnjxyproq", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java index 846fb49cf2311..298981461b5f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesSyncCredentialsWithResponseMockTests { @Test public void testSyncCredentialsWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - 
Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.integrationRuntimes().syncCredentialsWithResponse("gmcuqjouk", "mv", "qismvo", - com.azure.core.util.Context.NONE); + manager.integrationRuntimes() + .syncCredentialsWithResponse("bsuijxmw", "shqpjueodhtltoo", "kzouvckrej", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java index 0710e4d22b8ca..14ebd04bc28b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class IntegrationRuntimesUpgradeWithResponseMockTests { @Test public void testUpgradeWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - 
Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.integrationRuntimes().upgradeWithResponse("cnwqeixyjlfobj", "betsvnloduvcq", "wc", - com.azure.core.util.Context.NONE); + manager.integrationRuntimes().upgradeWithResponse("lcito", "bgva", "zfiwao", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java index e7f4605bccd77..5013288afd29f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java @@ -19,29 +19,31 @@ public final class JiraObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JiraObjectDataset model = BinaryData.fromString( - "{\"type\":\"JiraObject\",\"typeProperties\":{\"tableName\":\"datazc\"},\"description\":\"kckrnovqdmh\",\"structure\":\"datajstfs\",\"schema\":\"datacjakgkqwx\",\"linkedServiceName\":{\"referenceName\":\"dsoqzhxwdjoxwkb\",\"parameters\":{\"lfhn\":\"dataobvcnsbioez\",\"t\":\"dataz\"}},\"parameters\":{\"cmwbejywwwvn\":{\"type\":\"Bool\",\"defaultValue\":\"datagtkxncwdytnlr\"}},\"annotations\":[\"datakrmqevrhhafqf\",\"datadfyziruqvgnjxi\",\"datakgyjmzbm\"],\"folder\":{\"name\":\"kyluyug\"},\"\":{\"loxtvq\":\"datadcv\",\"ryhmmglv\":\"datab\",\"nkpsvokkyankxvc\":\"datab\"}}") + "{\"type\":\"equzytapgzdhzb\",\"typeProperties\":{\"tableName\":\"dataubukqmierzrnob\"},\"description\":\"cdsysxnk\",\"structure\":\"datav\",\"schema\":\"datalsevzc\",\"linkedServiceName\":{\"referenceName\":\"rwn\",\"parameters\":{\"xdqeluvmsaq\":\"datadwqymxsfqe\"}},\"parameters\":{\"fznfgpbc\":{\"type\":\"Float\",\"defaultValue\":\"datagqrwuhvv\"}},\"annotations\":[\"datapympdjieask\"],\"folder\":{\"name\":\"clnfusrgnos\"},\"\":{\"ikbvqzrurgbqaucp\":\"databmjphlyyuahvy\",\"gjlyxtugpea\":\"datakxjnohafwmf\",\"sdwxfamtxccfe\":\"datae\"}}") .toObject(JiraObjectDataset.class); - Assertions.assertEquals("kckrnovqdmh", model.description()); - Assertions.assertEquals("dsoqzhxwdjoxwkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("cmwbejywwwvn").type()); - Assertions.assertEquals("kyluyug", model.folder().name()); + Assertions.assertEquals("cdsysxnk", 
model.description()); + Assertions.assertEquals("rwn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fznfgpbc").type()); + Assertions.assertEquals("clnfusrgnos", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JiraObjectDataset model = new JiraObjectDataset().withDescription("kckrnovqdmh").withStructure("datajstfs") - .withSchema("datacjakgkqwx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dsoqzhxwdjoxwkb") - .withParameters(mapOf("lfhn", "dataobvcnsbioez", "t", "dataz"))) - .withParameters(mapOf("cmwbejywwwvn", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datagtkxncwdytnlr"))) - .withAnnotations(Arrays.asList("datakrmqevrhhafqf", "datadfyziruqvgnjxi", "datakgyjmzbm")) - .withFolder(new DatasetFolder().withName("kyluyug")).withTableName("datazc"); + JiraObjectDataset model = new JiraObjectDataset().withDescription("cdsysxnk") + .withStructure("datav") + .withSchema("datalsevzc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rwn") + .withParameters(mapOf("xdqeluvmsaq", "datadwqymxsfqe"))) + .withParameters(mapOf("fznfgpbc", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datagqrwuhvv"))) + .withAnnotations(Arrays.asList("datapympdjieask")) + .withFolder(new DatasetFolder().withName("clnfusrgnos")) + .withTableName("dataubukqmierzrnob"); model = BinaryData.fromObject(model).toObject(JiraObjectDataset.class); - Assertions.assertEquals("kckrnovqdmh", model.description()); - Assertions.assertEquals("dsoqzhxwdjoxwkb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("cmwbejywwwvn").type()); - Assertions.assertEquals("kyluyug", model.folder().name()); + Assertions.assertEquals("cdsysxnk", model.description()); + Assertions.assertEquals("rwn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fznfgpbc").type()); + Assertions.assertEquals("clnfusrgnos", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java index 46e0572b2f0ba..56be71fd556ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java @@ -11,15 +11,19 @@ public final class JiraSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JiraSource model = BinaryData.fromString( - "{\"type\":\"JiraSource\",\"query\":\"dataqgpldrn\",\"queryTimeout\":\"datahdb\",\"additionalColumns\":\"databmsbetzufkvx\",\"sourceRetryCount\":\"databddrtngdc\",\"sourceRetryWait\":\"datajzgzaeuu\",\"maxConcurrentConnections\":\"datavheqzl\",\"disableMetricsCollection\":\"datavaskrgoodfhpyue\",\"\":{\"lizlzxh\":\"datanyddp\",\"sjwawl\":\"datacuglgmfznholaf\",\"yk\":\"dataqmznkcwiok\"}}") + 
"{\"type\":\"jvskpbuo\",\"query\":\"dataoadjooer\",\"queryTimeout\":\"datautqebpuoycawptxq\",\"additionalColumns\":\"dataufdxpwj\",\"sourceRetryCount\":\"datacuk\",\"sourceRetryWait\":\"datacuvww\",\"maxConcurrentConnections\":\"datajjcaaoce\",\"disableMetricsCollection\":\"datawwilyxpq\",\"\":{\"zwybgaycjphoz\":\"dataifhjymqwjliivyat\",\"uoqnktl\":\"datamcypd\"}}") .toObject(JiraSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JiraSource model = new JiraSource().withSourceRetryCount("databddrtngdc").withSourceRetryWait("datajzgzaeuu") - .withMaxConcurrentConnections("datavheqzl").withDisableMetricsCollection("datavaskrgoodfhpyue") - .withQueryTimeout("datahdb").withAdditionalColumns("databmsbetzufkvx").withQuery("dataqgpldrn"); + JiraSource model = new JiraSource().withSourceRetryCount("datacuk") + .withSourceRetryWait("datacuvww") + .withMaxConcurrentConnections("datajjcaaoce") + .withDisableMetricsCollection("datawwilyxpq") + .withQueryTimeout("datautqebpuoycawptxq") + .withAdditionalColumns("dataufdxpwj") + .withQuery("dataoadjooer"); model = BinaryData.fromObject(model).toObject(JiraSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java index fa65e15d85796..a83570233bfd3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java @@ -21,35 +21,38 @@ public final class JsonDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonDataset model = BinaryData.fromString( - "{\"type\":\"Json\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datafbwih\",\"fileName\":\"datanx\",\"\":{\"pfzsclefyrl\":\"dataynuqqkotauratnic\",\"ggnbbuypwovvvsfl\":\"datatndqlmf\",\"xrlzhpziha\":\"dataevnoqayrehjuqwva\",\"fonfdbgm\":\"dataenqqzlxnqzu\"}},\"encodingName\":\"datawmjcwtewfhxw\",\"compression\":{\"type\":\"datakbrehzlrynjp\",\"level\":\"dataamkaepl\",\"\":{\"ieikmwlaoklfnis\":\"dataubowuywevtj\"}}},\"description\":\"gucbmt\",\"structure\":\"datadscnns\",\"schema\":\"datacyyuvtzrxzhclec\",\"linkedServiceName\":{\"referenceName\":\"wtzqzcloyhy\",\"parameters\":{\"yresgzsdtmwbyorj\":\"dataidhz\"}},\"parameters\":{\"fbqvumkxqj\":{\"type\":\"String\",\"defaultValue\":\"dataychakvy\"},\"t\":{\"type\":\"SecureString\",\"defaultValue\":\"dataepmaxfnzlpqmp\"}},\"annotations\":[\"datavulb\",\"datamrtuxyp\"],\"folder\":{\"name\":\"caeo\"},\"\":{\"hqmomfeco\":\"dataqdywjflobh\"}}") + 
"{\"type\":\"xcn\",\"typeProperties\":{\"location\":{\"type\":\"bf\",\"folderPath\":\"datalo\",\"fileName\":\"dataekd\",\"\":{\"jmnsvujnjk\":\"datauftrs\",\"se\":\"datavolefcj\",\"otfbjampqoclann\":\"datakdb\",\"zstrktgvpatrg\":\"dataxynlsuqb\"}},\"encodingName\":\"dataktfinfhoksmmculw\",\"compression\":{\"type\":\"dataicruo\",\"level\":\"datarjflsga\",\"\":{\"mcdsgxcelujisw\":\"datatqpqsdoctpzpujzf\",\"nxhfwlfxzfwu\":\"datalu\",\"psjdmng\":\"dataeupcknecexkgrv\",\"pdz\":\"datayt\"}}},\"description\":\"woxcgzbejqfb\",\"structure\":\"dataopfjx\",\"schema\":\"datadrpazqjkrfmbw\",\"linkedServiceName\":{\"referenceName\":\"tfcuuugtj\",\"parameters\":{\"mecjjkmqenhaidzr\":\"datagayiawohfmhnn\",\"lo\":\"datavs\",\"pijpkhc\":\"dataovslvivqsuvwtenb\",\"xukuicjuftekio\":\"dataoa\"}},\"parameters\":{\"zubfjzabbwz\":{\"type\":\"Bool\",\"defaultValue\":\"dataewfhvpxjh\"}},\"annotations\":[\"datauaixcdckix\",\"dataps\",\"dataigavk\",\"datavyxzer\"],\"folder\":{\"name\":\"kpzjbyetjxryopt\"},\"\":{\"bpemnrrabovr\":\"datatwhlbecgih\",\"pskpeswyhhmif\":\"datawxywpjhspboxhif\",\"y\":\"datauajxwwvcmmpeg\"}}") .toObject(JsonDataset.class); - Assertions.assertEquals("gucbmt", model.description()); - Assertions.assertEquals("wtzqzcloyhy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("fbqvumkxqj").type()); - Assertions.assertEquals("caeo", model.folder().name()); + Assertions.assertEquals("woxcgzbejqfb", model.description()); + Assertions.assertEquals("tfcuuugtj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); + Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JsonDataset model - = new JsonDataset().withDescription("gucbmt").withStructure("datadscnns").withSchema("datacyyuvtzrxzhclec") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wtzqzcloyhy") - .withParameters(mapOf("yresgzsdtmwbyorj", "dataidhz"))) - .withParameters(mapOf("fbqvumkxqj", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataychakvy"), "t", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataepmaxfnzlpqmp"))) - .withAnnotations(Arrays.asList("datavulb", "datamrtuxyp")) - .withFolder(new DatasetFolder().withName("caeo")) - .withLocation(new DatasetLocation().withFolderPath("datafbwih").withFileName("datanx") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("datawmjcwtewfhxw").withCompression(new DatasetCompression() - .withType("datakbrehzlrynjp").withLevel("dataamkaepl").withAdditionalProperties(mapOf())); + JsonDataset model = new JsonDataset().withDescription("woxcgzbejqfb") + .withStructure("dataopfjx") + .withSchema("datadrpazqjkrfmbw") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tfcuuugtj") + .withParameters(mapOf("mecjjkmqenhaidzr", "datagayiawohfmhnn", "lo", "datavs", "pijpkhc", + "dataovslvivqsuvwtenb", "xukuicjuftekio", "dataoa"))) + .withParameters(mapOf("zubfjzabbwz", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataewfhvpxjh"))) + .withAnnotations(Arrays.asList("datauaixcdckix", "dataps", "dataigavk", "datavyxzer")) + .withFolder(new DatasetFolder().withName("kpzjbyetjxryopt")) + .withLocation(new DatasetLocation().withFolderPath("datalo") + .withFileName("dataekd") + 
.withAdditionalProperties(mapOf("type", "bf"))) + .withEncodingName("dataktfinfhoksmmculw") + .withCompression(new DatasetCompression().withType("dataicruo") + .withLevel("datarjflsga") + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(JsonDataset.class); - Assertions.assertEquals("gucbmt", model.description()); - Assertions.assertEquals("wtzqzcloyhy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("fbqvumkxqj").type()); - Assertions.assertEquals("caeo", model.folder().name()); + Assertions.assertEquals("woxcgzbejqfb", model.description()); + Assertions.assertEquals("tfcuuugtj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); + Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java index f61e99f5285e2..2a524898fbd21 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java @@ -15,17 +15,20 @@ public final class JsonDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datafr\",\"fileName\":\"datagbmxldjmz\",\"\":{\"lurb\":\"datajesyl\",\"uqs\":\"datafygpnyhgd\",\"qyvoupr\":\"dataa\"}},\"encodingName\":\"datatqzsslhmgwfnivr\",\"compression\":{\"type\":\"datafduiolhgyqvpb\",\"level\":\"datapoqzucfz\",\"\":{\"mvhvz\":\"datazdquurbo\",\"ujywzcqyggmn\":\"dataelbprn\",\"drrp\":\"datasvhbngqiwyejto\"}}}") + "{\"location\":{\"type\":\"vcmlaexbzbquf\",\"folderPath\":\"dataezs\",\"fileName\":\"dataaymldrorhyogzms\",\"\":{\"m\":\"datahtcuuwdhtqqhyhn\",\"wnwngh\":\"datatnsugisno\",\"cjixxf\":\"datajovkeyym\"}},\"encodingName\":\"datapcrtn\",\"compression\":{\"type\":\"datauefxxijtebdveywe\",\"level\":\"datarhlolmcnwepfg\",\"\":{\"qdljnpe\":\"databv\",\"n\":\"datal\",\"dxljzvdovbrble\":\"datadetawevxehue\",\"c\":\"datalprdaqccddcbnygd\"}}}") .toObject(JsonDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { JsonDatasetTypeProperties model = new JsonDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datafr").withFileName("datagbmxldjmz") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("datatqzsslhmgwfnivr").withCompression(new DatasetCompression() - .withType("datafduiolhgyqvpb").withLevel("datapoqzucfz").withAdditionalProperties(mapOf())); + .withLocation(new DatasetLocation().withFolderPath("dataezs") + .withFileName("dataaymldrorhyogzms") + .withAdditionalProperties(mapOf("type", "vcmlaexbzbquf"))) + .withEncodingName("datapcrtn") + .withCompression(new DatasetCompression().withType("datauefxxijtebdveywe") + .withLevel("datarhlolmcnwepfg") + .withAdditionalProperties(mapOf())); model = 
BinaryData.fromObject(model).toObject(JsonDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java index 48bfdf3d98248..27c00478a93a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java @@ -11,15 +11,19 @@ public final class JsonFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonFormat model = BinaryData.fromString( - "{\"type\":\"JsonFormat\",\"filePattern\":\"datasmuffiwjb\",\"nestingSeparator\":\"datavbp\",\"encodingName\":\"datajqjtotd\",\"jsonNodeReference\":\"dataosc\",\"jsonPathDefinition\":\"datahwuusiecktybh\",\"serializer\":\"dataxidhhxomil\",\"deserializer\":\"dataxj\",\"\":{\"qrslaate\":\"datagxwjwil\",\"gqwlxrhg\":\"datatwujjzgx\",\"ykbkkteozejogmk\":\"datavhvtq\",\"h\":\"datarvvmvmcofn\"}}") + "{\"type\":\"jrnm\",\"filePattern\":\"datadxmdses\",\"nestingSeparator\":\"dataujbjppp\",\"encodingName\":\"datalpdib\",\"jsonNodeReference\":\"datath\",\"jsonPathDefinition\":\"dataat\",\"serializer\":\"datazqpl\",\"deserializer\":\"datakihonik\",\"\":{\"gkensckhbmcarmo\":\"datazfffjilzfbpnt\"}}") .toObject(JsonFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JsonFormat model = new JsonFormat().withSerializer("dataxidhhxomil").withDeserializer("dataxj") - .withFilePattern("datasmuffiwjb").withNestingSeparator("datavbp").withEncodingName("datajqjtotd") - .withJsonNodeReference("dataosc").withJsonPathDefinition("datahwuusiecktybh"); + JsonFormat model = new JsonFormat().withSerializer("datazqpl") + .withDeserializer("datakihonik") + .withFilePattern("datadxmdses") + .withNestingSeparator("dataujbjppp") + .withEncodingName("datalpdib") + .withJsonNodeReference("datath") + .withJsonPathDefinition("dataat"); model = BinaryData.fromObject(model).toObject(JsonFormat.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java index 5f9c827f7b988..56d897e4056d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java @@ -14,14 +14,14 @@ public final class JsonReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonReadSettings model = BinaryData.fromString( - "{\"type\":\"JsonReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"klowuth\":\"datagzhhfnaqclep\"}},\"\":{\"fn\":\"datahnmllbljehwhxxu\",\"ah\":\"dataa\",\"aovubfl\":\"datakvnapxhtqwsdd\"}}") + "{\"type\":\"qoy\",\"compressionProperties\":{\"type\":\"dwocufcsh\",\"\":{\"x\":\"datalc\",\"k\":\"dataofwqdro\",\"vgjm\":\"dataegilbkzctqbvntl\"}},\"\":{\"iklnt\":\"dataslepdb\",\"b\":\"datapfnumpyy\"}}") 
.toObject(JsonReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { JsonReadSettings model = new JsonReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "dwocufcsh"))); model = BinaryData.fromObject(model).toObject(JsonReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java index 9ccb0878cb969..efc78eaaa6b88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java @@ -17,21 +17,25 @@ public final class JsonSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonSink model = BinaryData.fromString( - "{\"type\":\"JsonSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datalnt\",\"disableMetricsCollection\":\"datah\",\"copyBehavior\":\"datatvxghbehhe\",\"metadata\":[{\"name\":\"dataorrvwlc\",\"value\":\"datajlyugzl\"},{\"name\":\"datajirj\",\"value\":\"datarsyfdsgrtke\"}],\"\":{\"gevjman\":\"dataru\",\"vtzdtwxfjlpkoc\":\"datarvvjoklb\",\"uvwlfzjrjgla\":\"dataexfmqfuflu\"}},\"formatSettings\":{\"type\":\"JsonWriteSettings\",\"filePattern\":\"datagzscgslwujk\",\"\":{\"vatujphqv\":\"datapmlrjnnbmodsytq\",\"wpiqkkmpfnw\":\"dataxvvogwghxo\",\"hnrgmg\":\"datarmzwmtsmeaciyp\",\"lsnch\":\"datavcusvidkzbdb\"}},\"writeBatchSize\":\"datarfomlh\",\"writeBatchTimeout\":\"dataiktecs\",\"sinkRetryCount\":\"datacqweydaa\",\"sinkRetryWait\":\"datattmfcx\",\"maxConcurrentConnections\":\"datawfsqjxxbs\",\"disableMetricsCollection\":\"dataqiwl\",\"\":{\"zpr\":\"datatsyjzdasgk\",\"lbddlnzmff\":\"dataqomuzohnpkof\",\"junmgd\":\"datavowlammvazvwzien\"}}") + "{\"type\":\"rljmjzyadafec\",\"storeSettings\":{\"type\":\"svpmw\",\"maxConcurrentConnections\":\"dataoqypvplmyzebv\",\"disableMetricsCollection\":\"datahoydehbvbex\",\"copyBehavior\":\"dataynnladdhdklwzz\",\"metadata\":[{\"name\":\"databosacrnpscfkef\",\"value\":\"datatxe\"},{\"name\":\"datamimgjuvjvtgece\",\"value\":\"datannle\"}],\"\":{\"kctdnnqok\":\"dataukfj\",\"uywijnlpeczq\":\"dataeuzslnyj\",\"vphr\":\"datanmzkqydthfcwycm\"}},\"formatSettings\":{\"type\":\"hjdhlskeifw\",\"filePattern\":\"datae\",\"\":{\"xiwpptvbudbnujv\":\"dataow\",\"fzxkqsleokbam\":\"datallyjelnhmuzhxk\",\"gccg\":\"datahn\",\"jahlqoxwqlnxv\":\"datalepamvlbaxda\"}},\"writeBatchSize\":\"dataufldzjcp\",\"writeBatchTimeout\":\"datajbzp\",\"sinkRetryCount\":\"datafejg\",\"sinkRetryWait\":\"datafvboxvwtln\",\"maxConcurrentConnections\":\"datashtujaqpkupnr\",\"disableMetricsCollection\":\"datajeypdk\",\"\":{\"sy\":\"datax\",\"rnihgothy\":\"databdrq\",\"ovsvjxnsor\":\"databwge\"}}") .toObject(JsonSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JsonSink model = new JsonSink().withWriteBatchSize("datarfomlh").withWriteBatchTimeout("dataiktecs") - .withSinkRetryCount("datacqweydaa").withSinkRetryWait("datattmfcx") - .withMaxConcurrentConnections("datawfsqjxxbs").withDisableMetricsCollection("dataqiwl") - 
.withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datalnt") - .withDisableMetricsCollection("datah").withCopyBehavior("datatvxghbehhe") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataorrvwlc").withValue("datajlyugzl"), - new MetadataItem().withName("datajirj").withValue("datarsyfdsgrtke"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new JsonWriteSettings().withFilePattern("datagzscgslwujk")); + JsonSink model = new JsonSink().withWriteBatchSize("dataufldzjcp") + .withWriteBatchTimeout("datajbzp") + .withSinkRetryCount("datafejg") + .withSinkRetryWait("datafvboxvwtln") + .withMaxConcurrentConnections("datashtujaqpkupnr") + .withDisableMetricsCollection("datajeypdk") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataoqypvplmyzebv") + .withDisableMetricsCollection("datahoydehbvbex") + .withCopyBehavior("dataynnladdhdklwzz") + .withMetadata(Arrays.asList(new MetadataItem().withName("databosacrnpscfkef").withValue("datatxe"), + new MetadataItem().withName("datamimgjuvjvtgece").withValue("datannle"))) + .withAdditionalProperties(mapOf("type", "svpmw"))) + .withFormatSettings(new JsonWriteSettings().withFilePattern("datae")); model = BinaryData.fromObject(model).toObject(JsonSink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java index e76c1316f3071..1bee5b0acf57c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java @@ -16,19 +16,22 @@ public final class JsonSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { JsonSource model = BinaryData.fromString( - "{\"type\":\"JsonSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"databdsgq\",\"disableMetricsCollection\":\"datacid\",\"\":{\"jnhpyylekubiwvjv\":\"datazzhdjbyfdfu\"}},\"formatSettings\":{\"type\":\"JsonReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"xpwjvfisloq\":\"dataqb\",\"pjbdqmnki\":\"datattkbzwgj\",\"xjttnurkmerqza\":\"datajqsshu\"}},\"\":{\"vqvvtjwdlduvimg\":\"dataom\",\"jvbkjtg\":\"dataceormxoxtapaf\"}},\"additionalColumns\":\"datacptavcipydnuj\",\"sourceRetryCount\":\"datalskizprvpuacajx\",\"sourceRetryWait\":\"datagx\",\"maxConcurrentConnections\":\"dataxpzsl\",\"disableMetricsCollection\":\"datarypz\",\"\":{\"zgszjhekbmd\":\"datachqzkfges\",\"ojsrhgpitye\":\"datach\",\"f\":\"datavwysbme\"}}") + 
"{\"type\":\"yglnfwjslwve\",\"storeSettings\":{\"type\":\"wehjybboqyxi\",\"maxConcurrentConnections\":\"datadvdgemy\",\"disableMetricsCollection\":\"dataddzjtxlvgslmgl\",\"\":{\"ekkae\":\"datayvagakfqpaexllt\",\"wjkraleglpynsbl\":\"datagrhrdnsgvsrtqlt\"}},\"formatSettings\":{\"type\":\"qzbiukzmfyfv\",\"compressionProperties\":{\"type\":\"ltvkylwjopq\",\"\":{\"sscismrnneklfi\":\"datakrjolbaegha\",\"clxtfuo\":\"datanys\"}},\"\":{\"olseyxpgkml\":\"dataofaiwlnfvexiuuqa\"}},\"additionalColumns\":\"dataluaywgcjqnfa\",\"sourceRetryCount\":\"datalucpmqwkfgm\",\"sourceRetryWait\":\"datavekstzqzhd\",\"maxConcurrentConnections\":\"datacajfersxnxlkcw\",\"disableMetricsCollection\":\"dataejssksgxykdepqcy\",\"\":{\"wq\":\"datawsxpzkmotgmdyg\",\"xlycelfeqg\":\"datavbiuntptpmw\",\"ycnk\":\"datawrauuruldt\",\"c\":\"datarmiecfmqcxm\"}}") .toObject(JsonSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JsonSource model = new JsonSource().withSourceRetryCount("datalskizprvpuacajx").withSourceRetryWait("datagx") - .withMaxConcurrentConnections("dataxpzsl").withDisableMetricsCollection("datarypz") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("databdsgq") - .withDisableMetricsCollection("datacid").withAdditionalProperties(mapOf("type", "StoreReadSettings"))) + JsonSource model = new JsonSource().withSourceRetryCount("datalucpmqwkfgm") + .withSourceRetryWait("datavekstzqzhd") + .withMaxConcurrentConnections("datacajfersxnxlkcw") + .withDisableMetricsCollection("dataejssksgxykdepqcy") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datadvdgemy") + .withDisableMetricsCollection("dataddzjtxlvgslmgl") + .withAdditionalProperties(mapOf("type", "wehjybboqyxi"))) .withFormatSettings(new JsonReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datacptavcipydnuj"); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "ltvkylwjopq")))) + .withAdditionalColumns("dataluaywgcjqnfa"); model = BinaryData.fromObject(model).toObject(JsonSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java index 76cf6ddbf4812..2eff982e66d48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java @@ -10,14 +10,14 @@ public final class JsonWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - JsonWriteSettings model = BinaryData - .fromString("{\"type\":\"JsonWriteSettings\",\"filePattern\":\"dataorzozf\",\"\":{\"bnefab\":\"dataw\"}}") + JsonWriteSettings model = BinaryData.fromString( + "{\"type\":\"omnqcthgqy\",\"filePattern\":\"databygqcwz\",\"\":{\"gixsjhinpyek\":\"dataazfkykkcqafnv\"}}") .toObject(JsonWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - JsonWriteSettings model = new JsonWriteSettings().withFilePattern("dataorzozf"); + JsonWriteSettings model = new JsonWriteSettings().withFilePattern("databygqcwz"); model = 
BinaryData.fromObject(model).toObject(JsonWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java index a4335f603138a..74c2bf836c759 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java @@ -11,14 +11,14 @@ public final class LakeHouseLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseLocation model = BinaryData.fromString( - "{\"type\":\"LakeHouseLocation\",\"folderPath\":\"datadivznl\",\"fileName\":\"dataslkskhjqjpvbai\",\"\":{\"ywbqgroigbsfsgs\":\"datatgzgta\",\"fmhl\":\"dataenwl\",\"tryldsxebuhsxr\":\"dataqlxspmrj\"}}") + "{\"type\":\"lznfhkqytkztado\",\"folderPath\":\"datafzdgjfcycrsvloy\",\"fileName\":\"dataigqkzjuqwqa\",\"\":{\"pnfdbgsosciene\":\"datazxpixhyo\"}}") .toObject(LakeHouseLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { LakeHouseLocation model - = new LakeHouseLocation().withFolderPath("datadivznl").withFileName("dataslkskhjqjpvbai"); + = new LakeHouseLocation().withFolderPath("datafzdgjfcycrsvloy").withFileName("dataigqkzjuqwqa"); model = BinaryData.fromObject(model).toObject(LakeHouseLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java index c5b73a70c5ae5..dc5d259e82e9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java @@ -11,18 +11,23 @@ public final class LakeHouseReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseReadSettings model = BinaryData.fromString( - "{\"type\":\"LakeHouseReadSettings\",\"recursive\":\"datagrosxfdxrc\",\"wildcardFolderPath\":\"datambbhluvdce\",\"wildcardFileName\":\"dataevnom\",\"fileListPath\":\"datareiwdskie\",\"enablePartitionDiscovery\":\"dataaenalepta\",\"partitionRootPath\":\"dataqo\",\"deleteFilesAfterCompletion\":\"dataaodbhgxbadbo\",\"modifiedDatetimeStart\":\"datakmihggv\",\"modifiedDatetimeEnd\":\"dataqwyxbatr\",\"maxConcurrentConnections\":\"dataynlslgxif\",\"disableMetricsCollection\":\"datasclqwk\",\"\":{\"gshaqfu\":\"dataepuvambzf\"}}") + 
"{\"type\":\"lezkyfykmnrea\",\"recursive\":\"datafprzlvhohzkcsjd\",\"wildcardFolderPath\":\"dataposmnmkypeqm\",\"wildcardFileName\":\"dataeox\",\"fileListPath\":\"datarulavxeaym\",\"enablePartitionDiscovery\":\"datachwpfunptsry\",\"partitionRootPath\":\"dataa\",\"deleteFilesAfterCompletion\":\"databwbxvsytbx\",\"modifiedDatetimeStart\":\"dataubaddlmjquliovrc\",\"modifiedDatetimeEnd\":\"datathluzey\",\"maxConcurrentConnections\":\"datawepqegtytby\",\"disableMetricsCollection\":\"dataeufutfq\",\"\":{\"zwdswikyewvskzw\":\"datavnjgjryksh\",\"emeft\":\"datazwsguipq\",\"drel\":\"datagjrfkqf\",\"glua\":\"datakxk\"}}") .toObject(LakeHouseReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LakeHouseReadSettings model = new LakeHouseReadSettings().withMaxConcurrentConnections("dataynlslgxif") - .withDisableMetricsCollection("datasclqwk").withRecursive("datagrosxfdxrc") - .withWildcardFolderPath("datambbhluvdce").withWildcardFileName("dataevnom") - .withFileListPath("datareiwdskie").withEnablePartitionDiscovery("dataaenalepta") - .withPartitionRootPath("dataqo").withDeleteFilesAfterCompletion("dataaodbhgxbadbo") - .withModifiedDatetimeStart("datakmihggv").withModifiedDatetimeEnd("dataqwyxbatr"); + LakeHouseReadSettings model = new LakeHouseReadSettings().withMaxConcurrentConnections("datawepqegtytby") + .withDisableMetricsCollection("dataeufutfq") + .withRecursive("datafprzlvhohzkcsjd") + .withWildcardFolderPath("dataposmnmkypeqm") + .withWildcardFileName("dataeox") + .withFileListPath("datarulavxeaym") + .withEnablePartitionDiscovery("datachwpfunptsry") + .withPartitionRootPath("dataa") + .withDeleteFilesAfterCompletion("databwbxvsytbx") + .withModifiedDatetimeStart("dataubaddlmjquliovrc") + .withModifiedDatetimeEnd("datathluzey"); model = BinaryData.fromObject(model).toObject(LakeHouseReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java index 413a69a0ffc99..561ee7470840b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java @@ -19,31 +19,37 @@ public final class LakeHouseTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseTableDataset model = BinaryData.fromString( - "{\"type\":\"LakeHouseTable\",\"typeProperties\":{\"table\":\"dataeogavfyihuz\"},\"description\":\"pwnyfjcypazwiimd\",\"structure\":\"datagkooagr\",\"schema\":\"datapamesi\",\"linkedServiceName\":{\"referenceName\":\"wqadewhuwxkyx\",\"parameters\":{\"mrnaifllxccpr\":\"dataf\"}},\"parameters\":{\"quzihir\":{\"type\":\"Bool\",\"defaultValue\":\"datazwhomydxgtuqbv\"},\"t\":{\"type\":\"SecureString\",\"defaultValue\":\"dataketydgaqo\"},\"aqgbb\":{\"type\":\"Int\",\"defaultValue\":\"datavqxerxrmhr\"}},\"annotations\":[\"dataihylrxsiyzsyiu\",\"datatitlqy\",\"datafvernnk\",\"datapkayqivbig\"],\"folder\":{\"name\":\"gzetboyztgnmu\"},\"\":{\"tgwhzbbdwrjenc\":\"datawpcfmgr\"}}") + 
"{\"type\":\"wvxcai\",\"typeProperties\":{\"table\":\"datazvvxdvphx\"},\"description\":\"icbogsfo\",\"structure\":\"datamamiyfgunruk\",\"schema\":\"datayaalbk\",\"linkedServiceName\":{\"referenceName\":\"bzqa\",\"parameters\":{\"kifqbxmnnidot\":\"datarkvit\",\"odttq\":\"datapumzuh\",\"umvuwjyulqfpqql\":\"datapva\"}},\"parameters\":{\"dblapqraczvtniwf\":{\"type\":\"Float\",\"defaultValue\":\"datahjgmr\"},\"ibxkcegc\":{\"type\":\"Object\",\"defaultValue\":\"datawhxm\"},\"iawpjfkrarerdl\":{\"type\":\"Array\",\"defaultValue\":\"dataxkxtcxbbjbeyqoh\"},\"suameg\":{\"type\":\"String\",\"defaultValue\":\"datatpxowgowwdocj\"}},\"annotations\":[\"dataiszhexu\",\"datafav\",\"dataesloblit\"],\"folder\":{\"name\":\"s\"},\"\":{\"trztogujg\":\"dataosanjso\"}}") .toObject(LakeHouseTableDataset.class); - Assertions.assertEquals("pwnyfjcypazwiimd", model.description()); - Assertions.assertEquals("wqadewhuwxkyx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("quzihir").type()); - Assertions.assertEquals("gzetboyztgnmu", model.folder().name()); + Assertions.assertEquals("icbogsfo", model.description()); + Assertions.assertEquals("bzqa", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("dblapqraczvtniwf").type()); + Assertions.assertEquals("s", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LakeHouseTableDataset model = new LakeHouseTableDataset().withDescription("pwnyfjcypazwiimd") - .withStructure("datagkooagr").withSchema("datapamesi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wqadewhuwxkyx") - .withParameters(mapOf("mrnaifllxccpr", "dataf"))) - .withParameters(mapOf("quzihir", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datazwhomydxgtuqbv"), "t", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataketydgaqo"), - "aqgbb", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datavqxerxrmhr"))) - .withAnnotations(Arrays.asList("dataihylrxsiyzsyiu", "datatitlqy", "datafvernnk", "datapkayqivbig")) - .withFolder(new DatasetFolder().withName("gzetboyztgnmu")).withTable("dataeogavfyihuz"); + LakeHouseTableDataset model = new LakeHouseTableDataset().withDescription("icbogsfo") + .withStructure("datamamiyfgunruk") + .withSchema("datayaalbk") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bzqa") + .withParameters( + mapOf("kifqbxmnnidot", "datarkvit", "odttq", "datapumzuh", "umvuwjyulqfpqql", "datapva"))) + .withParameters(mapOf("dblapqraczvtniwf", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datahjgmr"), "ibxkcegc", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawhxm"), + "iawpjfkrarerdl", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxkxtcxbbjbeyqoh"), + "suameg", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatpxowgowwdocj"))) + .withAnnotations(Arrays.asList("dataiszhexu", "datafav", "dataesloblit")) + .withFolder(new DatasetFolder().withName("s")) + .withTable("datazvvxdvphx"); model = BinaryData.fromObject(model).toObject(LakeHouseTableDataset.class); - Assertions.assertEquals("pwnyfjcypazwiimd", model.description()); - Assertions.assertEquals("wqadewhuwxkyx", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("quzihir").type()); - Assertions.assertEquals("gzetboyztgnmu", model.folder().name()); + Assertions.assertEquals("icbogsfo", model.description()); + Assertions.assertEquals("bzqa", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("dblapqraczvtniwf").type()); + Assertions.assertEquals("s", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java index 5603914f1e90d..4dd177e5648d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class LakeHouseTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - LakeHouseTableDatasetTypeProperties model = BinaryData.fromString("{\"table\":\"dataeiiewibdtpl\"}") + LakeHouseTableDatasetTypeProperties model = BinaryData.fromString("{\"table\":\"dataclxhwkzfggs\"}") .toObject(LakeHouseTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { LakeHouseTableDatasetTypeProperties model - = new LakeHouseTableDatasetTypeProperties().withTable("dataeiiewibdtpl"); + = new LakeHouseTableDatasetTypeProperties().withTable("dataclxhwkzfggs"); model = BinaryData.fromObject(model).toObject(LakeHouseTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java index f979fefbed3ae..9766130afe2ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java @@ -11,17 +11,21 @@ public final class LakeHouseTableSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseTableSink model = BinaryData.fromString( - "{\"type\":\"LakeHouseTableSink\",\"tableActionOption\":\"datanpry\",\"partitionOption\":\"dataujqyeyzoivi\",\"partitionNameList\":\"datanihmwvhc\",\"writeBatchSize\":\"datamua\",\"writeBatchTimeout\":\"datatd\",\"sinkRetryCount\":\"datai\",\"sinkRetryWait\":\"datazytdj\",\"maxConcurrentConnections\":\"dataun\",\"disableMetricsCollection\":\"dataadyrhmpokfxcb\",\"\":{\"lnlwgseouh\":\"dataxgajyrac\",\"zru\":\"dataetxupxeain\",\"zfaja\":\"datageaenkfsxtsmzva\",\"ujcqz\":\"databswwbrllvva\"}}") + 
"{\"type\":\"czccm\",\"tableActionOption\":\"datahepnmeg\",\"partitionOption\":\"datacpoyda\",\"partitionNameList\":\"dataxmbx\",\"writeBatchSize\":\"dataig\",\"writeBatchTimeout\":\"datapldaoiidxknsqdru\",\"sinkRetryCount\":\"dataanpjvqrwl\",\"sinkRetryWait\":\"dataeuyxxrwovgwqzzu\",\"maxConcurrentConnections\":\"databwqrot\",\"disableMetricsCollection\":\"dataytrzsqbckqgtemin\",\"\":{\"cjzkgy\":\"datavy\",\"eskindgmk\":\"datavi\",\"liflxrnsyvmu\":\"databuvyuzzwph\"}}") .toObject(LakeHouseTableSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LakeHouseTableSink model - = new LakeHouseTableSink().withWriteBatchSize("datamua").withWriteBatchTimeout("datatd") - .withSinkRetryCount("datai").withSinkRetryWait("datazytdj").withMaxConcurrentConnections("dataun") - .withDisableMetricsCollection("dataadyrhmpokfxcb").withTableActionOption("datanpry") - .withPartitionOption("dataujqyeyzoivi").withPartitionNameList("datanihmwvhc"); + LakeHouseTableSink model = new LakeHouseTableSink().withWriteBatchSize("dataig") + .withWriteBatchTimeout("datapldaoiidxknsqdru") + .withSinkRetryCount("dataanpjvqrwl") + .withSinkRetryWait("dataeuyxxrwovgwqzzu") + .withMaxConcurrentConnections("databwqrot") + .withDisableMetricsCollection("dataytrzsqbckqgtemin") + .withTableActionOption("datahepnmeg") + .withPartitionOption("datacpoyda") + .withPartitionNameList("dataxmbx"); model = BinaryData.fromObject(model).toObject(LakeHouseTableSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java index dd06a58c5a20e..4a18f75d6a316 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java @@ -11,16 +11,19 @@ public final class LakeHouseTableSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseTableSource model = BinaryData.fromString( - "{\"type\":\"LakeHouseTableSource\",\"timestampAsOf\":\"datayeyqsiniejjb\",\"versionAsOf\":\"datav\",\"additionalColumns\":\"datakwrvtlbb\",\"sourceRetryCount\":\"databdtmrijt\",\"sourceRetryWait\":\"dataf\",\"maxConcurrentConnections\":\"databpvizuuluilgmova\",\"disableMetricsCollection\":\"datat\",\"\":{\"cvaa\":\"datajxgqsb\",\"lrmrtdznvjgovy\":\"datauvbzcqgtzx\",\"rkntfwxkeu\":\"datapppswleptta\"}}") + "{\"type\":\"euphg\",\"timestampAsOf\":\"databtgm\",\"versionAsOf\":\"datadredcvwsbsdyxq\",\"additionalColumns\":\"datarqouyfcfd\",\"sourceRetryCount\":\"dataaanubjeboelk\",\"sourceRetryWait\":\"datahso\",\"maxConcurrentConnections\":\"datav\",\"disableMetricsCollection\":\"datajdhw\",\"\":{\"vlkpwavd\":\"datajhhpfjz\"}}") .toObject(LakeHouseTableSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LakeHouseTableSource model - = new LakeHouseTableSource().withSourceRetryCount("databdtmrijt").withSourceRetryWait("dataf") - .withMaxConcurrentConnections("databpvizuuluilgmova").withDisableMetricsCollection("datat") - .withTimestampAsOf("datayeyqsiniejjb").withVersionAsOf("datav").withAdditionalColumns("datakwrvtlbb"); + LakeHouseTableSource model = new 
LakeHouseTableSource().withSourceRetryCount("dataaanubjeboelk") + .withSourceRetryWait("datahso") + .withMaxConcurrentConnections("datav") + .withDisableMetricsCollection("datajdhw") + .withTimestampAsOf("databtgm") + .withVersionAsOf("datadredcvwsbsdyxq") + .withAdditionalColumns("datarqouyfcfd"); model = BinaryData.fromObject(model).toObject(LakeHouseTableSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java index 46ca780f36ea7..5711d67453b61 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java @@ -13,17 +13,19 @@ public final class LakeHouseWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LakeHouseWriteSettings model = BinaryData.fromString( - "{\"type\":\"LakeHouseWriteSettings\",\"maxConcurrentConnections\":\"dataxnjorvpcoxdlppu\",\"disableMetricsCollection\":\"datan\",\"copyBehavior\":\"datapclnm\",\"metadata\":[{\"name\":\"datahbefivozrdzrikw\",\"value\":\"datacvvrkxpbjgozoel\"},{\"name\":\"dataerpbct\",\"value\":\"datarvns\"},{\"name\":\"dataacbrywqqeztlf\",\"value\":\"datalgxrsn\"}],\"\":{\"a\":\"dataoo\",\"xngmebvni\":\"datahvsfgywkinkhv\",\"xkcttp\":\"datajhdkvkqjjouh\"}}") + "{\"type\":\"kwwdkkvdevdvkey\",\"maxConcurrentConnections\":\"datajchd\",\"disableMetricsCollection\":\"dataxeiluexvmlg\",\"copyBehavior\":\"datapsqpfxjwta\",\"metadata\":[{\"name\":\"datagu\",\"value\":\"datadyxrjjdjiki\"},{\"name\":\"datazubgddk\",\"value\":\"datavq\"},{\"name\":\"datauq\",\"value\":\"datalwnxryyqtjcrpax\"},{\"name\":\"datalfxse\",\"value\":\"datad\"}],\"\":{\"zhdciuxotb\":\"dataldmxfqftywbba\"}}") .toObject(LakeHouseWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LakeHouseWriteSettings model = new LakeHouseWriteSettings().withMaxConcurrentConnections("dataxnjorvpcoxdlppu") - .withDisableMetricsCollection("datan").withCopyBehavior("datapclnm").withMetadata( - Arrays.asList(new MetadataItem().withName("datahbefivozrdzrikw").withValue("datacvvrkxpbjgozoel"), - new MetadataItem().withName("dataerpbct").withValue("datarvns"), - new MetadataItem().withName("dataacbrywqqeztlf").withValue("datalgxrsn"))); + LakeHouseWriteSettings model = new LakeHouseWriteSettings().withMaxConcurrentConnections("datajchd") + .withDisableMetricsCollection("dataxeiluexvmlg") + .withCopyBehavior("datapsqpfxjwta") + .withMetadata(Arrays.asList(new MetadataItem().withName("datagu").withValue("datadyxrjjdjiki"), + new MetadataItem().withName("datazubgddk").withValue("datavq"), + new MetadataItem().withName("datauq").withValue("datalwnxryyqtjcrpax"), + new MetadataItem().withName("datalfxse").withValue("datad"))); model = BinaryData.fromObject(model).toObject(LakeHouseWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java index 920515582c333..2f52b720e57fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java @@ -12,15 +12,14 @@ public final class LinkedIntegrationRuntimeRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedIntegrationRuntimeRequest model - = BinaryData.fromString("{\"factoryName\":\"nrjawgqwg\"}").toObject(LinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("nrjawgqwg", model.linkedFactoryName()); + = BinaryData.fromString("{\"factoryName\":\"urqhaka\"}").toObject(LinkedIntegrationRuntimeRequest.class); + Assertions.assertEquals("urqhaka", model.linkedFactoryName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LinkedIntegrationRuntimeRequest model - = new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("nrjawgqwg"); + LinkedIntegrationRuntimeRequest model = new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("urqhaka"); model = BinaryData.fromObject(model).toObject(LinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("nrjawgqwg", model.linkedFactoryName()); + Assertions.assertEquals("urqhaka", model.linkedFactoryName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java index 17f0790c55832..81501b477c47f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java @@ -11,7 +11,7 @@ public final class LinkedIntegrationRuntimeTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedIntegrationRuntime model = BinaryData.fromString( - "{\"name\":\"ajzdj\",\"subscriptionId\":\"brwrfrmhoufokrbg\",\"dataFactoryName\":\"jcksirs\",\"dataFactoryLocation\":\"unnik\",\"createTime\":\"2021-05-07T10:45:42Z\"}") + "{\"name\":\"ms\",\"subscriptionId\":\"wsmdoowfrgd\",\"dataFactoryName\":\"gbhtpomiustkqywa\",\"dataFactoryLocation\":\"lgrrsk\",\"createTime\":\"2021-11-10T11:26:07Z\"}") .toObject(LinkedIntegrationRuntime.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java index 55996a810d61f..4256a31310108 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java @@ -19,36 +19,32 @@ public final class 
LinkedServiceDebugResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedServiceDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"ffm\",\"parameters\":{\"dby\":\"datawfbkgozxwo\",\"zqaclna\":\"datap\"}},\"description\":\"biygnugjknfsmfct\",\"parameters\":{\"jhvsujztczyt\":{\"type\":\"Float\",\"defaultValue\":\"datayilflqoiquvrehmr\"},\"auunfprnjletlx\":{\"type\":\"Bool\",\"defaultValue\":\"dataw\"},\"nlqwzdvpiwhx\":{\"type\":\"Object\",\"defaultValue\":\"datapddouifamowaziyn\"},\"quhuxylrj\":{\"type\":\"SecureString\",\"defaultValue\":\"datadtmaa\"}},\"annotations\":[\"dataygjbmzyospspsh\"],\"\":{\"df\":\"datakyjpmspbps\",\"vczkcnyxrxmunjd\":\"datapyogtieyuj\",\"nkvxlxpaglqi\":\"datavg\",\"khpzvuqdflv\":\"databgkc\"}},\"name\":\"iypfp\"}") + "{\"properties\":{\"type\":\"xmqeqalh\",\"connectVia\":{\"referenceName\":\"nhg\",\"parameters\":{\"ta\":\"datayynfsvkhgbv\",\"jcpeogkhnmg\":\"dataarfdlpukhpyrnei\",\"xddbhfhpfpaz\":\"dataro\"}},\"description\":\"oywjxhpdulont\",\"parameters\":{\"ehtuevrhr\":{\"type\":\"Bool\",\"defaultValue\":\"dataw\"}},\"annotations\":[\"dataoogwxhnsduugwb\",\"datareur\"],\"\":{\"lnvnafvvkyfedev\":\"datafuarenlvhht\",\"lcqxypokk\":\"databo\",\"zng\":\"dataminqcym\"}},\"name\":\"dxxewuninv\"}") .toObject(LinkedServiceDebugResource.class); - Assertions.assertEquals("iypfp", model.name()); - Assertions.assertEquals("ffm", model.properties().connectVia().referenceName()); - Assertions.assertEquals("biygnugjknfsmfct", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("jhvsujztczyt").type()); + Assertions.assertEquals("dxxewuninv", model.name()); + Assertions.assertEquals("nhg", model.properties().connectVia().referenceName()); + Assertions.assertEquals("oywjxhpdulont", model.properties().description()); + Assertions.assertEquals(ParameterType.BOOL, model.properties().parameters().get("ehtuevrhr").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LinkedServiceDebugResource model = new LinkedServiceDebugResource().withName("iypfp") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ffm") - .withParameters(mapOf("dby", "datawfbkgozxwo", "zqaclna", "datap"))) - .withDescription("biygnugjknfsmfct") - .withParameters(mapOf("jhvsujztczyt", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayilflqoiquvrehmr"), - "auunfprnjletlx", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataw"), "nlqwzdvpiwhx", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("datapddouifamowaziyn"), - "quhuxylrj", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datadtmaa"))) - .withAnnotations(Arrays.asList("dataygjbmzyospspsh")) - .withAdditionalProperties(mapOf("type", "LinkedService"))); + LinkedServiceDebugResource model + = new LinkedServiceDebugResource().withName("dxxewuninv") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("nhg") + .withParameters(mapOf("ta", "datayynfsvkhgbv", "jcpeogkhnmg", "dataarfdlpukhpyrnei", + "xddbhfhpfpaz", "dataro"))) + .withDescription("oywjxhpdulont") + .withParameters(mapOf("ehtuevrhr", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataw"))) + 
.withAnnotations(Arrays.asList("dataoogwxhnsduugwb", "datareur")) + .withAdditionalProperties(mapOf("type", "xmqeqalh"))); model = BinaryData.fromObject(model).toObject(LinkedServiceDebugResource.class); - Assertions.assertEquals("iypfp", model.name()); - Assertions.assertEquals("ffm", model.properties().connectVia().referenceName()); - Assertions.assertEquals("biygnugjknfsmfct", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("jhvsujztczyt").type()); + Assertions.assertEquals("dxxewuninv", model.name()); + Assertions.assertEquals("nhg", model.properties().connectVia().referenceName()); + Assertions.assertEquals("oywjxhpdulont", model.properties().description()); + Assertions.assertEquals(ParameterType.BOOL, model.properties().parameters().get("ehtuevrhr").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java index b788d8a9e0d90..753d616e64c1b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java @@ -20,62 +20,42 @@ public final class LinkedServiceListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedServiceListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"o\",\"parameters\":{\"ggdtpnapnyiro\":\"datanxknalaulp\",\"ylgqgitxmedjvcsl\":\"datauhpigvp\",\"wwncwzzhxgk\":\"datan\",\"t\":\"datarmgucnap\"}},\"description\":\"ellwptfdy\",\"parameters\":{\"opppcqeq\":{\"type\":\"Object\",\"defaultValue\":\"datauaceopzfqrhhu\"},\"ahzxctobgbk\":{\"type\":\"String\",\"defaultValue\":\"dataz\"},\"grcfb\":{\"type\":\"String\",\"defaultValue\":\"dataizpost\"}},\"annotations\":[\"datamfqjhhkxbp\",\"datajy\",\"datajhxxjyn\",\"datau\"],\"\":{\"szjfauvjfdxxivet\":\"datakrtswbxqz\"}},\"name\":\"cqaqtdoqmcbx\",\"type\":\"vxysl\",\"etag\":\"hsfxoblytkb\",\"id\":\"pe\"},{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"wfbkrvrns\",\"parameters\":{\"ohxcrsbfova\":\"dataq\",\"sub\":\"datarruvwbhsq\",\"rxbpyb\":\"datagjb\",\"twss\":\"datarfbjf\"}},\"description\":\"ftpvjzbexil\",\"parameters\":{\"vwpm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataq\"},\"jhwqytjrybnw\":{\"type\":\"String\",\"defaultValue\":\"dataruoujmk\"}},\"annotations\":[\"datagdrjervnaenqpe\",\"dataindoygmifthnzd\",\"datadslgnayqigynduh\",\"datavhqlkthumaqo\"],\"\":{\"gccymvaolpssl\":\"dataycduier\",\"d\":\"datalfmmdnbbglzpswi\"}},\"name\":\"wyhzdx\",\"type\":\"adbzmnvdfznud\",\"etag\":\"dvxzbncblylpst\",\"id\":\"hh\"}],\"nextLink\":\"rzdzucerscdnt\"}") + 
"{\"value\":[{\"properties\":{\"type\":\"gpw\",\"connectVia\":{\"referenceName\":\"uh\",\"parameters\":{\"uusdttouwa\":\"databsjyofdx\"}},\"description\":\"ekqvkeln\",\"parameters\":{\"sxyawjoyaqcs\":{\"type\":\"SecureString\",\"defaultValue\":\"datawyjsflhhcaalnjix\"},\"zyexzn\":{\"type\":\"Float\",\"defaultValue\":\"datapkii\"},\"xknalaulppg\":{\"type\":\"Array\",\"defaultValue\":\"dataxhnrztfolhb\"}},\"annotations\":[\"datapnapnyiropuh\",\"dataigvpgylg\",\"datagit\",\"datamedjvcslynqwwncw\"],\"\":{\"rmgucnap\":\"dataxgk\"}},\"name\":\"eoellwptfdygp\",\"type\":\"b\",\"etag\":\"ceopzfqrhhuaopp\",\"id\":\"qeqxo\"}],\"nextLink\":\"dahzxctobg\"}") .toObject(LinkedServiceListResponse.class); - Assertions.assertEquals("pe", model.value().get(0).id()); - Assertions.assertEquals("o", model.value().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("ellwptfdy", model.value().get(0).properties().description()); - Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("opppcqeq").type()); - Assertions.assertEquals("rzdzucerscdnt", model.nextLink()); + Assertions.assertEquals("qeqxo", model.value().get(0).id()); + Assertions.assertEquals("uh", model.value().get(0).properties().connectVia().referenceName()); + Assertions.assertEquals("ekqvkeln", model.value().get(0).properties().description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.value().get(0).properties().parameters().get("sxyawjoyaqcs").type()); + Assertions.assertEquals("dahzxctobg", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { LinkedServiceListResponse model - = new LinkedServiceListResponse() - .withValue( - Arrays.asList(new LinkedServiceResourceInner().withId("pe") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("o") - .withParameters(mapOf("ggdtpnapnyiro", "datanxknalaulp", "ylgqgitxmedjvcsl", - "datauhpigvp", "wwncwzzhxgk", "datan", "t", "datarmgucnap"))) - .withDescription("ellwptfdy") - .withParameters(mapOf("opppcqeq", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("datauaceopzfqrhhu"), - "ahzxctobgbk", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataz"), - "grcfb", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("dataizpost"))) - .withAnnotations(Arrays.asList("datamfqjhhkxbp", "datajy", "datajhxxjyn", "datau")) - .withAdditionalProperties(mapOf("type", "LinkedService"))), - new LinkedServiceResourceInner() - .withId( - "hh") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("wfbkrvrns") - .withParameters(mapOf("ohxcrsbfova", "dataq", "sub", "datarruvwbhsq", "rxbpyb", - "datagjb", "twss", "datarfbjf"))) - .withDescription("ftpvjzbexil") - .withParameters(mapOf("vwpm", - new ParameterSpecification() - .withType(ParameterType.SECURE_STRING).withDefaultValue("dataq"), - "jhwqytjrybnw", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("dataruoujmk"))) - .withAnnotations(Arrays.asList("datagdrjervnaenqpe", "dataindoygmifthnzd", - "datadslgnayqigynduh", "datavhqlkthumaqo")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withNextLink("rzdzucerscdnt"); + = new LinkedServiceListResponse().withValue(Arrays.asList(new LinkedServiceResourceInner().withId("qeqxo") + .withProperties(new LinkedService() + .withConnectVia(new 
IntegrationRuntimeReference().withReferenceName("uh") + .withParameters(mapOf("uusdttouwa", "databsjyofdx"))) + .withDescription("ekqvkeln") + .withParameters(mapOf("sxyawjoyaqcs", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datawyjsflhhcaalnjix"), + "zyexzn", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapkii"), + "xknalaulppg", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxhnrztfolhb"))) + .withAnnotations( + Arrays.asList("datapnapnyiropuh", "dataigvpgylg", "datagit", "datamedjvcslynqwwncw")) + .withAdditionalProperties(mapOf("type", "gpw"))))) + .withNextLink("dahzxctobg"); model = BinaryData.fromObject(model).toObject(LinkedServiceListResponse.class); - Assertions.assertEquals("pe", model.value().get(0).id()); - Assertions.assertEquals("o", model.value().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("ellwptfdy", model.value().get(0).properties().description()); - Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("opppcqeq").type()); - Assertions.assertEquals("rzdzucerscdnt", model.nextLink()); + Assertions.assertEquals("qeqxo", model.value().get(0).id()); + Assertions.assertEquals("uh", model.value().get(0).properties().connectVia().referenceName()); + Assertions.assertEquals("ekqvkeln", model.value().get(0).properties().description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.value().get(0).properties().parameters().get("sxyawjoyaqcs").type()); + Assertions.assertEquals("dahzxctobg", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java index 61a0ca3e00a52..d66b5b8dd1fc3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java @@ -13,19 +13,18 @@ public final class LinkedServiceReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - LinkedServiceReference model = BinaryData.fromString( - "{\"referenceName\":\"niodkooeb\",\"parameters\":{\"vdkcrodtj\":\"datajhemms\",\"lfoakg\":\"datanfwjlfltkacjvefk\",\"pulpqblylsyxk\":\"datakfpagao\",\"zuempsbzkf\":\"datajnsjervtiagxsd\"}}") - .toObject(LinkedServiceReference.class); - Assertions.assertEquals("niodkooeb", model.referenceName()); + LinkedServiceReference model + = BinaryData.fromString("{\"referenceName\":\"fz\",\"parameters\":{\"xzfe\":\"dataxxbuyq\"}}") + .toObject(LinkedServiceReference.class); + Assertions.assertEquals("fz", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LinkedServiceReference model = new LinkedServiceReference().withReferenceName("niodkooeb") - .withParameters(mapOf("vdkcrodtj", "datajhemms", "lfoakg", "datanfwjlfltkacjvefk", "pulpqblylsyxk", - "datakfpagao", "zuempsbzkf", "datajnsjervtiagxsd")); + LinkedServiceReference model + = new LinkedServiceReference().withReferenceName("fz").withParameters(mapOf("xzfe", "dataxxbuyq")); model = 
BinaryData.fromObject(model).toObject(LinkedServiceReference.class); - Assertions.assertEquals("niodkooeb", model.referenceName()); + Assertions.assertEquals("fz", model.referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java index ca3329a55c298..97c3ec9053d89 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java @@ -19,32 +19,31 @@ public final class LinkedServiceResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedServiceResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"vfiwjmygtdss\",\"parameters\":{\"emwabnet\":\"datatmweriofzpyq\",\"d\":\"datahhszh\"}},\"description\":\"vwiwubmwmbesld\",\"parameters\":{\"flcxoga\":{\"type\":\"Float\",\"defaultValue\":\"datapp\"},\"qzeqqkdltfzxm\":{\"type\":\"SecureString\",\"defaultValue\":\"datanzmnsikvm\"}},\"annotations\":[\"datahgure\"],\"\":{\"xwak\":\"datawobdagxtibqdx\",\"lbpodxunk\":\"dataogqxndlkzgxhuri\",\"lrb\":\"dataebxmubyynt\"}},\"name\":\"koievseo\",\"type\":\"q\",\"etag\":\"ltmuwlauwzizx\",\"id\":\"pgcjefuzmuvp\"}") + "{\"properties\":{\"type\":\"dmoizpostmg\",\"connectVia\":{\"referenceName\":\"fbunrmfqjhhk\",\"parameters\":{\"vkr\":\"datavjymjhxxjyngud\",\"szjfauvjfdxxivet\":\"dataswbxqz\",\"qaqtdoqmcbxvwvxy\":\"datat\",\"obl\":\"datalqbhsf\"}},\"description\":\"k\",\"parameters\":{\"rn\":{\"type\":\"Float\",\"defaultValue\":\"datawwwfbkr\"}},\"annotations\":[\"datahqjohxcrsbfova\",\"datarruvwbhsq\",\"datasub\"],\"\":{\"pybsrfbjfdtw\":\"databirx\"}},\"name\":\"otftpvjzbexilz\",\"type\":\"fqqnvwpmqtaruo\",\"etag\":\"mkcjhwqytjrybn\",\"id\":\"ewgdrjervn\"}") .toObject(LinkedServiceResourceInner.class); - Assertions.assertEquals("pgcjefuzmuvp", model.id()); - Assertions.assertEquals("vfiwjmygtdss", model.properties().connectVia().referenceName()); - Assertions.assertEquals("vwiwubmwmbesld", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("flcxoga").type()); + Assertions.assertEquals("ewgdrjervn", model.id()); + Assertions.assertEquals("fbunrmfqjhhk", model.properties().connectVia().referenceName()); + Assertions.assertEquals("k", model.properties().description()); + Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("rn").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LinkedServiceResourceInner model = new LinkedServiceResourceInner().withId("pgcjefuzmuvp") + LinkedServiceResourceInner model = new LinkedServiceResourceInner().withId("ewgdrjervn") .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("vfiwjmygtdss") - .withParameters(mapOf("emwabnet", "datatmweriofzpyq", "d", "datahhszh"))) - .withDescription("vwiwubmwmbesld") - .withParameters(mapOf("flcxoga", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapp"), - 
"qzeqqkdltfzxm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datanzmnsikvm"))) - .withAnnotations(Arrays.asList("datahgure")).withAdditionalProperties(mapOf("type", "LinkedService"))); + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("fbunrmfqjhhk") + .withParameters(mapOf("vkr", "datavjymjhxxjyngud", "szjfauvjfdxxivet", "dataswbxqz", + "qaqtdoqmcbxvwvxy", "datat", "obl", "datalqbhsf"))) + .withDescription("k") + .withParameters(mapOf("rn", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datawwwfbkr"))) + .withAnnotations(Arrays.asList("datahqjohxcrsbfova", "datarruvwbhsq", "datasub")) + .withAdditionalProperties(mapOf("type", "dmoizpostmg"))); model = BinaryData.fromObject(model).toObject(LinkedServiceResourceInner.class); - Assertions.assertEquals("pgcjefuzmuvp", model.id()); - Assertions.assertEquals("vfiwjmygtdss", model.properties().connectVia().referenceName()); - Assertions.assertEquals("vwiwubmwmbesld", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("flcxoga").type()); + Assertions.assertEquals("ewgdrjervn", model.id()); + Assertions.assertEquals("fbunrmfqjhhk", model.properties().connectVia().referenceName()); + Assertions.assertEquals("k", model.properties().description()); + Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("rn").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java index 8afb043c6e131..42e3b77d1c0ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java @@ -18,31 +18,35 @@ public final class LinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LinkedService model = BinaryData.fromString( - "{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"tdum\",\"parameters\":{\"hjpglkf\":\"datapxebmnzbt\"}},\"description\":\"hdneuelfph\",\"parameters\":{\"uvxzxclvi\":{\"type\":\"Array\",\"defaultValue\":\"dataozfikdowwq\"},\"dsjnka\":{\"type\":\"String\",\"defaultValue\":\"dataqzonosggbhcohf\"},\"k\":{\"type\":\"String\",\"defaultValue\":\"datatiiswacffg\"},\"ppfufl\":{\"type\":\"Bool\",\"defaultValue\":\"datawkfvhqcrailvp\"}},\"annotations\":[\"datamh\",\"datalxyjr\",\"datasag\"],\"\":{\"bcvkcvqvpkeq\":\"datanihgwqapnedg\",\"obzdopcjwvnhdl\":\"datacvdrhvoodsot\",\"mutwuoe\":\"datawmgxcxrsl\",\"yqsluic\":\"datarpkhjwn\"}}") + 
"{\"type\":\"nqpeh\",\"connectVia\":{\"referenceName\":\"doy\",\"parameters\":{\"sl\":\"datafthnzdn\",\"ynduha\":\"datanayqi\"}},\"description\":\"qlkth\",\"parameters\":{\"cdui\":{\"type\":\"Array\",\"defaultValue\":\"datalbg\"},\"lfmmdnbbglzpswi\":{\"type\":\"Float\",\"defaultValue\":\"datagccymvaolpssl\"},\"bzmnvdfznud\":{\"type\":\"SecureString\",\"defaultValue\":\"datacwyhzdxssa\"},\"ylpstdbhhxsrzdz\":{\"type\":\"SecureString\",\"defaultValue\":\"datavxzbncb\"}},\"annotations\":[\"datarsc\",\"datantnev\",\"dataiwjmygtdssls\"],\"\":{\"abnetshh\":\"dataweriofzpyqsem\",\"bmwmbesldnkw\":\"datazhedplvwiw\",\"gaokonzmnsikv\":\"datatppjflcx\"}}") .toObject(LinkedService.class); - Assertions.assertEquals("tdum", model.connectVia().referenceName()); - Assertions.assertEquals("hdneuelfph", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("uvxzxclvi").type()); + Assertions.assertEquals("doy", model.connectVia().referenceName()); + Assertions.assertEquals("qlkth", model.description()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("cdui").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LinkedService model = new LinkedService() - .withConnectVia(new IntegrationRuntimeReference() - .withReferenceName("tdum").withParameters(mapOf("hjpglkf", "datapxebmnzbt"))) - .withDescription("hdneuelfph") - .withParameters(mapOf("uvxzxclvi", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataozfikdowwq"), "dsjnka", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqzonosggbhcohf"), "k", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatiiswacffg"), - "ppfufl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawkfvhqcrailvp"))) - .withAnnotations(Arrays.asList("datamh", "datalxyjr", "datasag")) - .withAdditionalProperties(mapOf("type", "LinkedService")); + LinkedService model + = new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("doy") + .withParameters(mapOf("sl", "datafthnzdn", "ynduha", "datanayqi"))) + .withDescription("qlkth") + .withParameters(mapOf("cdui", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datalbg"), + "lfmmdnbbglzpswi", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datagccymvaolpssl"), + "bzmnvdfznud", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datacwyhzdxssa"), + "ylpstdbhhxsrzdz", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datavxzbncb"))) + .withAnnotations(Arrays.asList("datarsc", "datantnev", "dataiwjmygtdssls")) + .withAdditionalProperties(mapOf("type", "nqpeh")); model = BinaryData.fromObject(model).toObject(LinkedService.class); - Assertions.assertEquals("tdum", model.connectVia().referenceName()); - Assertions.assertEquals("hdneuelfph", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("uvxzxclvi").type()); + Assertions.assertEquals("doy", model.connectVia().referenceName()); + Assertions.assertEquals("qlkth", model.description()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("cdui").type()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java index 6cda44a355638..62b09c0e75904 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java @@ -6,18 +6,15 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; import com.azure.resourcemanager.datafactory.models.LinkedService; import com.azure.resourcemanager.datafactory.models.LinkedServiceResource; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -25,54 +22,40 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class LinkedServicesCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"kwqavxljaybgxxm\",\"parameters\":{\"bfjhvkttusyx\":\"datagxhrtansjboiyqi\",\"tjjmtkwg\":\"datayvfwyoqjttrivi\"}},\"description\":\"fjv\",\"parameters\":{\"s\":{\"type\":\"Object\",\"defaultValue\":\"datayoesxnmvslhnc\"},\"qhatwxq\":{\"type\":\"SecureString\",\"defaultValue\":\"dataglaxvn\"},\"huudtiecnpka\":{\"type\":\"Float\",\"defaultValue\":\"databirzjhaicyuplm\"},\"c\":{\"type\":\"Array\",\"defaultValue\":\"dataqjtoeaugwosrywp\"}},\"annotations\":[\"datania\"],\"\":{\"xrrjudgnphgsdq\":\"datazdecgiomdcolwq\",\"gnl\":\"datawfmvpsvwwtncvnoq\",\"obfnbdpaoijxqgf\":\"dataicovvd\"}},\"name\":\"trvvhxjfkpu\",\"type\":\"sjayrlk\",\"etag\":\"nircmodwslhzla\",\"id\":\"qgavcwxwkjambfs\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - 
Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"pmjtfymvlvosbc\",\"connectVia\":{\"referenceName\":\"bbrkuss\",\"parameters\":{\"aspglc\":\"datasbucmth\"}},\"description\":\"raer\",\"parameters\":{\"yox\":{\"type\":\"Float\",\"defaultValue\":\"datayhwdogchdqtlbnkr\"},\"sr\":{\"type\":\"String\",\"defaultValue\":\"datae\"},\"g\":{\"type\":\"Bool\",\"defaultValue\":\"datazs\"}},\"annotations\":[\"datasbp\"],\"\":{\"tbn\":\"datamgwmmwjugaqy\",\"qfiigslbriawkn\":\"datayspbghnnxkouvsmm\",\"gerqtcxk\":\"datacdfcyeyulmxonobo\",\"z\":\"datai\"}},\"name\":\"crucz\",\"type\":\"juxijmawsamdfwex\",\"etag\":\"bdglsll\",\"id\":\"armlbqekvj\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - LinkedServiceResource response - = manager.linkedServices().define("sqtrtc").withExistingFactory("yhodtugrw", "wxfkgzgveudmidt") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("domzfwh") - .withParameters(mapOf("zt", "dataox", "icznotggy", "dataa", "udmiutzuriqlksba", - "datanssghafzdzdf", "lqheqzd", "datayxtiqzjrxh"))) - .withDescription("a") - .withParameters(mapOf("kgrxhpx", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("datagymdywjzqmbeipxd"))) - .withAnnotations(Arrays.asList("datauair", "dataxquoweamnxzdu")) - .withAdditionalProperties(mapOf("type", "LinkedService"))) - .withIfMatch("qqak").create(); + LinkedServiceResource response = manager.linkedServices() + .define("g") + .withExistingFactory("colf", "ftz") + .withProperties(new LinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ivt") + .withParameters(mapOf("rqiijkvop", "datauzyyniv", "odmnytdborujflt", "dataamtxvnelw", "n", + "dataxwfwlfqbgugvr", "khdxqaccdzb", "dataeiwk"))) + .withDescription("ot") + .withParameters(mapOf("jzv", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datasxgaojwulatq"))) + .withAnnotations(Arrays.asList("dataslifcwj", "datayfdjzefkzcajav", "datamokaqnu")) + .withAdditionalProperties(mapOf("type", "fnl"))) + .withIfMatch("pz") + .create(); - Assertions.assertEquals("qgavcwxwkjambfs", response.id()); - Assertions.assertEquals("kwqavxljaybgxxm", response.properties().connectVia().referenceName()); - Assertions.assertEquals("fjv", response.properties().description()); - Assertions.assertEquals(ParameterType.OBJECT, response.properties().parameters().get("s").type()); + Assertions.assertEquals("armlbqekvj", response.id()); + Assertions.assertEquals("bbrkuss", response.properties().connectVia().referenceName()); + Assertions.assertEquals("raer", response.properties().description()); + Assertions.assertEquals(ParameterType.FLOAT, response.properties().parameters().get("yox").type()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java index 1454377934bcb..c9ba147037578 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java @@ -6,47 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class LinkedServicesDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.linkedServices().deleteWithResponse("pf", "ttcmwqrbtad", "dkbndkofrhuycn", - com.azure.core.util.Context.NONE); + manager.linkedServices().deleteWithResponse("udfikduw", "khma", "g", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java index 5690065fde8ef..337c592a241c0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java @@ -6,56 +6,39 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.LinkedServiceResource; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class LinkedServicesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"urfjppmiluik\",\"parameters\":{\"lhnkvipjinjik\":\"dataqkxjcqdnzhjlb\"}},\"description\":\"c\",\"parameters\":{\"r\":{\"type\":\"Bool\",\"defaultValue\":\"datajzxezo\"},\"xyc\":{\"type\":\"Array\",\"defaultValue\":\"datawthslztxixngwe\"},\"nlscfbwkhle\":{\"type\":\"Float\",\"defaultValue\":\"datafldfwqnbco\"}},\"annotations\":[\"databoprgxdcnbzpc\"],\"\":{\"rk\":\"dataumepzekmupdvnan\",\"mhsrwqpcxy\":\"datazlaomteqnt\",\"cgelipoequjkhum\":\"datajei\",\"m\":\"datarxxcbptvvwf\"}},\"name\":\"jhinmhccwmr\",\"type\":\"vlbc\",\"etag\":\"dgydbsr\",\"id\":\"fxoktokmsyohxm\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"properties\":{\"type\":\"it\",\"connectVia\":{\"referenceName\":\"uzjlcmzghao\",\"parameters\":{\"hrlhtknmpj\":\"datap\",\"vubpjwwviesj\":\"datarcrxxkvuzpsoujc\"}},\"description\":\"izhicxlm\",\"parameters\":{\"qowrtniymaznmql\":{\"type\":\"SecureString\",\"defaultValue\":\"datahqetmpqcxrwtyg\"},\"nryjqboylm\":{\"type\":\"Object\",\"defaultValue\":\"datappagv\"},\"tbflechgiqxknjr\":{\"type\":\"String\",\"defaultValue\":\"datazvmftxkwicg\"},\"hfccbbhztqiay\":{\"type\":\"String\",\"defaultValue\":\"datanqidwsggjkzulih\"}},\"annotations\":[\"datalpdjtliil\"],\"\":{\"indcakansjrznyd\":\"dataaqoiif\",\"pwtkce\":\"dataslyd\"}},\"name\":\"yngupphvo\",\"type\":\"ocjsadbuvvpdj\",\"etag\":\"nndvvgs\",\"id\":\"vz\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); LinkedServiceResource response = manager.linkedServices() - .getWithResponse("faagpjslrf", "xlutfbhsenn", "cbxta", "xdkboyqescvcvut", com.azure.core.util.Context.NONE) + .getWithResponse("qwsec", "swoibqnuhr", "iwnb", "gesaolceb", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("fxoktokmsyohxm", response.id()); - Assertions.assertEquals("urfjppmiluik", response.properties().connectVia().referenceName()); - Assertions.assertEquals("c", response.properties().description()); - Assertions.assertEquals(ParameterType.BOOL, response.properties().parameters().get("r").type()); + Assertions.assertEquals("vz", response.id()); + Assertions.assertEquals("uzjlcmzghao", response.properties().connectVia().referenceName()); + Assertions.assertEquals("izhicxlm", response.properties().description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + response.properties().parameters().get("qowrtniymaznmql").type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java index b1b6b06b84ef7..6a963cf884254 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java @@ -6,57 +6,40 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import 
com.azure.resourcemanager.datafactory.models.LinkedServiceResource; import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class LinkedServicesListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"elcbmmrhogxex\",\"parameters\":{\"eeifo\":\"datax\",\"cnfjyggio\":\"datavzmqdnfon\",\"arx\":\"datagv\",\"mjygnixkpadjqjwl\":\"datar\"}},\"description\":\"eibucmfv\",\"parameters\":{\"wuzwydsvgonkomua\":{\"type\":\"Object\",\"defaultValue\":\"datasxnkmaezxldmz\"},\"qvul\":{\"type\":\"SecureString\",\"defaultValue\":\"datakwiytg\"},\"vxfyqsfy\":{\"type\":\"Array\",\"defaultValue\":\"datajdbcypv\"}},\"annotations\":[\"datahbfpzfvqlmzpc\",\"datax\",\"datacslmyrsojqpjba\",\"datafnxdi\"],\"\":{\"c\":\"dataulvmval\",\"fcexbtwic\":\"datahysphdhtcop\",\"e\":\"datahx\",\"kuemotgkyfh\":\"datagkvmmkwa\"}},\"name\":\"mwqkfsvzczisiqns\",\"type\":\"wjfuhq\",\"etag\":\"tdnufvzxosrstev\",\"id\":\"ssaubmdoji\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"type\":\"qeywmqhnl\",\"connectVia\":{\"referenceName\":\"fqglvflxltngmulp\",\"parameters\":{\"ymybryvlhvrgjpmz\":\"dataorkzwuppvgqvfz\",\"xqwy\":\"databgitkowflc\",\"y\":\"datavuaiqqgay\"}},\"description\":\"jdvtlygwxilbaz\",\"parameters\":{\"qzwqlh\":{\"type\":\"Object\",\"defaultValue\":\"dataslbrowbfsly\"},\"wlernchdxpsonkk\":{\"type\":\"SecureString\",\"defaultValue\":\"datajzrifgubpno\"}},\"annotations\":[\"datamojz\",\"datangmkeunpb\",\"datamssnacg\",\"datawmqgya\"],\"\":{\"qyc\":\"dataxwxnnjvodqne\",\"g\":\"datanrctrpu\"}},\"name\":\"o\",\"type\":\"yvwxcerhhzjh\",\"etag\":\"ynszadgv\",\"id\":\"oxmlb\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = 
manager.linkedServices().listByFactory("wfujvgvrpear", "ohppupucybtr", com.azure.core.util.Context.NONE); + = manager.linkedServices().listByFactory("h", "sbx", com.azure.core.util.Context.NONE); - Assertions.assertEquals("ssaubmdoji", response.iterator().next().id()); - Assertions.assertEquals("elcbmmrhogxex", response.iterator().next().properties().connectVia().referenceName()); - Assertions.assertEquals("eibucmfv", response.iterator().next().properties().description()); + Assertions.assertEquals("oxmlb", response.iterator().next().id()); + Assertions.assertEquals("fqglvflxltngmulp", + response.iterator().next().properties().connectVia().referenceName()); + Assertions.assertEquals("jdvtlygwxilbaz", response.iterator().next().properties().description()); Assertions.assertEquals(ParameterType.OBJECT, - response.iterator().next().properties().parameters().get("wuzwydsvgonkomua").type()); + response.iterator().next().properties().parameters().get("qzwqlh").type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java index e2b8d44592023..8bccece3618ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java @@ -15,19 +15,22 @@ public final class LogLocationSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LogLocationSettings model = BinaryData.fromString( - "{\"linkedServiceName\":{\"referenceName\":\"htwhh\",\"parameters\":{\"jkerdujfnb\":\"dataomfo\"}},\"path\":\"datamroadutogbkdcts\"}") + "{\"linkedServiceName\":{\"referenceName\":\"uftpv\",\"parameters\":{\"vmlltasu\":\"datazitc\",\"zeszv\":\"dataqsf\",\"vgmbir\":\"datagawbm\",\"rbqxisavktuxwzvl\":\"datav\"}},\"path\":\"databridagwuvcdymoqv\"}") .toObject(LogLocationSettings.class); - Assertions.assertEquals("htwhh", model.linkedServiceName().referenceName()); + Assertions.assertEquals("uftpv", model.linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LogLocationSettings model = new LogLocationSettings() - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("htwhh").withParameters(mapOf("jkerdujfnb", "dataomfo"))) - .withPath("datamroadutogbkdcts"); + LogLocationSettings model + = new LogLocationSettings() + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("uftpv") + .withParameters(mapOf("vmlltasu", "datazitc", "zeszv", "dataqsf", "vgmbir", "datagawbm", + "rbqxisavktuxwzvl", "datav"))) + .withPath("databridagwuvcdymoqv"); model = BinaryData.fromObject(model).toObject(LogLocationSettings.class); - Assertions.assertEquals("htwhh", model.linkedServiceName().referenceName()); + Assertions.assertEquals("uftpv", model.linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java index 357627226a119..9276428a596d1 100644 
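// Illustrative sketch, not part of the generated patch: the regenerated mock tests above
// (LinkedServicesDeleteWithResponse/GetWithResponse/ListByFactory) all drop the Mockito-mocked
// HttpClient/HttpResponse plumbing in favor of a lambda HttpClient that answers every request with a
// com.azure.core.test.http.MockHttpResponse. A minimal, self-contained form of that pattern, using a
// placeholder "{}" body, a placeholder class name, and placeholder resource names instead of the
// generated values, would look roughly like this:

import com.azure.core.credential.AccessToken;
import com.azure.core.http.HttpClient;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.core.test.http.MockHttpResponse;
import com.azure.resourcemanager.datafactory.DataFactoryManager;
import java.nio.charset.StandardCharsets;
import java.time.OffsetDateTime;
import reactor.core.publisher.Mono;

public final class MockHttpClientPatternSketch {
    public static void main(String[] args) {
        // Canned response body; the generated tests embed operation-specific JSON here.
        String responseStr = "{}";
        // HttpClient exposes a single abstract send(HttpRequest) method, so a lambda can stand in for
        // the whole HTTP stack and answer every request with a 200 MockHttpResponse.
        HttpClient httpClient = request -> Mono
            .just(new MockHttpResponse(request, 200, responseStr.getBytes(StandardCharsets.UTF_8)));
        // Authenticate with a dummy token; nothing goes over the network because of the mocked client.
        DataFactoryManager manager = DataFactoryManager.configure()
            .withHttpClient(httpClient)
            .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)),
                new AzureProfile("", "", AzureEnvironment.AZURE));
        // The generated tests then invoke the operation under test and assert on the deserialized result.
        manager.linkedServices()
            .deleteWithResponse("myResourceGroup", "myFactory", "myLinkedService", com.azure.core.util.Context.NONE);
    }
}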
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java @@ -17,22 +17,22 @@ public final class LogSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LogSettings model = BinaryData.fromString( - "{\"enableCopyActivityLog\":\"datagzmmrzw\",\"copyActivityLogSettings\":{\"logLevel\":\"datatkcvolaxnuk\",\"enableReliableLogging\":\"datav\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"u\",\"parameters\":{\"zgvaeqiygbo\":\"datacqoxyxiyhmjwsnwk\",\"odidgudarclajben\":\"datavz\",\"kff\":\"datayuufvoj\"}},\"path\":\"datawaewpils\"}}") + "{\"enableCopyActivityLog\":\"dataxdwecvkwwjj\",\"copyActivityLogSettings\":{\"logLevel\":\"dataunsduejx\",\"enableReliableLogging\":\"datawalekqedofuobxml\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"i\",\"parameters\":{\"ib\":\"datahlpf\",\"gzy\":\"datalmihvzdaycmen\",\"lvgqlexwqwbbell\":\"datal\"}},\"path\":\"datadotpcvd\"}}") .toObject(LogSettings.class); - Assertions.assertEquals("u", model.logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("i", model.logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LogSettings model = new LogSettings().withEnableCopyActivityLog("datagzmmrzw") - .withCopyActivityLogSettings( - new CopyActivityLogSettings().withLogLevel("datatkcvolaxnuk").withEnableReliableLogging("datav")) + LogSettings model = new LogSettings().withEnableCopyActivityLog("dataxdwecvkwwjj") + .withCopyActivityLogSettings(new CopyActivityLogSettings().withLogLevel("dataunsduejx") + .withEnableReliableLogging("datawalekqedofuobxml")) .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("u").withParameters( - mapOf("zgvaeqiygbo", "datacqoxyxiyhmjwsnwk", "odidgudarclajben", "datavz", "kff", "datayuufvoj"))) - .withPath("datawaewpils")); + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("i") + .withParameters(mapOf("ib", "datahlpf", "gzy", "datalmihvzdaycmen", "lvgqlexwqwbbell", "datal"))) + .withPath("datadotpcvd")); model = BinaryData.fromObject(model).toObject(LogSettings.class); - Assertions.assertEquals("u", model.logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals("i", model.logLocationSettings().linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java index 5a405aac169b3..3d8ecdf052549 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java @@ -15,21 +15,23 @@ public final class LogStorageSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LogStorageSettings model = BinaryData.fromString( - 
"{\"linkedServiceName\":{\"referenceName\":\"gdlgs\",\"parameters\":{\"stwcyigrhfevxy\":\"databoysquygokh\",\"rtcsucot\":\"dataqukcojyxhhvoo\",\"jnxzvjnmpvsblud\":\"datawyiq\"}},\"path\":\"datahzukrpfbhihd\",\"logLevel\":\"dataiuexyqyfkudmlqpw\",\"enableReliableLogging\":\"datadpwrm\",\"\":{\"k\":\"datais\",\"wsxnyockpcssus\":\"datasp\"}}") + "{\"linkedServiceName\":{\"referenceName\":\"xn\",\"parameters\":{\"vectooxjztt\":\"datajfvj\",\"mdyb\":\"datalsnmxvsrvkzvxlez\",\"ogtnfla\":\"dataehjk\"}},\"path\":\"datapghfvkqijmyqo\",\"logLevel\":\"datasfaoc\",\"enableReliableLogging\":\"datazrrgdpy\",\"\":{\"vlwhtfscoups\":\"datamru\",\"c\":\"dataeywbhxhawkwcf\"}}") .toObject(LogStorageSettings.class); - Assertions.assertEquals("gdlgs", model.linkedServiceName().referenceName()); + Assertions.assertEquals("xn", model.linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { LogStorageSettings model = new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gdlgs") - .withParameters(mapOf("stwcyigrhfevxy", "databoysquygokh", "rtcsucot", "dataqukcojyxhhvoo", - "jnxzvjnmpvsblud", "datawyiq"))) - .withPath("datahzukrpfbhihd").withLogLevel("dataiuexyqyfkudmlqpw").withEnableReliableLogging("datadpwrm") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xn") + .withParameters( + mapOf("vectooxjztt", "datajfvj", "mdyb", "datalsnmxvsrvkzvxlez", "ogtnfla", "dataehjk"))) + .withPath("datapghfvkqijmyqo") + .withLogLevel("datasfaoc") + .withEnableReliableLogging("datazrrgdpy") .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(LogStorageSettings.class); - Assertions.assertEquals("gdlgs", model.linkedServiceName().referenceName()); + Assertions.assertEquals("xn", model.linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java index 9abf2073843d2..b5f165ebcc832 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java @@ -24,64 +24,70 @@ public final class LookupActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LookupActivity model = BinaryData.fromString( - 
"{\"type\":\"Lookup\",\"typeProperties\":{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datahxqpmzznmn\",\"sourceRetryWait\":\"dataswa\",\"maxConcurrentConnections\":\"databwewwdfeiehwmaxl\",\"disableMetricsCollection\":\"dataag\",\"\":{\"dowsj\":\"databeneqapll\"}},\"dataset\":{\"referenceName\":\"vpvtyullivcymnpb\",\"parameters\":{\"hcatp\":\"datal\",\"qnajmwpeaoeggi\":\"dataq\",\"ugru\":\"datalpglhlwu\"}},\"firstRowOnly\":\"dataprnbozvixamhi\"},\"linkedServiceName\":{\"referenceName\":\"rp\",\"parameters\":{\"s\":\"datakeeoc\",\"kaaozpc\":\"dataqzpkodbquvf\",\"kzgwf\":\"dataogwfqtqbnakmgydf\",\"kankjkszudx\":\"datageqzkpergzscr\"}},\"policy\":{\"timeout\":\"datavxvoqbruyma\",\"retry\":\"datanfofxirj\",\"retryIntervalInSeconds\":1351725857,\"secureInput\":true,\"secureOutput\":true,\"\":{\"vugb\":\"datastuhlwzcn\",\"puyrps\":\"datamnyfhkxcplhqzpw\"}},\"name\":\"ueacnfgt\",\"description\":\"jqfupoamc\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"rec\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"lhiqodxsscirgq\":\"dataynrsacdcfwuta\",\"yhfnjif\":\"datanfdehhk\",\"cxwjwsrdzmbz\":\"dataucojkikgbhkvhldn\"}},{\"activity\":\"fzydwexoyfseehvm\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Failed\",\"Completed\"],\"\":{\"badbbw\":\"datafwkhipaodo\",\"jxesmbuhkcshyhga\":\"dataaajgokpnbmhskh\",\"npftwgt\":\"datamtevifeoijep\",\"njpwkwxnmqmytv\":\"datacccyiuehsne\"}}],\"userProperties\":[{\"name\":\"utyfnmwmghaeedq\",\"value\":\"datakhc\"},{\"name\":\"wjn\",\"value\":\"datanlbsvtsjv\"},{\"name\":\"vzafpvwrbqbyxuup\",\"value\":\"datakbbemhwtmeqt\"},{\"name\":\"fsjpvjwbxlgpepx\",\"value\":\"datajjnxdg\"}],\"\":{\"pmm\":\"datakltlpbb\",\"tdzgngnuuz\":\"datahmvadasuevu\"}}") + "{\"type\":\"xgczwxyghsppmvxc\",\"typeProperties\":{\"source\":{\"type\":\"udranmdcfwaw\",\"sourceRetryCount\":\"datahfauubcv\",\"sourceRetryWait\":\"datafxwhicacmviclhom\",\"maxConcurrentConnections\":\"dataaxteg\",\"disableMetricsCollection\":\"dataf\",\"\":{\"jfzbavqmmk\":\"datan\"}},\"dataset\":{\"referenceName\":\"qdfjeuwwq\",\"parameters\":{\"qvywolccxdctkhe\":\"datazpskzplbzyj\",\"gemspn\":\"databosa\"}},\"firstRowOnly\":\"dataohhhexgxn\"},\"linkedServiceName\":{\"referenceName\":\"stzdxbspglcbhahx\",\"parameters\":{\"rhpihtxgjzifyh\":\"datau\",\"e\":\"datajgrbjmzagxjoimoz\",\"jgfihl\":\"datapscrfbdttcfwjzqu\",\"zb\":\"datalauo\"}},\"policy\":{\"timeout\":\"datafxvggk\",\"retry\":\"datahsny\",\"retryIntervalInSeconds\":2037856397,\"secureInput\":true,\"secureOutput\":true,\"\":{\"lzjki\":\"datatlfngonhmblkk\",\"h\":\"datamne\",\"qvcfzr\":\"dataynencaf\"}},\"name\":\"zpdq\",\"description\":\"chygbeofiwbtfkiu\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xjrafhdf\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"gqhefeasm\":\"datawnqijphhuvflg\",\"pcxxpyrtajlyde\":\"datadguodoujpwqbotlv\"}},{\"activity\":\"qfvrqru\",\"dependencyConditions\":[\"Failed\"],\"\":{\"jzvceyxvfoyuyk\":\"datauxbqdwbjh\"}}],\"userProperties\":[{\"name\":\"gyhpuhccl\",\"value\":\"datahndbutptyabd\"},{\"name\":\"bkblopemorfzuhvy\",\"value\":\"datadnd\"}],\"\":{\"kplnd\":\"datazsfvri\"}}") .toObject(LookupActivity.class); - Assertions.assertEquals("ueacnfgt", model.name()); - Assertions.assertEquals("jqfupoamc", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - 
Assertions.assertEquals("rec", model.dependsOn().get(0).activity()); + Assertions.assertEquals("zpdq", model.name()); + Assertions.assertEquals("chygbeofiwbtfkiu", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("xjrafhdf", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("utyfnmwmghaeedq", model.userProperties().get(0).name()); - Assertions.assertEquals("rp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1351725857, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("gyhpuhccl", model.userProperties().get(0).name()); + Assertions.assertEquals("stzdxbspglcbhahx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2037856397, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("vpvtyullivcymnpb", model.dataset().referenceName()); + Assertions.assertEquals("qdfjeuwwq", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - LookupActivity model = new LookupActivity().withName("ueacnfgt").withDescription("jqfupoamc") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("rec") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("fzydwexoyfseehvm") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("utyfnmwmghaeedq").withValue("datakhc"), - new UserProperty().withName("wjn").withValue("datanlbsvtsjv"), - new UserProperty().withName("vzafpvwrbqbyxuup").withValue("datakbbemhwtmeqt"), - new UserProperty().withName("fsjpvjwbxlgpepx").withValue("datajjnxdg"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rp") - .withParameters(mapOf("s", "datakeeoc", "kaaozpc", "dataqzpkodbquvf", "kzgwf", "dataogwfqtqbnakmgydf", - "kankjkszudx", "datageqzkpergzscr"))) - .withPolicy(new ActivityPolicy().withTimeout("datavxvoqbruyma").withRetry("datanfofxirj") - .withRetryIntervalInSeconds(1351725857).withSecureInput(true).withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withSource(new CopySource().withSourceRetryCount("datahxqpmzznmn").withSourceRetryWait("dataswa") - .withMaxConcurrentConnections("databwewwdfeiehwmaxl").withDisableMetricsCollection("dataag") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withDataset(new DatasetReference().withReferenceName("vpvtyullivcymnpb") - .withParameters(mapOf("hcatp", "datal", "qnajmwpeaoeggi", "dataq", "ugru", "datalpglhlwu"))) - .withFirstRowOnly("dataprnbozvixamhi"); + LookupActivity model + = new LookupActivity().withName("zpdq") + .withDescription("chygbeofiwbtfkiu") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new 
ActivityDependency().withActivity("xjrafhdf") + .withDependencyConditions( + Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("qfvrqru") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("gyhpuhccl").withValue("datahndbutptyabd"), + new UserProperty().withName("bkblopemorfzuhvy").withValue("datadnd"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("stzdxbspglcbhahx") + .withParameters(mapOf("rhpihtxgjzifyh", "datau", "e", "datajgrbjmzagxjoimoz", "jgfihl", + "datapscrfbdttcfwjzqu", "zb", "datalauo"))) + .withPolicy(new ActivityPolicy().withTimeout("datafxvggk") + .withRetry("datahsny") + .withRetryIntervalInSeconds(2037856397) + .withSecureInput(true) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withSource(new CopySource().withSourceRetryCount("datahfauubcv") + .withSourceRetryWait("datafxwhicacmviclhom") + .withMaxConcurrentConnections("dataaxteg") + .withDisableMetricsCollection("dataf") + .withAdditionalProperties(mapOf("type", "udranmdcfwaw"))) + .withDataset(new DatasetReference().withReferenceName("qdfjeuwwq") + .withParameters(mapOf("qvywolccxdctkhe", "datazpskzplbzyj", "gemspn", "databosa"))) + .withFirstRowOnly("dataohhhexgxn"); model = BinaryData.fromObject(model).toObject(LookupActivity.class); - Assertions.assertEquals("ueacnfgt", model.name()); - Assertions.assertEquals("jqfupoamc", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("rec", model.dependsOn().get(0).activity()); + Assertions.assertEquals("zpdq", model.name()); + Assertions.assertEquals("chygbeofiwbtfkiu", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("xjrafhdf", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("utyfnmwmghaeedq", model.userProperties().get(0).name()); - Assertions.assertEquals("rp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1351725857, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("gyhpuhccl", model.userProperties().get(0).name()); + Assertions.assertEquals("stzdxbspglcbhahx", model.linkedServiceName().referenceName()); + Assertions.assertEquals(2037856397, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("vpvtyullivcymnpb", model.dataset().referenceName()); + Assertions.assertEquals("qdfjeuwwq", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java index f8c98c9b39cd2..dd64437b89d37 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java @@ -16,22 +16,24 @@ public final class LookupActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { LookupActivityTypeProperties model = BinaryData.fromString( - "{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datagfojdb\",\"sourceRetryWait\":\"datasmnelqlqnwv\",\"maxConcurrentConnections\":\"dataxzdimnfnrpq\",\"disableMetricsCollection\":\"datazgncyksblre\",\"\":{\"xdemcyrblw\":\"dataakmibiylkfne\"}},\"dataset\":{\"referenceName\":\"hzyrugstbzpozqlu\",\"parameters\":{\"xol\":\"datagkttlp\",\"lqdotqe\":\"datajevww\"}},\"firstRowOnly\":\"dataenteucaojj\"}") + "{\"source\":{\"type\":\"qcqinvkmkbtpbw\",\"sourceRetryCount\":\"datazmqabvwbg\",\"sourceRetryWait\":\"datanvdrrm\",\"maxConcurrentConnections\":\"datashraepcl\",\"disableMetricsCollection\":\"datame\",\"\":{\"jeqdmolmcyba\":\"datazhrzeibkuuolul\",\"qamvdnexqvt\":\"datakeuraylygclwbu\",\"lkny\":\"datafnhzgtydllauno\",\"hhcqjahhcbzoary\":\"datapglgkeaz\"}},\"dataset\":{\"referenceName\":\"cxmftgmqlco\",\"parameters\":{\"yhjfmycgucccbsyy\":\"datafrrdb\"}},\"firstRowOnly\":\"dataxlt\"}") .toObject(LookupActivityTypeProperties.class); - Assertions.assertEquals("hzyrugstbzpozqlu", model.dataset().referenceName()); + Assertions.assertEquals("cxmftgmqlco", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { LookupActivityTypeProperties model = new LookupActivityTypeProperties() - .withSource(new CopySource().withSourceRetryCount("datagfojdb").withSourceRetryWait("datasmnelqlqnwv") - .withMaxConcurrentConnections("dataxzdimnfnrpq").withDisableMetricsCollection("datazgncyksblre") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withDataset(new DatasetReference().withReferenceName("hzyrugstbzpozqlu") - .withParameters(mapOf("xol", "datagkttlp", "lqdotqe", "datajevww"))) - .withFirstRowOnly("dataenteucaojj"); + .withSource(new CopySource().withSourceRetryCount("datazmqabvwbg") + .withSourceRetryWait("datanvdrrm") + .withMaxConcurrentConnections("datashraepcl") + .withDisableMetricsCollection("datame") + .withAdditionalProperties(mapOf("type", "qcqinvkmkbtpbw"))) + .withDataset(new DatasetReference().withReferenceName("cxmftgmqlco") + .withParameters(mapOf("yhjfmycgucccbsyy", "datafrrdb"))) + .withFirstRowOnly("dataxlt"); model = BinaryData.fromObject(model).toObject(LookupActivityTypeProperties.class); - Assertions.assertEquals("hzyrugstbzpozqlu", model.dataset().referenceName()); + Assertions.assertEquals("cxmftgmqlco", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java index 0cd274a5bb337..2cb0fa6bca296 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java @@ -19,32 +19,32 @@ public final class MagentoObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MagentoObjectDataset model = BinaryData.fromString( - "{\"type\":\"MagentoObject\",\"typeProperties\":{\"tableName\":\"datagfb\"},\"description\":\"kxasomafe\",\"structure\":\"dataz\",\"schema\":\"datardxpcpautfzptr\",\"linkedServiceName\":{\"referenceName\":\"dzytrtffvp\",\"parameters\":{\"oqatdjka\":\"datacyuwenbqvpraw\"}},\"parameters\":{\"xfzynfemq\":{\"type\":\"SecureString\",\"defaultValue\":\"datacs\"},\"uup\":{\"type\":\"SecureString\",\"defaultValue\":\"datakpgwgssd\"},\"pvirzyud\":{\"type\":\"Int\",\"defaultValue\":\"databnlqyvdsqxkjwd\"}},\"annotations\":[\"dataxrxhxmlfouqp\"],\"folder\":{\"name\":\"andbp\"},\"\":{\"jxcqcaczzvwaeztt\":\"datah\",\"qlyyslg\":\"datajqyfy\",\"bdsvkllrzhshhkb\":\"dataf\",\"rgfwhfzh\":\"datahcazkgdjth\"}}") + "{\"type\":\"mqfwbgdmfv\",\"typeProperties\":{\"tableName\":\"dataima\"},\"description\":\"fmpota\",\"structure\":\"datapfigrxxtrcodb\",\"schema\":\"dataogldmxxb\",\"linkedServiceName\":{\"referenceName\":\"hspvamsxrwqlwdf\",\"parameters\":{\"bboffgxtae\":\"datarplzeqzv\",\"fcyatbxdwr\":\"dataxt\",\"fbpeigkflvovriq\":\"datayvtkmxvztshnu\"}},\"parameters\":{\"txur\":{\"type\":\"Float\",\"defaultValue\":\"datakqcgzygtdjhtbar\"}},\"annotations\":[\"datayyumhzpst\",\"datacqacvttyh\",\"databilnszyjbuw\"],\"folder\":{\"name\":\"sydsci\"},\"\":{\"l\":\"dataayioxpqgqs\",\"akqsjymcfv\":\"datalefeombodvdgf\",\"nbpkfnxrlncmlzvv\":\"datazceuyuqktck\",\"cjqzrevfwcba\":\"datamesfhqs\"}}") .toObject(MagentoObjectDataset.class); - Assertions.assertEquals("kxasomafe", model.description()); - Assertions.assertEquals("dzytrtffvp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xfzynfemq").type()); - Assertions.assertEquals("andbp", model.folder().name()); + Assertions.assertEquals("fmpota", model.description()); + Assertions.assertEquals("hspvamsxrwqlwdf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); + Assertions.assertEquals("sydsci", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MagentoObjectDataset model = new MagentoObjectDataset().withDescription("kxasomafe").withStructure("dataz") - .withSchema("datardxpcpautfzptr") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dzytrtffvp") - .withParameters(mapOf("oqatdjka", "datacyuwenbqvpraw"))) - .withParameters(mapOf("xfzynfemq", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacs"), "uup", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datakpgwgssd"), - "pvirzyud", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databnlqyvdsqxkjwd"))) - .withAnnotations(Arrays.asList("dataxrxhxmlfouqp")).withFolder(new DatasetFolder().withName("andbp")) - .withTableName("datagfb"); + MagentoObjectDataset model = new MagentoObjectDataset().withDescription("fmpota") + .withStructure("datapfigrxxtrcodb") + .withSchema("dataogldmxxb") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hspvamsxrwqlwdf") + .withParameters(mapOf("bboffgxtae", "datarplzeqzv", "fcyatbxdwr", "dataxt", 
"fbpeigkflvovriq", + "datayvtkmxvztshnu"))) + .withParameters(mapOf("txur", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakqcgzygtdjhtbar"))) + .withAnnotations(Arrays.asList("datayyumhzpst", "datacqacvttyh", "databilnszyjbuw")) + .withFolder(new DatasetFolder().withName("sydsci")) + .withTableName("dataima"); model = BinaryData.fromObject(model).toObject(MagentoObjectDataset.class); - Assertions.assertEquals("kxasomafe", model.description()); - Assertions.assertEquals("dzytrtffvp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xfzynfemq").type()); - Assertions.assertEquals("andbp", model.folder().name()); + Assertions.assertEquals("fmpota", model.description()); + Assertions.assertEquals("hspvamsxrwqlwdf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); + Assertions.assertEquals("sydsci", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java index d31b98c31a73a..d1cd23c4902e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java @@ -11,16 +11,19 @@ public final class MagentoSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MagentoSource model = BinaryData.fromString( - "{\"type\":\"MagentoSource\",\"query\":\"dataxmobnehbbchtcoel\",\"queryTimeout\":\"datafnpxumgnjmsk\",\"additionalColumns\":\"dataeuogjiowande\",\"sourceRetryCount\":\"dataebpalz\",\"sourceRetryWait\":\"dataptg\",\"maxConcurrentConnections\":\"datarz\",\"disableMetricsCollection\":\"datacfdsvmptnrz\",\"\":{\"ovqpnxpufvggv\":\"datacncdazwtlgora\"}}") + "{\"type\":\"omdlsps\",\"query\":\"datagdkpyavfcpohlfv\",\"queryTimeout\":\"dataucxtyufso\",\"additionalColumns\":\"datakmcu\",\"sourceRetryCount\":\"dataxpnyg\",\"sourceRetryWait\":\"dataqi\",\"maxConcurrentConnections\":\"datalxoxwndfuyj\",\"disableMetricsCollection\":\"datagal\",\"\":{\"oa\":\"datalzuztdwx\",\"p\":\"datab\"}}") .toObject(MagentoSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MagentoSource model = new MagentoSource().withSourceRetryCount("dataebpalz").withSourceRetryWait("dataptg") - .withMaxConcurrentConnections("datarz").withDisableMetricsCollection("datacfdsvmptnrz") - .withQueryTimeout("datafnpxumgnjmsk").withAdditionalColumns("dataeuogjiowande") - .withQuery("dataxmobnehbbchtcoel"); + MagentoSource model = new MagentoSource().withSourceRetryCount("dataxpnyg") + .withSourceRetryWait("dataqi") + .withMaxConcurrentConnections("datalxoxwndfuyj") + .withDisableMetricsCollection("datagal") + .withQueryTimeout("dataucxtyufso") + .withAdditionalColumns("datakmcu") + .withQuery("datagdkpyavfcpohlfv"); model = BinaryData.fromObject(model).toObject(MagentoSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceInnerTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceInnerTests.java deleted file mode 100644 index 96cca5e39ca37..0000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceInnerTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityCredentialResourceInner; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ManagedIdentityCredentialResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedIdentityCredentialResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"gpgdphtvdulaj\"},\"description\":\"ejchcsrlz\",\"annotations\":[\"datazlanrupdwvnph\",\"datanzqtpjhmqrhvt\"],\"\":{\"xetlgydlhqv\":\"dataiwdcxsmlzzhzd\",\"pxy\":\"datan\",\"klbyulidwcw\":\"dataafiqgeaarbgjekg\",\"hj\":\"datamzegjon\"}},\"name\":\"wgdnqzbr\",\"type\":\"spzhzmtksjc\",\"etag\":\"digsxcdgl\",\"id\":\"lkeuac\"}") - .toObject(ManagedIdentityCredentialResourceInner.class); - Assertions.assertEquals("lkeuac", model.id()); - Assertions.assertEquals("ejchcsrlz", model.properties().description()); - Assertions.assertEquals("gpgdphtvdulaj", model.properties().resourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedIdentityCredentialResourceInner model = new ManagedIdentityCredentialResourceInner().withId("lkeuac") - .withProperties(new ManagedIdentityCredential().withDescription("ejchcsrlz") - .withAnnotations(Arrays.asList("datazlanrupdwvnph", "datanzqtpjhmqrhvt")) - .withResourceId("gpgdphtvdulaj")); - model = BinaryData.fromObject(model).toObject(ManagedIdentityCredentialResourceInner.class); - Assertions.assertEquals("lkeuac", model.id()); - Assertions.assertEquals("ejchcsrlz", model.properties().description()); - Assertions.assertEquals("gpgdphtvdulaj", model.properties().resourceId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceTests.java new file mode 100644 index 0000000000000..b2f8346d79118 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialResourceTests.java @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
+ +package com.azure.resourcemanager.datafactory.generated; + +import com.azure.core.util.BinaryData; +import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; +import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredentialResource; +import java.util.Arrays; +import org.junit.jupiter.api.Assertions; + +public final class ManagedIdentityCredentialResourceTests { + @org.junit.jupiter.api.Test + public void testDeserialize() throws Exception { + ManagedIdentityCredentialResource model = BinaryData.fromString( + "{\"properties\":{\"type\":\"yxxbxqvmvuayt\",\"description\":\"dxk\",\"annotations\":[\"databwpntghy\",\"datasa\",\"datacdrnxsl\"],\"\":{\"v\":\"datazladltxkpbq\",\"isgglmvokat\":\"datadqqjwkrhwzdano\",\"xsmzygdf\":\"dataztjctibpvbkae\",\"eivmak\":\"dataakw\"}},\"name\":\"ysowl\",\"type\":\"x\",\"etag\":\"bectvtfjmskdch\",\"id\":\"iubavlzwpvgm\"}") + .toObject(ManagedIdentityCredentialResource.class); + Assertions.assertEquals("iubavlzwpvgm", model.id()); + Assertions.assertEquals("dxk", model.properties().description()); + } + + @org.junit.jupiter.api.Test + public void testSerialize() throws Exception { + ManagedIdentityCredentialResource model = new ManagedIdentityCredentialResource().withId("iubavlzwpvgm") + .withProperties(new ManagedIdentityCredential().withDescription("dxk") + .withAnnotations(Arrays.asList("databwpntghy", "datasa", "datacdrnxsl"))); + model = BinaryData.fromObject(model).toObject(ManagedIdentityCredentialResource.class); + Assertions.assertEquals("iubavlzwpvgm", model.id()); + Assertions.assertEquals("dxk", model.properties().description()); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java index f96dcf6181ae0..aa7aee7d96962 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java @@ -13,19 +13,19 @@ public final class ManagedIdentityCredentialTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedIdentityCredential model = BinaryData.fromString( - "{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"mflrytswfpfmdgyc\"},\"description\":\"mskwhqjjysl\",\"annotations\":[\"datapshhkvpedwqslsr\",\"datampqvwwsk\",\"datandcbrwi\",\"datauvqejosovyrrle\"],\"\":{\"bbpihehcecy\":\"datainuqtljq\",\"kfrexcrseqwjks\":\"datamrqbrjbbmpxdlv\",\"zhxogjggsvo\":\"datahud\",\"hrkmdyomkxfbvfbh\":\"datajkxibda\"}}") + "{\"type\":\"rthqe\",\"typeProperties\":{\"resourceId\":\"jqafkmkro\"},\"description\":\"pqrtvaoznqni\",\"annotations\":[\"datazeagmceituuge\",\"datahfpjstlzmbls\",\"datajdeolctae\",\"datafsyrledjc\"],\"\":{\"igdx\":\"databvt\"}}") .toObject(ManagedIdentityCredential.class); - Assertions.assertEquals("mskwhqjjysl", model.description()); - Assertions.assertEquals("mflrytswfpfmdgyc", model.resourceId()); + Assertions.assertEquals("pqrtvaoznqni", model.description()); + Assertions.assertEquals("jqafkmkro", model.resourceId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ManagedIdentityCredential model = new 
ManagedIdentityCredential().withDescription("mskwhqjjysl") - .withAnnotations(Arrays.asList("datapshhkvpedwqslsr", "datampqvwwsk", "datandcbrwi", "datauvqejosovyrrle")) - .withResourceId("mflrytswfpfmdgyc"); + ManagedIdentityCredential model = new ManagedIdentityCredential().withDescription("pqrtvaoznqni") + .withAnnotations(Arrays.asList("datazeagmceituuge", "datahfpjstlzmbls", "datajdeolctae", "datafsyrledjc")) + .withResourceId("jqafkmkro"); model = BinaryData.fromObject(model).toObject(ManagedIdentityCredential.class); - Assertions.assertEquals("mskwhqjjysl", model.description()); - Assertions.assertEquals("mflrytswfpfmdgyc", model.resourceId()); + Assertions.assertEquals("pqrtvaoznqni", model.description()); + Assertions.assertEquals("jqafkmkro", model.resourceId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java index 6626252a83865..37b7b89355fc1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java @@ -12,14 +12,14 @@ public final class ManagedIdentityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedIdentityTypeProperties model - = BinaryData.fromString("{\"resourceId\":\"i\"}").toObject(ManagedIdentityTypeProperties.class); - Assertions.assertEquals("i", model.resourceId()); + = BinaryData.fromString("{\"resourceId\":\"sgeafgfosehx\"}").toObject(ManagedIdentityTypeProperties.class); + Assertions.assertEquals("sgeafgfosehx", model.resourceId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ManagedIdentityTypeProperties model = new ManagedIdentityTypeProperties().withResourceId("i"); + ManagedIdentityTypeProperties model = new ManagedIdentityTypeProperties().withResourceId("sgeafgfosehx"); model = BinaryData.fromObject(model).toObject(ManagedIdentityTypeProperties.class); - Assertions.assertEquals("i", model.resourceId()); + Assertions.assertEquals("sgeafgfosehx", model.resourceId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java index 569aea4545ab9..6ebb4cc0218d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java @@ -18,41 +18,31 @@ public final class ManagedPrivateEndpointListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedPrivateEndpointListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"wdkqzeqy\",\"description\":\"eziunjxdfzant\",\"status\":\"cegyamlbnseqacj\"},\"fqdns\":[\"ilguooqjagmditg\",\"eiookjbsah\",\"tdtpdelqacslmo\"],\"groupId\":\"ebnfxofvc\",\"isReserved\":false,\"privateLinkResourceId\":\"irazftxejwabmd\",\"provisioningState\":\"tmvcop\",\"\":{\"urbuhhlkyqltq\":\"datam\"}},\"name\":\"ogtu\",\"type\":\"ffdjktsysidfvclg\",\"etag\":\"n\",\"id\":\"ijtk\"},{\"properties\":{\"connectionState\":{\"actionsRequired\":\"qogsfikayian\",\"description\":\"arujt\",\"status\":\"qxfzyjqttvwk\"},\"fqdns\":[\"j\",\"enuygbq\",\"qqekewvnqvcdlgu\"],\"groupId\":\"cmfdjwnlax\",\"isReserved\":false,\"privateLinkResourceId\":\"qikczvvita\",\"provisioningState\":\"xmfcsserxhtv\",\"\":{\"sxypruuu\":\"datahlwntsjgq\"}},\"name\":\"nchrszizoyu\",\"type\":\"yetnd\",\"etag\":\"fqyggagflnlgmtr\",\"id\":\"hzjmucftbyrp\"},{\"properties\":{\"connectionState\":{\"actionsRequired\":\"hkpigqfusuckzmkw\",\"description\":\"snoxaxmqeqa\",\"status\":\"hjnhgwydyynfsvk\"},\"fqdns\":[\"vqtanarfdlpuk\"],\"groupId\":\"yrneizjcpeo\",\"isReserved\":true,\"privateLinkResourceId\":\"mgbro\",\"provisioningState\":\"ddbhf\",\"\":{\"zoyw\":\"datapaz\",\"htuevrhrljy\":\"dataxhpdulontacnpqwt\",\"reur\":\"dataogwxhnsduugwb\",\"fuarenlvhht\":\"dataq\"}},\"name\":\"nvnaf\",\"type\":\"kyfede\",\"etag\":\"bo\",\"id\":\"cqxypokkhminq\"}],\"nextLink\":\"mczngn\"}") + "{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"itlhguyn\",\"description\":\"hlgmltxdwhmoz\",\"status\":\"gzvlnsnn\"},\"fqdns\":[\"pafolp\",\"mwamxqzragpgdph\",\"vdulajv\",\"ejchcsrlz\"],\"groupId\":\"mzlanru\",\"isReserved\":false,\"privateLinkResourceId\":\"nphcnzqtpjhmqrh\",\"provisioningState\":\"hlaiwd\",\"\":{\"hqvlnnpxybafiqg\":\"datamlzzhzdtxetlgyd\",\"klbyulidwcw\":\"dataaarbgjekg\",\"hj\":\"datamzegjon\",\"zhzmtksjci\":\"datarwgdnqzbrfks\"}},\"name\":\"igsxcdgljplk\",\"type\":\"acht\",\"etag\":\"flrytswfpfm\",\"id\":\"ycxnmskw\"}],\"nextLink\":\"jjyslurlps\"}") .toObject(ManagedPrivateEndpointListResponse.class); - Assertions.assertEquals("ijtk", model.value().get(0).id()); - Assertions.assertEquals("ilguooqjagmditg", model.value().get(0).properties().fqdns().get(0)); - Assertions.assertEquals("ebnfxofvc", model.value().get(0).properties().groupId()); - Assertions.assertEquals("irazftxejwabmd", model.value().get(0).properties().privateLinkResourceId()); - Assertions.assertEquals("mczngn", model.nextLink()); + Assertions.assertEquals("ycxnmskw", model.value().get(0).id()); + Assertions.assertEquals("pafolp", model.value().get(0).properties().fqdns().get(0)); + Assertions.assertEquals("mzlanru", model.value().get(0).properties().groupId()); + Assertions.assertEquals("nphcnzqtpjhmqrh", model.value().get(0).properties().privateLinkResourceId()); + Assertions.assertEquals("jjyslurlps", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ManagedPrivateEndpointListResponse model = new ManagedPrivateEndpointListResponse() - .withValue(Arrays.asList( - new ManagedPrivateEndpointResourceInner().withId("ijtk") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("ilguooqjagmditg", "eiookjbsah", "tdtpdelqacslmo")) - .withGroupId("ebnfxofvc").withPrivateLinkResourceId("irazftxejwabmd") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "tmvcop"))), - new ManagedPrivateEndpointResourceInner().withId("hzjmucftbyrp") - 
.withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("j", "enuygbq", "qqekewvnqvcdlgu")).withGroupId("cmfdjwnlax") - .withPrivateLinkResourceId("qikczvvita") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "xmfcsserxhtv"))), - new ManagedPrivateEndpointResourceInner().withId("cqxypokkhminq") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("vqtanarfdlpuk")).withGroupId("yrneizjcpeo") - .withPrivateLinkResourceId("mgbro") - .withAdditionalProperties(mapOf("isReserved", true, "provisioningState", "ddbhf"))))) - .withNextLink("mczngn"); + .withValue(Arrays.asList(new ManagedPrivateEndpointResourceInner().withId("ycxnmskw") + .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) + .withFqdns(Arrays.asList("pafolp", "mwamxqzragpgdph", "vdulajv", "ejchcsrlz")) + .withGroupId("mzlanru") + .withPrivateLinkResourceId("nphcnzqtpjhmqrh") + .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "hlaiwd"))))) + .withNextLink("jjyslurlps"); model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpointListResponse.class); - Assertions.assertEquals("ijtk", model.value().get(0).id()); - Assertions.assertEquals("ilguooqjagmditg", model.value().get(0).properties().fqdns().get(0)); - Assertions.assertEquals("ebnfxofvc", model.value().get(0).properties().groupId()); - Assertions.assertEquals("irazftxejwabmd", model.value().get(0).properties().privateLinkResourceId()); - Assertions.assertEquals("mczngn", model.nextLink()); + Assertions.assertEquals("ycxnmskw", model.value().get(0).id()); + Assertions.assertEquals("pafolp", model.value().get(0).properties().fqdns().get(0)); + Assertions.assertEquals("mzlanru", model.value().get(0).properties().groupId()); + Assertions.assertEquals("nphcnzqtpjhmqrh", model.value().get(0).properties().privateLinkResourceId()); + Assertions.assertEquals("jjyslurlps", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java index 741cdf9ca68be..69bf689cc2a66 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java @@ -17,26 +17,27 @@ public final class ManagedPrivateEndpointResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedPrivateEndpointResourceInner model = BinaryData.fromString( - "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"x\",\"description\":\"unin\",\"status\":\"db\"},\"fqdns\":[\"qdtvqecrqctmxx\",\"tddmf\",\"huytxzvtzn\",\"pxbannovvoxc\"],\"groupId\":\"tprwnw\",\"isReserved\":true,\"privateLinkResourceId\":\"vytlyokrrrouuxvn\",\"provisioningState\":\"sbcrymodizrxklo\",\"\":{\"lmv\":\"datanazpmk\",\"zxlioh\":\"datavfxzopjh\",\"dtfgxqbawpcbb\":\"datad\"}},\"name\":\"qcy\",\"type\":\"apqofyuicdhz\",\"etag\":\"ybww\",\"id\":\"d\"}") + 
"{\"properties\":{\"connectionState\":{\"actionsRequired\":\"vpedwqslsrh\",\"description\":\"qvwwsko\",\"status\":\"cbrwi\"},\"fqdns\":[\"qejo\",\"ovyrrleaesinu\",\"tljqobbpih\"],\"groupId\":\"cecybmrqbrjbbmpx\",\"isReserved\":true,\"privateLinkResourceId\":\"kfrexcrseqwjks\",\"provisioningState\":\"udgzhxogjgg\",\"\":{\"kxibdafh\":\"datau\",\"y\":\"datakmdyomkxfbvfbh\",\"gddeimaw\":\"datarhpw\"}},\"name\":\"vgkk\",\"type\":\"ui\",\"etag\":\"cjc\",\"id\":\"tbw\"}") .toObject(ManagedPrivateEndpointResourceInner.class); - Assertions.assertEquals("d", model.id()); - Assertions.assertEquals("qdtvqecrqctmxx", model.properties().fqdns().get(0)); - Assertions.assertEquals("tprwnw", model.properties().groupId()); - Assertions.assertEquals("vytlyokrrrouuxvn", model.properties().privateLinkResourceId()); + Assertions.assertEquals("tbw", model.id()); + Assertions.assertEquals("qejo", model.properties().fqdns().get(0)); + Assertions.assertEquals("cecybmrqbrjbbmpx", model.properties().groupId()); + Assertions.assertEquals("kfrexcrseqwjks", model.properties().privateLinkResourceId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ManagedPrivateEndpointResourceInner model = new ManagedPrivateEndpointResourceInner().withId("d") + ManagedPrivateEndpointResourceInner model = new ManagedPrivateEndpointResourceInner().withId("tbw") .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("qdtvqecrqctmxx", "tddmf", "huytxzvtzn", "pxbannovvoxc")).withGroupId("tprwnw") - .withPrivateLinkResourceId("vytlyokrrrouuxvn") - .withAdditionalProperties(mapOf("isReserved", true, "provisioningState", "sbcrymodizrxklo"))); + .withFqdns(Arrays.asList("qejo", "ovyrrleaesinu", "tljqobbpih")) + .withGroupId("cecybmrqbrjbbmpx") + .withPrivateLinkResourceId("kfrexcrseqwjks") + .withAdditionalProperties(mapOf("isReserved", true, "provisioningState", "udgzhxogjgg"))); model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpointResourceInner.class); - Assertions.assertEquals("d", model.id()); - Assertions.assertEquals("qdtvqecrqctmxx", model.properties().fqdns().get(0)); - Assertions.assertEquals("tprwnw", model.properties().groupId()); - Assertions.assertEquals("vytlyokrrrouuxvn", model.properties().privateLinkResourceId()); + Assertions.assertEquals("tbw", model.id()); + Assertions.assertEquals("qejo", model.properties().fqdns().get(0)); + Assertions.assertEquals("cecybmrqbrjbbmpx", model.properties().groupId()); + Assertions.assertEquals("kfrexcrseqwjks", model.properties().privateLinkResourceId()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java index bab7b4aa82513..5fcb56a3529dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java @@ -16,22 +16,24 @@ public final class ManagedPrivateEndpointTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedPrivateEndpoint model = BinaryData.fromString( - 
"{\"connectionState\":{\"actionsRequired\":\"idmhmwf\",\"description\":\"lfmu\",\"status\":\"pckc\"},\"fqdns\":[\"vwe\"],\"groupId\":\"xoy\",\"isReserved\":false,\"privateLinkResourceId\":\"haim\",\"provisioningState\":\"iroqbosh\",\"\":{\"pavbo\":\"datagapyyrmfsv\"}}") + "{\"connectionState\":{\"actionsRequired\":\"qowxwcom\",\"description\":\"kytwvcz\",\"status\":\"wka\"},\"fqdns\":[\"jyfdvlv\",\"b\",\"rnfxtgddp\",\"th\"],\"groupId\":\"n\",\"isReserved\":true,\"privateLinkResourceId\":\"yank\",\"provisioningState\":\"eqswanklty\",\"\":{\"drlktg\":\"datadroznn\",\"waeeczgfb\":\"datacsgguxhemlw\"}}") .toObject(ManagedPrivateEndpoint.class); - Assertions.assertEquals("vwe", model.fqdns().get(0)); - Assertions.assertEquals("xoy", model.groupId()); - Assertions.assertEquals("haim", model.privateLinkResourceId()); + Assertions.assertEquals("jyfdvlv", model.fqdns().get(0)); + Assertions.assertEquals("n", model.groupId()); + Assertions.assertEquals("yank", model.privateLinkResourceId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ManagedPrivateEndpoint model = new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("vwe")).withGroupId("xoy").withPrivateLinkResourceId("haim") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "iroqbosh")); + .withFqdns(Arrays.asList("jyfdvlv", "b", "rnfxtgddp", "th")) + .withGroupId("n") + .withPrivateLinkResourceId("yank") + .withAdditionalProperties(mapOf("isReserved", true, "provisioningState", "eqswanklty")); model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpoint.class); - Assertions.assertEquals("vwe", model.fqdns().get(0)); - Assertions.assertEquals("xoy", model.groupId()); - Assertions.assertEquals("haim", model.privateLinkResourceId()); + Assertions.assertEquals("jyfdvlv", model.fqdns().get(0)); + Assertions.assertEquals("n", model.groupId()); + Assertions.assertEquals("yank", model.privateLinkResourceId()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java index f74039c6e37b7..54b2cc583c35e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java @@ -6,16 +6,13 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.ByteBuffer; import 
java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -23,48 +20,36 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"bhztqiaydmblpdjt\",\"description\":\"ilhcca\",\"status\":\"iifvindcakansjrz\"},\"fqdns\":[\"csly\",\"hpwtkcebi\",\"ngupphvorxocjsa\"],\"groupId\":\"uvvpdjo\",\"isReserved\":true,\"privateLinkResourceId\":\"vvgsczvzdudfikdu\",\"provisioningState\":\"khma\",\"\":{\"ftz\":\"datacolf\"}},\"name\":\"qwfnlpjivtzshuz\",\"type\":\"nivrr\",\"etag\":\"ijkvopsamtx\",\"id\":\"elwno\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"jnnm\",\"description\":\"nvln\",\"status\":\"vnfrn\"},\"fqdns\":[\"fc\",\"rxpzniqo\",\"dpbsqovea\"],\"groupId\":\"ynblklykznzf\",\"isReserved\":false,\"privateLinkResourceId\":\"kkkactkmqo\",\"provisioningState\":\"gjwi\",\"\":{\"yvodcbsir\":\"datawgefacbcgssc\"}},\"name\":\"khmjt\",\"type\":\"vpvuaqox\",\"etag\":\"i\",\"id\":\"wfzvnwmbweh\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - ManagedPrivateEndpointResource response = manager.managedPrivateEndpoints().define("xmlb") - .withExistingManagedVirtualNetwork("erhhzjhmxyns", "adgv", "w") + ManagedPrivateEndpointResource response = manager.managedPrivateEndpoints() + .define("q") + .withExistingManagedVirtualNetwork("zubjcnozlx", "pvoyqcqpyntax", "ytoqxajiog") .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("wditccuzjlcm", "ghaolfupxhrl", "tknmp", "lrcrxxkvuzpsoujc")) - .withGroupId("ubpjwwvies").withPrivateLinkResourceId("zhi") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "lmymncuhqetmpq"))) - .withIfMatch("hdhf").create(); + 
.withFqdns(Arrays.asList("tgbsj")) + .withGroupId("fnlt") + .withPrivateLinkResourceId("zxfli") + .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "leihpq"))) + .withIfMatch("vkg") + .create(); - Assertions.assertEquals("elwno", response.id()); - Assertions.assertEquals("csly", response.properties().fqdns().get(0)); - Assertions.assertEquals("uvvpdjo", response.properties().groupId()); - Assertions.assertEquals("vvgsczvzdudfikdu", response.properties().privateLinkResourceId()); + Assertions.assertEquals("wfzvnwmbweh", response.id()); + Assertions.assertEquals("fc", response.properties().fqdns().get(0)); + Assertions.assertEquals("ynblklykznzf", response.properties().groupId()); + Assertions.assertEquals("kkkactkmqo", response.properties().privateLinkResourceId()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java index 80ce8728b5171..8badc6fb3160c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedPrivateEndpointsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> 
Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.managedPrivateEndpoints().deleteWithResponse("l", "rctrpun", "bhoety", "wx", - com.azure.core.util.Context.NONE); + manager.managedPrivateEndpoints() + .deleteWithResponse("cojogkufpf", "oajdf", "hlvags", "sto", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java index 34634b2d95f6c..f676b413552bf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java @@ -6,55 +6,37 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedPrivateEndpointsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"dvtlygwxilbazru\",\"description\":\"js\",\"status\":\"rowbfslylqzwql\"},\"fqdns\":[\"sjzrifg\"],\"groupId\":\"pnoiwlernc\",\"isReserved\":false,\"privateLinkResourceId\":\"sonkkuaamojzrn\",\"provisioningState\":\"keu\",\"\":{\"cg\":\"datatmssn\"}},\"name\":\"mqgyaknlxwxnn\",\"type\":\"o\",\"etag\":\"ne\",\"id\":\"y\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"properties\":{\"connectionState\":{\"actionsRequired\":\"dy\",\"description\":\"lbwpbieeqaozoo\",\"status\":\"ouifsrjpijz\"},\"fqdns\":[\"fqb\"],\"groupId\":\"ngbqpjswucsjp\",\"isReserved\":false,\"privateLinkResourceId\":\"yoiwmvmnahgknon\",\"provisioningState\":\"msocmxfkwfpoemzb\",\"\":{\"gwp\":\"datapysopmahdsq\",\"izxqbaznrqgloe\":\"datampklawc\",\"voawisxik\":\"dataqrolkfihfairpstg\"}},\"name\":\"vrffftr\",\"type\":\"ljscmofhisdloar\",\"etag\":\"zggwuthoqn\",\"id\":\"rveyrqtdl\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); ManagedPrivateEndpointResource response = manager.managedPrivateEndpoints() - .getWithResponse("gjp", "zibgitkowflc", "xqwy", "vuaiqqgay", "y", com.azure.core.util.Context.NONE) + .getWithResponse("kkn", "ba", "eiclxuijmzg", "nvavpyge", "zsnixtwpkngiy", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("y", response.id()); - Assertions.assertEquals("sjzrifg", response.properties().fqdns().get(0)); - Assertions.assertEquals("pnoiwlernc", response.properties().groupId()); - Assertions.assertEquals("sonkkuaamojzrn", response.properties().privateLinkResourceId()); + Assertions.assertEquals("rveyrqtdl", response.id()); + Assertions.assertEquals("fqb", response.properties().fqdns().get(0)); + Assertions.assertEquals("ngbqpjswucsjp", response.properties().groupId()); + Assertions.assertEquals("yoiwmvmnahgknon", response.properties().privateLinkResourceId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java index 846ddfe6b4448..4ab2d578a5d43 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java @@ -6,55 +6,37 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import 
org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedPrivateEndpointsListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"muvgfkdea\",\"description\":\"xdwwraimjkaz\",\"status\":\"idgoya\"},\"fqdns\":[\"sxkym\"],\"groupId\":\"imbesgi\",\"isReserved\":false,\"privateLinkResourceId\":\"jowgdwcpe\",\"provisioningState\":\"zmxu\",\"\":{\"bseinldmfl\":\"datal\",\"txfob\":\"datagjt\",\"xlptockgjvflc\":\"dataqqleylvy\",\"nlsmfqglvflxltng\":\"datahsbxrsqeywmq\"}},\"name\":\"lpgclo\",\"type\":\"zwuppv\",\"etag\":\"vfzcy\",\"id\":\"bryvlhv\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.managedPrivateEndpoints() - .listByFactory("ajjyournxq", "uzls", "mbsghzund", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("bryvlhv", response.iterator().next().id()); - Assertions.assertEquals("sxkym", response.iterator().next().properties().fqdns().get(0)); - Assertions.assertEquals("imbesgi", response.iterator().next().properties().groupId()); - Assertions.assertEquals("jowgdwcpe", response.iterator().next().properties().privateLinkResourceId()); + = "{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"vtdtbcrhpnmgnxa\",\"description\":\"egxnjxgoa\",\"status\":\"sutbnziaqszzcp\"},\"fqdns\":[\"kkffuo\",\"jp\"],\"groupId\":\"a\",\"isReserved\":false,\"privateLinkResourceId\":\"irsgogbz\",\"provisioningState\":\"vpzyovimpb\",\"\":{\"ffd\":\"datas\"}},\"name\":\"jcgyl\",\"type\":\"sxcdk\",\"etag\":\"leuzm\",\"id\":\"s\"}]}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + PagedIterable response + = manager.managedPrivateEndpoints().listByFactory("h", "qogcp", "k", com.azure.core.util.Context.NONE); + + Assertions.assertEquals("s", response.iterator().next().id()); + Assertions.assertEquals("kkffuo", response.iterator().next().properties().fqdns().get(0)); + Assertions.assertEquals("a", response.iterator().next().properties().groupId()); + Assertions.assertEquals("irsgogbz", 
response.iterator().next().properties().privateLinkResourceId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java index 3ae5dd099475d..fc500449ce7b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java @@ -17,21 +17,26 @@ public final class ManagedVirtualNetworkListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedVirtualNetworkListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"vNetId\":\"o\",\"alias\":\"vmfqhppubo\",\"\":{\"juahokqto\":\"datapdfgkmtdherngbt\",\"hfphwpnulaiywze\":\"datakauxof\",\"wrpqafgfugsnnf\":\"dataywhslwkojpllndnp\",\"coc\":\"datayetefyp\"}},\"name\":\"jgtixr\",\"type\":\"zuyt\",\"etag\":\"mlmuowol\",\"id\":\"uir\"}],\"nextLink\":\"ionszonwp\"}") + "{\"value\":[{\"properties\":{\"vNetId\":\"klmtkhlowkx\",\"alias\":\"vbrdfjmzsyzfho\",\"\":{\"unsjlpjrtwszhvvu\":\"dataikcyyc\"}},\"name\":\"phvtrrmhwrb\",\"type\":\"pyf\",\"etag\":\"bhvjglr\",\"id\":\"uyzlw\"},{\"properties\":{\"vNetId\":\"memhooclutnpq\",\"alias\":\"mczjkm\",\"\":{\"jylmbkzudnigr\":\"datayujxsglhsrrry\",\"refqy\":\"dataihotjewlpxuzzjg\",\"kwpzdqtvh\":\"dataqotoihiqakydiwfb\",\"ipietgbe\":\"dataspodaqax\"}},\"name\":\"fulbmoichdl\",\"type\":\"fpubntnbatz\",\"etag\":\"qs\",\"id\":\"saaelca\"}],\"nextLink\":\"cjuhplrvkm\"}") .toObject(ManagedVirtualNetworkListResponse.class); - Assertions.assertEquals("uir", model.value().get(0).id()); - Assertions.assertEquals("ionszonwp", model.nextLink()); + Assertions.assertEquals("uyzlw", model.value().get(0).id()); + Assertions.assertEquals("cjuhplrvkm", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ManagedVirtualNetworkListResponse model = new ManagedVirtualNetworkListResponse() - .withValue(Arrays.asList(new ManagedVirtualNetworkResourceInner().withId("uir").withProperties( - new ManagedVirtualNetwork().withAdditionalProperties(mapOf("vNetId", "o", "alias", "vmfqhppubo"))))) - .withNextLink("ionszonwp"); + .withValue(Arrays.asList( + new ManagedVirtualNetworkResourceInner().withId("uyzlw") + .withProperties(new ManagedVirtualNetwork() + .withAdditionalProperties(mapOf("vNetId", "klmtkhlowkx", "alias", "vbrdfjmzsyzfho"))), + new ManagedVirtualNetworkResourceInner().withId("saaelca") + .withProperties(new ManagedVirtualNetwork() + .withAdditionalProperties(mapOf("vNetId", "memhooclutnpq", "alias", "mczjkm"))))) + .withNextLink("cjuhplrvkm"); model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkListResponse.class); - Assertions.assertEquals("uir", model.value().get(0).id()); - Assertions.assertEquals("ionszonwp", model.nextLink()); + Assertions.assertEquals("uyzlw", model.value().get(0).id()); + Assertions.assertEquals("cjuhplrvkm", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java index 243d25c31be87..673b72e824440 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java @@ -13,18 +13,19 @@ public final class ManagedVirtualNetworkReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedVirtualNetworkReference model - = BinaryData.fromString("{\"type\":\"ManagedVirtualNetworkReference\",\"referenceName\":\"xawqy\"}") + = BinaryData.fromString("{\"type\":\"ManagedVirtualNetworkReference\",\"referenceName\":\"nims\"}") .toObject(ManagedVirtualNetworkReference.class); Assertions.assertEquals(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE, model.type()); - Assertions.assertEquals("xawqy", model.referenceName()); + Assertions.assertEquals("nims", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ManagedVirtualNetworkReference model = new ManagedVirtualNetworkReference() - .withType(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE).withReferenceName("xawqy"); + .withType(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE) + .withReferenceName("nims"); model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkReference.class); Assertions.assertEquals(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE, model.type()); - Assertions.assertEquals("xawqy", model.referenceName()); + Assertions.assertEquals("nims", model.referenceName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java index fa614545823f4..8c5c8ee9a52cd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java @@ -15,18 +15,18 @@ public final class ManagedVirtualNetworkResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ManagedVirtualNetworkResourceInner model = BinaryData.fromString( - "{\"properties\":{\"vNetId\":\"ajinnixjawrtmjfj\",\"alias\":\"ccxlzhcoxovnek\",\"\":{\"jvidttge\":\"datalusfnrdtjxtxrdcq\",\"iesfuug\":\"datauslvyjtcvuwkasi\"}},\"name\":\"uqfecj\",\"type\":\"ygtuhx\",\"etag\":\"cbuewmrswnjlxuz\",\"id\":\"wpusxjbaqehg\"}") + "{\"properties\":{\"vNetId\":\"wmj\",\"alias\":\"gfggcvkyylizrzbj\",\"\":{\"vagbwidqlvhukove\":\"dataxsfuztlvt\",\"jfnmjmvlwyz\":\"datafizr\",\"lfojuidjp\":\"dataiblkujr\"}},\"name\":\"yjucejikzoeo\",\"type\":\"tzejetjklnt\",\"etag\":\"yjuzkdb\",\"id\":\"o\"}") .toObject(ManagedVirtualNetworkResourceInner.class); - Assertions.assertEquals("wpusxjbaqehg", model.id()); + Assertions.assertEquals("o", model.id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - 
ManagedVirtualNetworkResourceInner model - = new ManagedVirtualNetworkResourceInner().withId("wpusxjbaqehg").withProperties(new ManagedVirtualNetwork() - .withAdditionalProperties(mapOf("vNetId", "ajinnixjawrtmjfj", "alias", "ccxlzhcoxovnek"))); + ManagedVirtualNetworkResourceInner model = new ManagedVirtualNetworkResourceInner().withId("o") + .withProperties(new ManagedVirtualNetwork() + .withAdditionalProperties(mapOf("vNetId", "wmj", "alias", "gfggcvkyylizrzbj"))); model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkResourceInner.class); - Assertions.assertEquals("wpusxjbaqehg", model.id()); + Assertions.assertEquals("o", model.id()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java index 39ee62f232f43..53819bdee10ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java @@ -12,15 +12,16 @@ public final class ManagedVirtualNetworkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ManagedVirtualNetwork model = BinaryData.fromString( - "{\"vNetId\":\"ohzjqatucoigeb\",\"alias\":\"cnwfepbnwgfmxjg\",\"\":{\"qbctqha\":\"datajbgdlfgtdysnaquf\"}}") + ManagedVirtualNetwork model = BinaryData + .fromString( + "{\"vNetId\":\"rzvh\",\"alias\":\"wtrhtgv\",\"\":{\"rkolawjm\":\"datac\",\"kcdxfzzzw\":\"datasmwr\"}}") .toObject(ManagedVirtualNetwork.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ManagedVirtualNetwork model = new ManagedVirtualNetwork() - .withAdditionalProperties(mapOf("vNetId", "ohzjqatucoigeb", "alias", "cnwfepbnwgfmxjg")); + ManagedVirtualNetwork model + = new ManagedVirtualNetwork().withAdditionalProperties(mapOf("vNetId", "rzvh", "alias", "wtrhtgv")); model = BinaryData.fromObject(model).toObject(ManagedVirtualNetwork.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java index b07fab297cb5b..289ad2a128aaa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java @@ -6,58 +6,41 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import 
com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"vNetId\":\"ruydiwsfva\",\"alias\":\"zabbfdhissd\",\"\":{\"fmkf\":\"dataecjmf\"}},\"name\":\"yfthsaf\",\"type\":\"mosiinmc\",\"etag\":\"dtuydynugkjzp\",\"id\":\"prfhpcy\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedVirtualNetworkResource response - = manager.managedVirtualNetworks().define("vpmtpqdfpgsoje").withExistingFactory("aoeghsqplnyp", "ewc") - .withProperties( - new ManagedVirtualNetwork().withAdditionalProperties(mapOf("vNetId", "jnlvcgar", "alias", "qusg"))) - .withIfMatch("bfkelqzcpts").create(); - - Assertions.assertEquals("prfhpcy", response.id()); + = "{\"properties\":{\"vNetId\":\"wnmzxtaxfa\",\"alias\":\"d\",\"\":{\"ihfom\":\"datat\",\"tnsdwyspvps\":\"datak\",\"ynuhknut\":\"datafsirykzcmwze\",\"yccoyxumnd\":\"datank\"}},\"name\":\"znkuilwkfcll\",\"type\":\"mg\",\"etag\":\"msmhrugllxeq\",\"id\":\"boxrezkqqpyhclf\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + ManagedVirtualNetworkResource response = manager.managedVirtualNetworks() + .define("owloe") + .withExistingFactory("aehanntfs", "vdqwh") + .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf("vNetId", "lfv", "alias", "xu"))) + .withIfMatch("kfecspunfqhhcjd") + .create(); + + Assertions.assertEquals("boxrezkqqpyhclf", response.id()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java index e41a463b70845..571b54dd4207a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java @@ -6,51 +6,34 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedVirtualNetworksGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"vNetId\":\"wdaugdgv\",\"alias\":\"fso\",\"\":{\"ns\":\"datajkiajokjuehcryww\",\"dnwafjibaqlwtd\":\"datarcj\"}},\"name\":\"ejyxrrmnoxjwlhu\",\"type\":\"yzvvidokvzqeadk\",\"etag\":\"drcxvolt\",\"id\":\"zolnqkdtvtpwrmm\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"vNetId\":\"sdlpiazl\",\"alias\":\"wumzdtsm\",\"\":{\"zawrsfogzg\":\"datacjxrsf\",\"rwlvjavztk\":\"databnfloykshgxxwo\",\"yvsjmflxq\":\"datajm\",\"uxtyakybzqgbtzno\":\"datajlfdtnmomvsls\"}},\"name\":\"snfrpesvddgu\",\"type\":\"hmaarordga\",\"etag\":\"ubrhscnvknzct\",\"id\":\"hyghcbnnbr\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); 
ManagedVirtualNetworkResource response = manager.managedVirtualNetworks() - .getWithResponse("jbg", "ipc", "qyapn", "nbyhdtjynus", com.azure.core.util.Context.NONE).getValue(); + .getWithResponse("xowp", "lyehcanp", "defvx", "oitpybtrqilbnv", com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("zolnqkdtvtpwrmm", response.id()); + Assertions.assertEquals("hyghcbnnbr", response.id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java index 22e68bcc6bfd9..c4a1d956d2f30 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java @@ -6,52 +6,34 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class ManagedVirtualNetworksListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"vNetId\":\"hnoiqtvfr\",\"alias\":\"inavbfkzv\",\"\":{\"rpdveyxcdzuld\":\"datad\"}},\"name\":\"xedm\",\"type\":\"z\",\"etag\":\"hvj\",\"id\":\"jxth\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"vNetId\":\"vqgjhjie\",\"alias\":\"pv\",\"\":{\"xluungytympf\":\"datapfwhwwdml\",\"kxglnklygrqoqyz\":\"datazyasyhrnwmsznqh\"}},\"name\":\"qqslrvinic\",\"type\":\"bmstrdtcpkdp\",\"etag\":\"rpufxnfmqrlmgql\",\"id\":\"erqkisjjttf\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - 
tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response = manager.managedVirtualNetworks() - .listByFactory("ptpreputusd", "wnkzwyry", com.azure.core.util.Context.NONE); + .listByFactory("rmynyrtijgn", "jodaonhwlqxhahzs", com.azure.core.util.Context.NONE); - Assertions.assertEquals("jxth", response.iterator().next().id()); + Assertions.assertEquals("erqkisjjttf", response.iterator().next().id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java index 9884020974f63..d81b6b4374dcc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java @@ -17,20 +17,21 @@ public final class MapperAttributeMappingTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperAttributeMapping model = BinaryData.fromString( - "{\"name\":\"czwciidjsllfryvd\",\"type\":\"Derived\",\"functionName\":\"dqacfrgnawbabgf\",\"expression\":\"t\",\"attributeReference\":{\"name\":\"fczlfsyqkfrbzgow\",\"entity\":\"qmje\",\"entityConnectionReference\":{\"connectionName\":\"xnyqgxhlusr\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"jceagb\",\"entity\":\"vl\",\"entityConnectionReference\":{\"connectionName\":\"ywzash\",\"type\":\"linkedservicetype\"}}]}") + "{\"name\":\"crunfhi\",\"type\":\"Derived\",\"functionName\":\"fbcpaqktkrumzu\",\"expression\":\"kyzbfvxovqkxiux\",\"attributeReference\":{\"name\":\"vqrnhyhlwcjsqgg\",\"entity\":\"ffbxrqrkij\",\"entityConnectionReference\":{\"connectionName\":\"qlsdxeqztvxwmw\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"na\",\"entity\":\"amecle\",\"entityConnectionReference\":{\"connectionName\":\"ulndhzyoeo\",\"type\":\"linkedservicetype\"}},{\"name\":\"llhsvidmyt\",\"entity\":\"nglxpn\",\"entityConnectionReference\":{\"connectionName\":\"oanfbcswqagyw\",\"type\":\"linkedservicetype\"}}]}") .toObject(MapperAttributeMapping.class); - Assertions.assertEquals("czwciidjsllfryvd", model.name()); + Assertions.assertEquals("crunfhi", model.name()); Assertions.assertEquals(MappingType.DERIVED, model.type()); - Assertions.assertEquals("dqacfrgnawbabgf", model.functionName()); - Assertions.assertEquals("t", model.expression()); - Assertions.assertEquals("fczlfsyqkfrbzgow", model.attributeReference().name()); - Assertions.assertEquals("qmje", model.attributeReference().entity()); - Assertions.assertEquals("xnyqgxhlusr", model.attributeReference().entityConnectionReference().connectionName()); + Assertions.assertEquals("fbcpaqktkrumzu", model.functionName()); + Assertions.assertEquals("kyzbfvxovqkxiux", model.expression()); + 
Assertions.assertEquals("vqrnhyhlwcjsqgg", model.attributeReference().name()); + Assertions.assertEquals("ffbxrqrkij", model.attributeReference().entity()); + Assertions.assertEquals("qlsdxeqztvxwmw", + model.attributeReference().entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("jceagb", model.attributeReferences().get(0).name()); - Assertions.assertEquals("vl", model.attributeReferences().get(0).entity()); - Assertions.assertEquals("ywzash", + Assertions.assertEquals("na", model.attributeReferences().get(0).name()); + Assertions.assertEquals("amecle", model.attributeReferences().get(0).entity()); + Assertions.assertEquals("ulndhzyoeo", model.attributeReferences().get(0).entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeReferences().get(0).entityConnectionReference().type()); @@ -38,27 +39,37 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperAttributeMapping model = new MapperAttributeMapping().withName("czwciidjsllfryvd") - .withType(MappingType.DERIVED).withFunctionName("dqacfrgnawbabgf").withExpression("t") - .withAttributeReference(new MapperAttributeReference().withName("fczlfsyqkfrbzgow").withEntity("qmje") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("xnyqgxhlusr") + MapperAttributeMapping model = new MapperAttributeMapping().withName("crunfhi") + .withType(MappingType.DERIVED) + .withFunctionName("fbcpaqktkrumzu") + .withExpression("kyzbfvxovqkxiux") + .withAttributeReference(new MapperAttributeReference().withName("vqrnhyhlwcjsqgg") + .withEntity("ffbxrqrkij") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("qlsdxeqztvxwmw") .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference().withName("jceagb").withEntity("vl") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("ywzash") - .withType(ConnectionType.LINKEDSERVICETYPE)))); + .withAttributeReferences(Arrays.asList( + new MapperAttributeReference().withName("na") + .withEntity("amecle") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("ulndhzyoeo") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference().withName("llhsvidmyt") + .withEntity("nglxpn") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("oanfbcswqagyw") + .withType(ConnectionType.LINKEDSERVICETYPE)))); model = BinaryData.fromObject(model).toObject(MapperAttributeMapping.class); - Assertions.assertEquals("czwciidjsllfryvd", model.name()); + Assertions.assertEquals("crunfhi", model.name()); Assertions.assertEquals(MappingType.DERIVED, model.type()); - Assertions.assertEquals("dqacfrgnawbabgf", model.functionName()); - Assertions.assertEquals("t", model.expression()); - Assertions.assertEquals("fczlfsyqkfrbzgow", model.attributeReference().name()); - Assertions.assertEquals("qmje", model.attributeReference().entity()); - Assertions.assertEquals("xnyqgxhlusr", model.attributeReference().entityConnectionReference().connectionName()); + Assertions.assertEquals("fbcpaqktkrumzu", model.functionName()); + Assertions.assertEquals("kyzbfvxovqkxiux", model.expression()); + Assertions.assertEquals("vqrnhyhlwcjsqgg", 
model.attributeReference().name()); + Assertions.assertEquals("ffbxrqrkij", model.attributeReference().entity()); + Assertions.assertEquals("qlsdxeqztvxwmw", + model.attributeReference().entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("jceagb", model.attributeReferences().get(0).name()); - Assertions.assertEquals("vl", model.attributeReferences().get(0).entity()); - Assertions.assertEquals("ywzash", + Assertions.assertEquals("na", model.attributeReferences().get(0).name()); + Assertions.assertEquals("amecle", model.attributeReferences().get(0).entity()); + Assertions.assertEquals("ulndhzyoeo", model.attributeReferences().get(0).entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeReferences().get(0).entityConnectionReference().type()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java index aab4742573e48..45ab2e324cca6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java @@ -18,21 +18,21 @@ public final class MapperAttributeMappingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperAttributeMappings model = BinaryData.fromString( - 
"{\"attributeMappings\":[{\"name\":\"q\",\"type\":\"Direct\",\"functionName\":\"nrgmqsorhce\",\"expression\":\"gnlykm\",\"attributeReference\":{\"name\":\"wzvmdoksqd\",\"entity\":\"wlwxlboncqbazqic\",\"entityConnectionReference\":{\"connectionName\":\"ygtvxbyjanepub\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"vgxiaodetvo\",\"entity\":\"xdxuwsaifmc\",\"entityConnectionReference\":{\"connectionName\":\"s\",\"type\":\"linkedservicetype\"}},{\"name\":\"hg\",\"entity\":\"kb\",\"entityConnectionReference\":{\"connectionName\":\"jolgjyyxpvels\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"zevxoqein\",\"type\":\"Derived\",\"functionName\":\"ljgl\",\"expression\":\"blqwaafrqulhmzy\",\"attributeReference\":{\"name\":\"dvaf\",\"entity\":\"qpjiyrqjcr\",\"entityConnectionReference\":{\"connectionName\":\"wmzwdfkbnrzorpdl\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"jfgxxsaetg\",\"entity\":\"gvpyigdaqqilzdc\",\"entityConnectionReference\":{\"connectionName\":\"joedx\",\"type\":\"linkedservicetype\"}},{\"name\":\"aifpaurwwgil\",\"entity\":\"qqa\",\"entityConnectionReference\":{\"connectionName\":\"kxwxdcvjwcyziake\",\"type\":\"linkedservicetype\"}},{\"name\":\"h\",\"entity\":\"tuicds\",\"entityConnectionReference\":{\"connectionName\":\"fmmp\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"wvywr\",\"type\":\"Aggregate\",\"functionName\":\"ydg\",\"expression\":\"x\",\"attributeReference\":{\"name\":\"kiqaondjr\",\"entity\":\"lamgglvlmfejdo\",\"entityConnectionReference\":{\"connectionName\":\"kgltyg\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"ka\",\"entity\":\"jsxtlgflwfgziiuc\",\"entityConnectionReference\":{\"connectionName\":\"ceatlijjjrtvamca\",\"type\":\"linkedservicetype\"}},{\"name\":\"xk\",\"entity\":\"cxetyvkunmignoh\",\"entityConnectionReference\":{\"connectionName\":\"gqogjwpindedva\",\"type\":\"linkedservicetype\"}},{\"name\":\"hmedeilbjywfcfxz\",\"entity\":\"zzihvwy\",\"entityConnectionReference\":{\"connectionName\":\"u\",\"type\":\"linkedservicetype\"}}]}]}") + 
"{\"attributeMappings\":[{\"name\":\"oskkfmk\",\"type\":\"Aggregate\",\"functionName\":\"xyxgbkkqv\",\"expression\":\"teoedl\",\"attributeReference\":{\"name\":\"skkzpxvjnzdpvo\",\"entity\":\"jhpcn\",\"entityConnectionReference\":{\"connectionName\":\"zf\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"e\",\"entity\":\"zilmhivzkww\",\"entityConnectionReference\":{\"connectionName\":\"knrzdajlskzpt\",\"type\":\"linkedservicetype\"}},{\"name\":\"weucyrth\",\"entity\":\"lehmcgcjeinueho\",\"entityConnectionReference\":{\"connectionName\":\"vfejvqnttmbqda\",\"type\":\"linkedservicetype\"}},{\"name\":\"vfokpysthhzag\",\"entity\":\"wyy\",\"entityConnectionReference\":{\"connectionName\":\"g\",\"type\":\"linkedservicetype\"}},{\"name\":\"ejgvkvebaqszllrz\",\"entity\":\"mmdqgmihzpimc\",\"entityConnectionReference\":{\"connectionName\":\"nxtminklog\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"zarhzvqnsqktcmbj\",\"type\":\"Direct\",\"functionName\":\"slpkybtg\",\"expression\":\"kzpgaj\",\"attributeReference\":{\"name\":\"cemqbmfuvqarwz\",\"entity\":\"qreblui\",\"entityConnectionReference\":{\"connectionName\":\"wxs\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"vxekraokqkbud\",\"entity\":\"waokbavlytta\",\"entityConnectionReference\":{\"connectionName\":\"wfrke\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"p\",\"type\":\"Direct\",\"functionName\":\"igatolekscbctna\",\"expression\":\"imwbzxpdcldpk\",\"attributeReference\":{\"name\":\"snlaimoux\",\"entity\":\"sqmu\",\"entityConnectionReference\":{\"connectionName\":\"coibicziuswswjrk\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"tqqvy\",\"entity\":\"cyrfwbivqvog\",\"entityConnectionReference\":{\"connectionName\":\"zwvbhlimbyqecro\",\"type\":\"linkedservicetype\"}},{\"name\":\"kcdrdaasaxxobsm\",\"entity\":\"wiyjvzuko\",\"entityConnectionReference\":{\"connectionName\":\"awn\",\"type\":\"linkedservicetype\"}},{\"name\":\"n\",\"entity\":\"ywsxvjabjqqaxu\",\"entityConnectionReference\":{\"connectionName\":\"mc\",\"type\":\"linkedservicetype\"}},{\"name\":\"doabhj\",\"entity\":\"xqweu\",\"entityConnectionReference\":{\"connectionName\":\"pvksmit\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"tltcl\",\"type\":\"Aggregate\",\"functionName\":\"qgfhyrf\",\"expression\":\"kkld\",\"attributeReference\":{\"name\":\"wfcmfcnr\",\"entity\":\"jqmatxjtielnzq\",\"entityConnectionReference\":{\"connectionName\":\"gfb\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"agynoi\",\"entity\":\"n\",\"entityConnectionReference\":{\"connectionName\":\"lin\",\"type\":\"linkedservicetype\"}},{\"name\":\"xzxaqzibmqimiym\",\"entity\":\"uqguhfupetasvvoq\",\"entityConnectionReference\":{\"connectionName\":\"kfla\",\"type\":\"linkedservicetype\"}},{\"name\":\"xsyaowuzowpuoh\",\"entity\":\"cprgukxrztiochl\",\"entityConnectionReference\":{\"connectionName\":\"xmqrudjizc\",\"type\":\"linkedservicetype\"}}]}]}") .toObject(MapperAttributeMappings.class); - Assertions.assertEquals("q", model.attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.DIRECT, model.attributeMappings().get(0).type()); - Assertions.assertEquals("nrgmqsorhce", model.attributeMappings().get(0).functionName()); - Assertions.assertEquals("gnlykm", model.attributeMappings().get(0).expression()); - Assertions.assertEquals("wzvmdoksqd", model.attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("wlwxlboncqbazqic", 
model.attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("ygtvxbyjanepub", + Assertions.assertEquals("oskkfmk", model.attributeMappings().get(0).name()); + Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappings().get(0).type()); + Assertions.assertEquals("xyxgbkkqv", model.attributeMappings().get(0).functionName()); + Assertions.assertEquals("teoedl", model.attributeMappings().get(0).expression()); + Assertions.assertEquals("skkzpxvjnzdpvo", model.attributeMappings().get(0).attributeReference().name()); + Assertions.assertEquals("jhpcn", model.attributeMappings().get(0).attributeReference().entity()); + Assertions.assertEquals("zf", model.attributeMappings().get(0).attributeReference().entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappings().get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("vgxiaodetvo", model.attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("xdxuwsaifmc", model.attributeMappings().get(0).attributeReferences().get(0).entity()); - Assertions.assertEquals("s", + Assertions.assertEquals("e", model.attributeMappings().get(0).attributeReferences().get(0).name()); + Assertions.assertEquals("zilmhivzkww", model.attributeMappings().get(0).attributeReferences().get(0).entity()); + Assertions.assertEquals("knrzdajlskzpt", model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().type()); @@ -44,72 +44,125 @@ public void testSerialize() throws Exception { = new MapperAttributeMappings() .withAttributeMappings( Arrays.asList( - new MapperAttributeMapping().withName("q").withType(MappingType.DIRECT) - .withFunctionName("nrgmqsorhce").withExpression("gnlykm") - .withAttributeReference( - new MapperAttributeReference().withName("wzvmdoksqd").withEntity("wlwxlboncqbazqic") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("ygtvxbyjanepub") - .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("vgxiaodetvo").withEntity("xdxuwsaifmc") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("s").withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("hg").withEntity("kb") + new MapperAttributeMapping().withName("oskkfmk") + .withType(MappingType.AGGREGATE) + .withFunctionName("xyxgbkkqv") + .withExpression("teoedl") + .withAttributeReference(new MapperAttributeReference().withName("skkzpxvjnzdpvo") + .withEntity("jhpcn") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("zf") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences(Arrays + .asList( + new MapperAttributeReference() + .withName("e") + .withEntity("zilmhivzkww") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("knrzdajlskzpt") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference() + .withName("weucyrth") + .withEntity("lehmcgcjeinueho") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("vfejvqnttmbqda") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference() + 
.withName("vfokpysthhzag") + .withEntity("wyy") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("g") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference().withName("ejgvkvebaqszllrz") + .withEntity("mmdqgmihzpimc") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("nxtminklog") + .withType(ConnectionType.LINKEDSERVICETYPE)))), + new MapperAttributeMapping().withName("zarhzvqnsqktcmbj") + .withType(MappingType.DIRECT) + .withFunctionName("slpkybtg") + .withExpression("kzpgaj") + .withAttributeReference(new MapperAttributeReference() + .withName("cemqbmfuvqarwz") + .withEntity("qreblui") + .withEntityConnectionReference(new MapperConnectionReference() + .withConnectionName("wxs") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences( + Arrays.asList(new MapperAttributeReference().withName("vxekraokqkbud") + .withEntity("waokbavlytta") .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("jolgjyyxpvels") + .withConnectionName("wfrke") .withType(ConnectionType.LINKEDSERVICETYPE)))), - new MapperAttributeMapping().withName("zevxoqein").withType(MappingType.DERIVED) - .withFunctionName("ljgl").withExpression("blqwaafrqulhmzy") - .withAttributeReference(new MapperAttributeReference() - .withName("dvaf").withEntity("qpjiyrqjcr") + new MapperAttributeMapping().withName("p") + .withType(MappingType.DIRECT) + .withFunctionName("igatolekscbctna") + .withExpression("imwbzxpdcldpk") + .withAttributeReference(new MapperAttributeReference().withName("snlaimoux") + .withEntity("sqmu") .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("wmzwdfkbnrzorpdl") + new MapperConnectionReference().withConnectionName("coibicziuswswjrk") .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference() - .withName("jfgxxsaetg").withEntity("gvpyigdaqqilzdc") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("joedx").withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("aifpaurwwgil").withEntity("qqa") + .withAttributeReferences(Arrays.asList( + new MapperAttributeReference().withName("tqqvy") + .withEntity("cyrfwbivqvog") .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("kxwxdcvjwcyziake") + new MapperConnectionReference().withConnectionName("zwvbhlimbyqecro") .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("h").withEntity("tuicds") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("fmmp").withType(ConnectionType.LINKEDSERVICETYPE)))), - new MapperAttributeMapping().withName("wvywr").withType(MappingType.AGGREGATE) - .withFunctionName("ydg").withExpression("x") - .withAttributeReference( new MapperAttributeReference() - .withName("kiqaondjr").withEntity("lamgglvlmfejdo") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("kgltyg").withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("ka").withEntity("jsxtlgflwfgziiuc") - .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("ceatlijjjrtvamca") - .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("xk").withEntity("cxetyvkunmignoh") + .withName("kcdrdaasaxxobsm") + 
.withEntity("wiyjvzuko") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("awn") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference() + .withName("n") + .withEntity("ywsxvjabjqqaxu") .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("gqogjwpindedva") + new MapperConnectionReference().withConnectionName("mc") .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("hmedeilbjywfcfxz").withEntity("zzihvwy") + new MapperAttributeReference().withName("doabhj") + .withEntity("xqweu") .withEntityConnectionReference(new MapperConnectionReference() - .withConnectionName("u").withType(ConnectionType.LINKEDSERVICETYPE)))))); + .withConnectionName("pvksmit") + .withType(ConnectionType.LINKEDSERVICETYPE)))), + new MapperAttributeMapping().withName("tltcl") + .withType(MappingType.AGGREGATE) + .withFunctionName("qgfhyrf") + .withExpression("kkld") + .withAttributeReference(new MapperAttributeReference().withName("wfcmfcnr") + .withEntity("jqmatxjtielnzq") + .withEntityConnectionReference(new MapperConnectionReference() + .withConnectionName("gfb") + .withType(ConnectionType.LINKEDSERVICETYPE))) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference().withName("agynoi") + .withEntity("n") + .withEntityConnectionReference(new MapperConnectionReference() + .withConnectionName("lin") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference().withName("xzxaqzibmqimiym") + .withEntity("uqguhfupetasvvoq") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("kfla") + .withType(ConnectionType.LINKEDSERVICETYPE)), + new MapperAttributeReference().withName("xsyaowuzowpuoh") + .withEntity("cprgukxrztiochl") + .withEntityConnectionReference( + new MapperConnectionReference().withConnectionName("xmqrudjizc") + .withType(ConnectionType.LINKEDSERVICETYPE)))))); model = BinaryData.fromObject(model).toObject(MapperAttributeMappings.class); - Assertions.assertEquals("q", model.attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.DIRECT, model.attributeMappings().get(0).type()); - Assertions.assertEquals("nrgmqsorhce", model.attributeMappings().get(0).functionName()); - Assertions.assertEquals("gnlykm", model.attributeMappings().get(0).expression()); - Assertions.assertEquals("wzvmdoksqd", model.attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("wlwxlboncqbazqic", model.attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("ygtvxbyjanepub", + Assertions.assertEquals("oskkfmk", model.attributeMappings().get(0).name()); + Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappings().get(0).type()); + Assertions.assertEquals("xyxgbkkqv", model.attributeMappings().get(0).functionName()); + Assertions.assertEquals("teoedl", model.attributeMappings().get(0).expression()); + Assertions.assertEquals("skkzpxvjnzdpvo", model.attributeMappings().get(0).attributeReference().name()); + Assertions.assertEquals("jhpcn", model.attributeMappings().get(0).attributeReference().entity()); + Assertions.assertEquals("zf", model.attributeMappings().get(0).attributeReference().entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappings().get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("vgxiaodetvo", 
model.attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("xdxuwsaifmc", model.attributeMappings().get(0).attributeReferences().get(0).entity()); - Assertions.assertEquals("s", + Assertions.assertEquals("e", model.attributeMappings().get(0).attributeReferences().get(0).name()); + Assertions.assertEquals("zilmhivzkww", model.attributeMappings().get(0).attributeReferences().get(0).entity()); + Assertions.assertEquals("knrzdajlskzpt", model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().type()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java index 684e4d1087c23..e41933802b9e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java @@ -14,23 +14,24 @@ public final class MapperAttributeReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperAttributeReference model = BinaryData.fromString( - "{\"name\":\"oyjfqipu\",\"entity\":\"znclkfkeebgv\",\"entityConnectionReference\":{\"connectionName\":\"m\",\"type\":\"linkedservicetype\"}}") + "{\"name\":\"gvjrktpgaeuky\",\"entity\":\"ohpmwhqn\",\"entityConnectionReference\":{\"connectionName\":\"klhsidsjtdlpb\",\"type\":\"linkedservicetype\"}}") .toObject(MapperAttributeReference.class); - Assertions.assertEquals("oyjfqipu", model.name()); - Assertions.assertEquals("znclkfkeebgv", model.entity()); - Assertions.assertEquals("m", model.entityConnectionReference().connectionName()); + Assertions.assertEquals("gvjrktpgaeuky", model.name()); + Assertions.assertEquals("ohpmwhqn", model.entity()); + Assertions.assertEquals("klhsidsjtdlpb", model.entityConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.entityConnectionReference().type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperAttributeReference model = new MapperAttributeReference().withName("oyjfqipu").withEntity("znclkfkeebgv") - .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("m").withType(ConnectionType.LINKEDSERVICETYPE)); + MapperAttributeReference model = new MapperAttributeReference().withName("gvjrktpgaeuky") + .withEntity("ohpmwhqn") + .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("klhsidsjtdlpb") + .withType(ConnectionType.LINKEDSERVICETYPE)); model = BinaryData.fromObject(model).toObject(MapperAttributeReference.class); - Assertions.assertEquals("oyjfqipu", model.name()); - Assertions.assertEquals("znclkfkeebgv", model.entity()); - Assertions.assertEquals("m", model.entityConnectionReference().connectionName()); + Assertions.assertEquals("gvjrktpgaeuky", model.name()); + Assertions.assertEquals("ohpmwhqn", model.entity()); + Assertions.assertEquals("klhsidsjtdlpb", model.entityConnectionReference().connectionName()); 
Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.entityConnectionReference().type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java index a3f2e53f871c7..36a29fea3bd81 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java @@ -13,18 +13,18 @@ public final class MapperConnectionReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperConnectionReference model - = BinaryData.fromString("{\"connectionName\":\"k\",\"type\":\"linkedservicetype\"}") + = BinaryData.fromString("{\"connectionName\":\"zodubtlm\",\"type\":\"linkedservicetype\"}") .toObject(MapperConnectionReference.class); - Assertions.assertEquals("k", model.connectionName()); + Assertions.assertEquals("zodubtlm", model.connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MapperConnectionReference model - = new MapperConnectionReference().withConnectionName("k").withType(ConnectionType.LINKEDSERVICETYPE); + = new MapperConnectionReference().withConnectionName("zodubtlm").withType(ConnectionType.LINKEDSERVICETYPE); model = BinaryData.fromObject(model).toObject(MapperConnectionReference.class); - Assertions.assertEquals("k", model.connectionName()); + Assertions.assertEquals("zodubtlm", model.connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java index 12932f5fe9450..e0791af20f74e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java @@ -18,32 +18,33 @@ public final class MapperConnectionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperConnection model = BinaryData.fromString( - "{\"linkedService\":{\"referenceName\":\"sdtcjbctvivuzqym\",\"parameters\":{\"zvbrzcdbanfzndsc\":\"datawogtgitsq\"}},\"linkedServiceType\":\"xeatkd\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"yibqbnao\",\"value\":\"datajrmkuhmaxljalf\"},{\"name\":\"cjmobcanc\",\"value\":\"dataxxqcwgaxf\"},{\"name\":\"aknokzwjjzrl\",\"value\":\"dataxldzyyfytpqsix\"},{\"name\":\"m\",\"value\":\"datajivyqlkjuv\"}]}") + 
"{\"linkedService\":{\"referenceName\":\"dfmmpzhzzwvy\",\"parameters\":{\"aondjrkclamgg\":\"datayngydgrpxncaki\",\"oqeykglt\":\"datavlmfej\",\"tlgflwfgz\":\"datagxhqfgqkayejs\",\"lijjjrtvam\":\"dataiucijjcea\"}},\"linkedServiceType\":\"szknxkvccxe\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"ignohi\",\"value\":\"datagqogjwpindedva\"},{\"name\":\"xbhmed\",\"value\":\"datalbjywfcfxzirzzih\"}]}") .toObject(MapperConnection.class); - Assertions.assertEquals("sdtcjbctvivuzqym", model.linkedService().referenceName()); - Assertions.assertEquals("xeatkd", model.linkedServiceType()); + Assertions.assertEquals("dfmmpzhzzwvy", model.linkedService().referenceName()); + Assertions.assertEquals("szknxkvccxe", model.linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); Assertions.assertEquals(false, model.isInlineDataset()); - Assertions.assertEquals("yibqbnao", model.commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals("ignohi", model.commonDslConnectorProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MapperConnection model = new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("sdtcjbctvivuzqym") - .withParameters(mapOf("zvbrzcdbanfzndsc", "datawogtgitsq"))) - .withLinkedServiceType("xeatkd").withType(ConnectionType.LINKEDSERVICETYPE).withIsInlineDataset(false) + .withLinkedService(new LinkedServiceReference().withReferenceName("dfmmpzhzzwvy") + .withParameters(mapOf("aondjrkclamgg", "datayngydgrpxncaki", "oqeykglt", "datavlmfej", "tlgflwfgz", + "datagxhqfgqkayejs", "lijjjrtvam", "dataiucijjcea"))) + .withLinkedServiceType("szknxkvccxe") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) .withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("yibqbnao").withValue("datajrmkuhmaxljalf"), - new MapperDslConnectorProperties().withName("cjmobcanc").withValue("dataxxqcwgaxf"), - new MapperDslConnectorProperties().withName("aknokzwjjzrl").withValue("dataxldzyyfytpqsix"), - new MapperDslConnectorProperties().withName("m").withValue("datajivyqlkjuv"))); + Arrays.asList(new MapperDslConnectorProperties().withName("ignohi").withValue("datagqogjwpindedva"), + new MapperDslConnectorProperties().withName("xbhmed").withValue("datalbjywfcfxzirzzih"))); model = BinaryData.fromObject(model).toObject(MapperConnection.class); - Assertions.assertEquals("sdtcjbctvivuzqym", model.linkedService().referenceName()); - Assertions.assertEquals("xeatkd", model.linkedServiceType()); + Assertions.assertEquals("dfmmpzhzzwvy", model.linkedService().referenceName()); + Assertions.assertEquals("szknxkvccxe", model.linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); Assertions.assertEquals(false, model.isInlineDataset()); - Assertions.assertEquals("yibqbnao", model.commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals("ignohi", model.commonDslConnectorProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java index 63dfc22c6442d..ac35b73191980 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java @@ -11,15 +11,16 @@ public final class MapperDslConnectorPropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MapperDslConnectorProperties model = BinaryData.fromString("{\"name\":\"lkwq\",\"value\":\"datatv\"}") + MapperDslConnectorProperties model = BinaryData.fromString("{\"name\":\"iqch\",\"value\":\"datatuicds\"}") .toObject(MapperDslConnectorProperties.class); - Assertions.assertEquals("lkwq", model.name()); + Assertions.assertEquals("iqch", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperDslConnectorProperties model = new MapperDslConnectorProperties().withName("lkwq").withValue("datatv"); + MapperDslConnectorProperties model + = new MapperDslConnectorProperties().withName("iqch").withValue("datatuicds"); model = BinaryData.fromObject(model).toObject(MapperDslConnectorProperties.class); - Assertions.assertEquals("lkwq", model.name()); + Assertions.assertEquals("iqch", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java index 6cdfd9d6ba8ce..36e75b73d5405 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java @@ -12,18 +12,18 @@ public final class MapperPolicyRecurrenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MapperPolicyRecurrence model = BinaryData.fromString("{\"frequency\":\"Hour\",\"interval\":1799145797}") + MapperPolicyRecurrence model = BinaryData.fromString("{\"frequency\":\"Second\",\"interval\":606826553}") .toObject(MapperPolicyRecurrence.class); - Assertions.assertEquals(FrequencyType.HOUR, model.frequency()); - Assertions.assertEquals(1799145797, model.interval()); + Assertions.assertEquals(FrequencyType.SECOND, model.frequency()); + Assertions.assertEquals(606826553, model.interval()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MapperPolicyRecurrence model - = new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(1799145797); + = new MapperPolicyRecurrence().withFrequency(FrequencyType.SECOND).withInterval(606826553); model = BinaryData.fromObject(model).toObject(MapperPolicyRecurrence.class); - Assertions.assertEquals(FrequencyType.HOUR, model.frequency()); - Assertions.assertEquals(1799145797, model.interval()); + Assertions.assertEquals(FrequencyType.SECOND, model.frequency()); + Assertions.assertEquals(606826553, model.interval()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java index 400b34b1b8f3a..d6d09a74aa856 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java @@ -14,20 +14,20 @@ public final class MapperPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperPolicy model = BinaryData - .fromString("{\"mode\":\"ujlyegq\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":1397511481}}") + .fromString("{\"mode\":\"gazlsvbz\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":63190145}}") .toObject(MapperPolicy.class); - Assertions.assertEquals("ujlyegq", model.mode()); - Assertions.assertEquals(FrequencyType.SECOND, model.recurrence().frequency()); - Assertions.assertEquals(1397511481, model.recurrence().interval()); + Assertions.assertEquals("gazlsvbz", model.mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.recurrence().frequency()); + Assertions.assertEquals(63190145, model.recurrence().interval()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperPolicy model = new MapperPolicy().withMode("ujlyegq") - .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.SECOND).withInterval(1397511481)); + MapperPolicy model = new MapperPolicy().withMode("gazlsvbz") + .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(63190145)); model = BinaryData.fromObject(model).toObject(MapperPolicy.class); - Assertions.assertEquals("ujlyegq", model.mode()); - Assertions.assertEquals(FrequencyType.SECOND, model.recurrence().frequency()); - Assertions.assertEquals(1397511481, model.recurrence().interval()); + Assertions.assertEquals("gazlsvbz", model.mode()); + Assertions.assertEquals(FrequencyType.MINUTE, model.recurrence().frequency()); + Assertions.assertEquals(63190145, model.recurrence().interval()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java index efd69e2dadfdc..c0c377e66ec02 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java @@ -21,51 +21,75 @@ public final class MapperSourceConnectionsInfoTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperSourceConnectionsInfo model = BinaryData.fromString( - 
"{\"sourceEntities\":[{\"name\":\"epmywbormcqm\",\"properties\":{\"schema\":[{\"name\":\"qpkzfbojxjmcsmy\",\"dataType\":\"ixvcpwnkwywzwo\"},{\"name\":\"lickduoi\",\"dataType\":\"amt\"},{\"name\":\"sknxrwzawnvsbcf\",\"dataType\":\"agxnvhycvdimw\"},{\"name\":\"regzgyufutrwpwer\",\"dataType\":\"kzkdhmeott\"}],\"dslConnectorProperties\":[{\"name\":\"osxw\",\"value\":\"datanhjtf\"},{\"name\":\"n\",\"value\":\"datamiljpnwynud\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"zsauzp\",\"parameters\":{\"ezxlskihm\":\"dataeehuxiqhzlray\"}},\"linkedServiceType\":\"fdsajred\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"uwg\",\"value\":\"datavuafpwzyifr\"},{\"name\":\"wltxeqipxgzdyims\",\"value\":\"datayorpr\"}]}}") + "{\"sourceEntities\":[{\"name\":\"dkadppyibn\",\"properties\":{\"schema\":[{\"name\":\"ywrxw\",\"dataType\":\"dtluvvadswzsn\"},{\"name\":\"emlowuowhl\",\"dataType\":\"nwyrmouv\"},{\"name\":\"gmokzkltrfowt\",\"dataType\":\"rfmvlihcvjdrqcrj\"}],\"dslConnectorProperties\":[{\"name\":\"tuk\",\"value\":\"datadxlwyojbfqz\"}]}},{\"name\":\"fnjyix\",\"properties\":{\"schema\":[{\"name\":\"tqxmbj\",\"dataType\":\"umzznvalqjrhuz\"},{\"name\":\"xonjtpusl\",\"dataType\":\"wpvtiotzbpdbol\"},{\"name\":\"ryf\",\"dataType\":\"uasigr\"}],\"dslConnectorProperties\":[{\"name\":\"c\",\"value\":\"dataquygdjboqgrmtq\"},{\"name\":\"qevadrmmw\",\"value\":\"dataawvcmjzkxiid\"},{\"name\":\"czskoswoqiqazu\",\"value\":\"datamxzkr\"},{\"name\":\"oiisbamnppcce\",\"value\":\"dataztdsbeza\"}]}},{\"name\":\"f\",\"properties\":{\"schema\":[{\"name\":\"izhyhnep\",\"dataType\":\"etiarxqiubxdukec\"},{\"name\":\"dazvdhctm\",\"dataType\":\"os\"},{\"name\":\"dblnsntrp\",\"dataType\":\"qkio\"},{\"name\":\"btfmhklbnldpvcbh\",\"dataType\":\"zyqu\"}],\"dslConnectorProperties\":[{\"name\":\"xut\",\"value\":\"databrruyuuatv\"},{\"name\":\"bjwcolbmx\",\"value\":\"datanwtpcpahprz\"}]}},{\"name\":\"xhmtfhocnxzc\",\"properties\":{\"schema\":[{\"name\":\"xnoqrxtdisn\",\"dataType\":\"vhdl\"},{\"name\":\"did\",\"dataType\":\"epfwwt\"}],\"dslConnectorProperties\":[{\"name\":\"esxxhmwcdbckyoi\",\"value\":\"datakxhnegknj\"},{\"name\":\"bhtmeplvuk\",\"value\":\"databrlbpgs\"},{\"name\":\"agnchjhgemuowaky\",\"value\":\"datalhjym\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"qt\",\"parameters\":{\"jomevtfycnlb\":\"datarclsso\",\"kk\":\"datagjco\",\"ytssikiz\":\"dataji\"}},\"linkedServiceType\":\"ufqbvntnr\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"cekxgnly\",\"value\":\"dataxcpwzvmdok\"},{\"name\":\"dt\",\"value\":\"datalwxlboncqbazq\"},{\"name\":\"qchygtv\",\"value\":\"datayjanepubdp\"}]}}") .toObject(MapperSourceConnectionsInfo.class); - Assertions.assertEquals("epmywbormcqm", model.sourceEntities().get(0).name()); - Assertions.assertEquals("qpkzfbojxjmcsmy", model.sourceEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("ixvcpwnkwywzwo", model.sourceEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("osxw", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("zsauzp", model.connection().linkedService().referenceName()); - Assertions.assertEquals("fdsajred", model.connection().linkedServiceType()); + Assertions.assertEquals("dkadppyibn", model.sourceEntities().get(0).name()); + Assertions.assertEquals("ywrxw", model.sourceEntities().get(0).schema().get(0).name()); + Assertions.assertEquals("dtluvvadswzsn", 
model.sourceEntities().get(0).schema().get(0).dataType()); + Assertions.assertEquals("tuk", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); + Assertions.assertEquals("qt", model.connection().linkedService().referenceName()); + Assertions.assertEquals("ufqbvntnr", model.connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(true, model.connection().isInlineDataset()); - Assertions.assertEquals("uwg", model.connection().commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals(false, model.connection().isInlineDataset()); + Assertions.assertEquals("cekxgnly", model.connection().commonDslConnectorProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperSourceConnectionsInfo model - = new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable().withName("epmywbormcqm") - .withSchema(Arrays.asList( - new MapperTableSchema().withName("qpkzfbojxjmcsmy").withDataType("ixvcpwnkwywzwo"), - new MapperTableSchema().withName("lickduoi").withDataType("amt"), - new MapperTableSchema().withName("sknxrwzawnvsbcf").withDataType("agxnvhycvdimw"), - new MapperTableSchema().withName("regzgyufutrwpwer").withDataType("kzkdhmeott"))) + MapperSourceConnectionsInfo model = new MapperSourceConnectionsInfo() + .withSourceEntities(Arrays.asList( + new MapperTable().withName("dkadppyibn") + .withSchema(Arrays.asList(new MapperTableSchema().withName("ywrxw").withDataType("dtluvvadswzsn"), + new MapperTableSchema().withName("emlowuowhl").withDataType("nwyrmouv"), + new MapperTableSchema().withName("gmokzkltrfowt").withDataType("rfmvlihcvjdrqcrj"))) .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("osxw").withValue("datanhjtf"), - new MapperDslConnectorProperties().withName("n").withValue("datamiljpnwynud"))))) - .withConnection( - new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("zsauzp") - .withParameters(mapOf("ezxlskihm", "dataeehuxiqhzlray"))) - .withLinkedServiceType("fdsajred").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("uwg").withValue("datavuafpwzyifr"), - new MapperDslConnectorProperties().withName("wltxeqipxgzdyims").withValue("datayorpr")))); + Arrays.asList(new MapperDslConnectorProperties().withName("tuk").withValue("datadxlwyojbfqz"))), + new MapperTable().withName("fnjyix") + .withSchema(Arrays.asList(new MapperTableSchema().withName("tqxmbj").withDataType("umzznvalqjrhuz"), + new MapperTableSchema().withName("xonjtpusl").withDataType("wpvtiotzbpdbol"), + new MapperTableSchema().withName("ryf").withDataType("uasigr"))) + .withDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("c").withValue("dataquygdjboqgrmtq"), + new MapperDslConnectorProperties().withName("qevadrmmw").withValue("dataawvcmjzkxiid"), + new MapperDslConnectorProperties().withName("czskoswoqiqazu").withValue("datamxzkr"), + new MapperDslConnectorProperties().withName("oiisbamnppcce").withValue("dataztdsbeza"))), + new MapperTable().withName("f") + .withSchema( + Arrays.asList(new MapperTableSchema().withName("izhyhnep").withDataType("etiarxqiubxdukec"), + new MapperTableSchema().withName("dazvdhctm").withDataType("os"), + new MapperTableSchema().withName("dblnsntrp").withDataType("qkio"), + new 
MapperTableSchema().withName("btfmhklbnldpvcbh").withDataType("zyqu"))) + .withDslConnectorProperties( + Arrays.asList(new MapperDslConnectorProperties().withName("xut").withValue("databrruyuuatv"), + new MapperDslConnectorProperties().withName("bjwcolbmx").withValue("datanwtpcpahprz"))), + new MapperTable().withName("xhmtfhocnxzc") + .withSchema(Arrays.asList(new MapperTableSchema().withName("xnoqrxtdisn").withDataType("vhdl"), + new MapperTableSchema().withName("did").withDataType("epfwwt"))) + .withDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("esxxhmwcdbckyoi").withValue("datakxhnegknj"), + new MapperDslConnectorProperties().withName("bhtmeplvuk").withValue("databrlbpgs"), + new MapperDslConnectorProperties().withName("agnchjhgemuowaky").withValue("datalhjym"))))) + .withConnection( + new MapperConnection() + .withLinkedService(new LinkedServiceReference().withReferenceName("qt") + .withParameters(mapOf("jomevtfycnlb", "datarclsso", "kk", "datagjco", "ytssikiz", "dataji"))) + .withLinkedServiceType("ufqbvntnr") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(false) + .withCommonDslConnectorProperties(Arrays.asList( + new MapperDslConnectorProperties().withName("cekxgnly").withValue("dataxcpwzvmdok"), + new MapperDslConnectorProperties().withName("dt").withValue("datalwxlboncqbazq"), + new MapperDslConnectorProperties().withName("qchygtv").withValue("datayjanepubdp")))); model = BinaryData.fromObject(model).toObject(MapperSourceConnectionsInfo.class); - Assertions.assertEquals("epmywbormcqm", model.sourceEntities().get(0).name()); - Assertions.assertEquals("qpkzfbojxjmcsmy", model.sourceEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("ixvcpwnkwywzwo", model.sourceEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("osxw", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("zsauzp", model.connection().linkedService().referenceName()); - Assertions.assertEquals("fdsajred", model.connection().linkedServiceType()); + Assertions.assertEquals("dkadppyibn", model.sourceEntities().get(0).name()); + Assertions.assertEquals("ywrxw", model.sourceEntities().get(0).schema().get(0).name()); + Assertions.assertEquals("dtluvvadswzsn", model.sourceEntities().get(0).schema().get(0).dataType()); + Assertions.assertEquals("tuk", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); + Assertions.assertEquals("qt", model.connection().linkedService().referenceName()); + Assertions.assertEquals("ufqbvntnr", model.connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(true, model.connection().isInlineDataset()); - Assertions.assertEquals("uwg", model.connection().commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals(false, model.connection().isInlineDataset()); + Assertions.assertEquals("cekxgnly", model.connection().commonDslConnectorProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java index 98d17c76829c2..3d844a2489457 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java @@ -15,25 +15,26 @@ public final class MapperTablePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperTableProperties model = BinaryData.fromString( - "{\"schema\":[{\"name\":\"toi\",\"dataType\":\"gygvfltgvdiho\"},{\"name\":\"krxwet\",\"dataType\":\"drcyrucpcun\"}],\"dslConnectorProperties\":[{\"name\":\"qumoeno\",\"value\":\"dataaienhqhsknd\"},{\"name\":\"lqkaadlknwf\",\"value\":\"datanniyopetxi\"}]}") + "{\"schema\":[{\"name\":\"txwaljglzo\",\"dataType\":\"qwaafrqulhm\"},{\"name\":\"qb\",\"dataType\":\"vafjrqpjiyrqj\"}],\"dslConnectorProperties\":[{\"name\":\"xwmzwdfkbnrz\",\"value\":\"datapdltbq\"},{\"name\":\"qjf\",\"value\":\"dataxsaetg\"},{\"name\":\"gvpyigdaqqilzdc\",\"value\":\"datawjoe\"},{\"name\":\"ngucaifpaurww\",\"value\":\"datal\"}]}") .toObject(MapperTableProperties.class); - Assertions.assertEquals("toi", model.schema().get(0).name()); - Assertions.assertEquals("gygvfltgvdiho", model.schema().get(0).dataType()); - Assertions.assertEquals("qumoeno", model.dslConnectorProperties().get(0).name()); + Assertions.assertEquals("txwaljglzo", model.schema().get(0).name()); + Assertions.assertEquals("qwaafrqulhm", model.schema().get(0).dataType()); + Assertions.assertEquals("xwmzwdfkbnrz", model.dslConnectorProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperTableProperties model - = new MapperTableProperties() - .withSchema(Arrays.asList(new MapperTableSchema().withName("toi").withDataType("gygvfltgvdiho"), - new MapperTableSchema().withName("krxwet").withDataType("drcyrucpcun"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("qumoeno").withValue("dataaienhqhsknd"), - new MapperDslConnectorProperties().withName("lqkaadlknwf").withValue("datanniyopetxi"))); + MapperTableProperties model = new MapperTableProperties() + .withSchema(Arrays.asList(new MapperTableSchema().withName("txwaljglzo").withDataType("qwaafrqulhm"), + new MapperTableSchema().withName("qb").withDataType("vafjrqpjiyrqj"))) + .withDslConnectorProperties( + Arrays.asList(new MapperDslConnectorProperties().withName("xwmzwdfkbnrz").withValue("datapdltbq"), + new MapperDslConnectorProperties().withName("qjf").withValue("dataxsaetg"), + new MapperDslConnectorProperties().withName("gvpyigdaqqilzdc").withValue("datawjoe"), + new MapperDslConnectorProperties().withName("ngucaifpaurww").withValue("datal"))); model = BinaryData.fromObject(model).toObject(MapperTableProperties.class); - Assertions.assertEquals("toi", model.schema().get(0).name()); - Assertions.assertEquals("gygvfltgvdiho", model.schema().get(0).dataType()); - Assertions.assertEquals("qumoeno", model.dslConnectorProperties().get(0).name()); + Assertions.assertEquals("txwaljglzo", model.schema().get(0).name()); + Assertions.assertEquals("qwaafrqulhm", model.schema().get(0).dataType()); + Assertions.assertEquals("xwmzwdfkbnrz", model.dslConnectorProperties().get(0).name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java index 1946ca2d58aec..6d8d52c76128e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java @@ -11,17 +11,17 @@ public final class MapperTableSchemaTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MapperTableSchema model - = BinaryData.fromString("{\"name\":\"nrlyxnuc\",\"dataType\":\"p\"}").toObject(MapperTableSchema.class); - Assertions.assertEquals("nrlyxnuc", model.name()); - Assertions.assertEquals("p", model.dataType()); + MapperTableSchema model = BinaryData.fromString("{\"name\":\"qqa\",\"dataType\":\"mkxwxdcvjwcyziak\"}") + .toObject(MapperTableSchema.class); + Assertions.assertEquals("qqa", model.name()); + Assertions.assertEquals("mkxwxdcvjwcyziak", model.dataType()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperTableSchema model = new MapperTableSchema().withName("nrlyxnuc").withDataType("p"); + MapperTableSchema model = new MapperTableSchema().withName("qqa").withDataType("mkxwxdcvjwcyziak"); model = BinaryData.fromObject(model).toObject(MapperTableSchema.class); - Assertions.assertEquals("nrlyxnuc", model.name()); - Assertions.assertEquals("p", model.dataType()); + Assertions.assertEquals("qqa", model.name()); + Assertions.assertEquals("mkxwxdcvjwcyziak", model.dataType()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java index eb90202842e39..e6f84aedbd09d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java @@ -15,27 +15,24 @@ public final class MapperTableTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperTable model = BinaryData.fromString( - "{\"name\":\"kj\",\"properties\":{\"schema\":[{\"name\":\"abnsmj\",\"dataType\":\"ynq\"}],\"dslConnectorProperties\":[{\"name\":\"qs\",\"value\":\"datavwjtqpkevmyltjc\"},{\"name\":\"pxklurccl\",\"value\":\"dataxa\"},{\"name\":\"noytzposewxigp\",\"value\":\"datakqma\"},{\"name\":\"xvpif\",\"value\":\"dataaifyzyzeyuubeids\"}]}}") + "{\"name\":\"yqvgxia\",\"properties\":{\"schema\":[{\"name\":\"ohkxdxuw\",\"dataType\":\"ifmcwnosbzlehg\"}],\"dslConnectorProperties\":[{\"name\":\"cknjolgj\",\"value\":\"dataxpvelszerqzevxo\"}]}}") .toObject(MapperTable.class); - Assertions.assertEquals("kj", model.name()); - Assertions.assertEquals("abnsmj", model.schema().get(0).name()); - Assertions.assertEquals("ynq", model.schema().get(0).dataType()); - Assertions.assertEquals("qs", model.dslConnectorProperties().get(0).name()); + Assertions.assertEquals("yqvgxia", model.name()); + Assertions.assertEquals("ohkxdxuw", model.schema().get(0).name()); + Assertions.assertEquals("ifmcwnosbzlehg", model.schema().get(0).dataType()); + Assertions.assertEquals("cknjolgj", 
model.dslConnectorProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MapperTable model = new MapperTable().withName("kj") - .withSchema(Arrays.asList(new MapperTableSchema().withName("abnsmj").withDataType("ynq"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("qs").withValue("datavwjtqpkevmyltjc"), - new MapperDslConnectorProperties().withName("pxklurccl").withValue("dataxa"), - new MapperDslConnectorProperties().withName("noytzposewxigp").withValue("datakqma"), - new MapperDslConnectorProperties().withName("xvpif").withValue("dataaifyzyzeyuubeids"))); + MapperTable model = new MapperTable().withName("yqvgxia") + .withSchema(Arrays.asList(new MapperTableSchema().withName("ohkxdxuw").withDataType("ifmcwnosbzlehg"))) + .withDslConnectorProperties(Arrays + .asList(new MapperDslConnectorProperties().withName("cknjolgj").withValue("dataxpvelszerqzevxo"))); model = BinaryData.fromObject(model).toObject(MapperTable.class); - Assertions.assertEquals("kj", model.name()); - Assertions.assertEquals("abnsmj", model.schema().get(0).name()); - Assertions.assertEquals("ynq", model.schema().get(0).dataType()); - Assertions.assertEquals("qs", model.dslConnectorProperties().get(0).name()); + Assertions.assertEquals("yqvgxia", model.name()); + Assertions.assertEquals("ohkxdxuw", model.schema().get(0).name()); + Assertions.assertEquals("ifmcwnosbzlehg", model.schema().get(0).dataType()); + Assertions.assertEquals("cknjolgj", model.dslConnectorProperties().get(0).name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java index 5859305a11d0e..10c071b5ef9dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java @@ -27,30 +27,30 @@ public final class MapperTargetConnectionsInfoTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MapperTargetConnectionsInfo model = BinaryData.fromString( - 
"{\"targetEntities\":[{\"name\":\"slzoyov\",\"properties\":{\"schema\":[{\"name\":\"qvybefg\",\"dataType\":\"x\"},{\"name\":\"kcvtl\",\"dataType\":\"seskvcuar\"},{\"name\":\"hunlpirykycnd\",\"dataType\":\"qi\"}],\"dslConnectorProperties\":[{\"name\":\"uykbbmn\",\"value\":\"datagltbxoeeo\"},{\"name\":\"lnf\",\"value\":\"datay\"},{\"name\":\"vqdbpbhfck\",\"value\":\"dataezcrcssbzhddubb\"},{\"name\":\"fblhkalehp\",\"value\":\"dataawugiqjti\"}]}},{\"name\":\"qgdm\",\"properties\":{\"schema\":[{\"name\":\"teajohiyg\",\"dataType\":\"n\"},{\"name\":\"n\",\"dataType\":\"czykmktpvw\"},{\"name\":\"csehchkhufm\",\"dataType\":\"umqy\"}],\"dslConnectorProperties\":[{\"name\":\"zulo\",\"value\":\"dataaeuzanh\"},{\"name\":\"nhsenwphpzfng\",\"value\":\"dataclid\"},{\"name\":\"u\",\"value\":\"datajj\"},{\"name\":\"wbeqrkuor\",\"value\":\"datassruqnmdvhazcvj\"}]}},{\"name\":\"iqswbqer\",\"properties\":{\"schema\":[{\"name\":\"txtd\",\"dataType\":\"kvlbpktgdstyoua\"},{\"name\":\"ewres\",\"dataType\":\"owegmmutey\"}],\"dslConnectorProperties\":[{\"name\":\"uqi\",\"value\":\"datajiitnspxlzdesygr\"},{\"name\":\"waiufanra\",\"value\":\"datafueqfrojs\"},{\"name\":\"grhydk\",\"value\":\"dataywezskiecafyg\"},{\"name\":\"xieqv\",\"value\":\"datamakli\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"ah\",\"parameters\":{\"tblxpkkwjdjodqhy\":\"dataalybxawoijpo\",\"mehllizhceu\":\"dataincnr\",\"ibngqladyw\":\"dataoqodkadpp\",\"ds\":\"dataxwhydtluvv\"}},\"linkedServiceType\":\"snuyemlowuowhl\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"ouvblgmo\",\"value\":\"datakltrfow\"},{\"name\":\"vrfmvlihcvjd\",\"value\":\"datacrjidhftukv\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"wyojbfqzdkfnjyi\",\"sourceEntityName\":\"afr\",\"sourceConnectionReference\":{\"connectionName\":\"xmbjroum\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"jrhuzgf\",\"type\":\"Aggregate\",\"functionName\":\"tpusllywp\",\"expression\":\"iotzbpdbollgryfq\",\"attributeReference\":{},\"attributeReferences\":[{},{},{},{}]}]},\"sourceDenormalizeInfo\":\"datagrowsoc\"},{\"targetEntityName\":\"quygdjboqgrmtq\",\"sourceEntityName\":\"qevadrmmw\",\"sourceConnectionReference\":{\"connectionName\":\"wvcmj\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"scz\",\"type\":\"Aggregate\",\"functionName\":\"woqiqazugamxzkrr\",\"expression\":\"iisb\",\"attributeReference\":{},\"attributeReferences\":[{}]},{\"name\":\"ccek\",\"type\":\"Derived\",\"functionName\":\"sbezaxyfukzxuizh\",\"expression\":\"nepk\",\"attributeReference\":{},\"attributeReferences\":[{},{},{},{}]},{\"name\":\"rx\",\"type\":\"Direct\",\"functionName\":\"xdukecpxd\",\"expression\":\"v\",\"attributeReference\":{},\"attributeReferences\":[{},{}]}]},\"sourceDenormalizeInfo\":\"datamkoszudbl\"}],\"relationships\":[\"datatrpc\",\"dataqkio\",\"datakb\"]}") + 
"{\"targetEntities\":[{\"name\":\"usuv\",\"properties\":{\"schema\":[{\"name\":\"wci\",\"dataType\":\"jsllfryvd\"},{\"name\":\"x\",\"dataType\":\"q\"},{\"name\":\"frgnawbabgfbktyj\",\"dataType\":\"czlfsy\"},{\"name\":\"frbzgowo\",\"dataType\":\"mj\"}],\"dslConnectorProperties\":[{\"name\":\"xnyqgxhlusr\",\"value\":\"datai\"}]}},{\"name\":\"mjceagbjqvls\",\"properties\":{\"schema\":[{\"name\":\"ashxgonoyjfqi\",\"dataType\":\"b\"}],\"dslConnectorProperties\":[{\"name\":\"lkfk\",\"value\":\"databgvopemt\"},{\"name\":\"qujlyegqa\",\"value\":\"dataigflqqbtnyjp\"},{\"name\":\"xdb\",\"value\":\"dataabmvmsxbaevwjc\"}]}},{\"name\":\"ot\",\"properties\":{\"schema\":[{\"name\":\"vajmailfem\",\"dataType\":\"zakzwjiqullqxbd\"},{\"name\":\"rscmqerndbrnye\",\"dataType\":\"ltfnnxrkad\"},{\"name\":\"ynnfmuiii\",\"dataType\":\"pfoh\"},{\"name\":\"fkx\",\"dataType\":\"cbrwjiutgnjizbe\"}],\"dslConnectorProperties\":[{\"name\":\"ymrvzbju\",\"value\":\"datasrziuct\"},{\"name\":\"gbdsuifrev\",\"value\":\"dataapezkiswqjmdghs\"},{\"name\":\"arybjufptbjc\",\"value\":\"datanciuiyqvldaswvpp\"},{\"name\":\"qqzlgcndhz\",\"value\":\"datarfc\"}]}},{\"name\":\"r\",\"properties\":{\"schema\":[{\"name\":\"nxu\",\"dataType\":\"kpphefsbzx\"},{\"name\":\"zxomeikjclwz\",\"dataType\":\"nmwpf\"}],\"dslConnectorProperties\":[{\"name\":\"aazyqbxyxoyfpuqq\",\"value\":\"dataezxlhdjzqdca\"},{\"name\":\"vpsozjiihjri\",\"value\":\"datamrzoep\"},{\"name\":\"wdvwnjkgvfnmx\",\"value\":\"datarsqftib\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"buyvp\",\"parameters\":{\"nrexkxbhxv\":\"dataqjpnqnoowsbed\",\"nfa\":\"datacnulgmnhjevdy\"}},\"linkedServiceType\":\"vkskmqozzk\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"iizjixlqfhefkwa\",\"value\":\"dataolro\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"mgnlqxsjxtel\",\"sourceEntityName\":\"hvuqbo\",\"sourceConnectionReference\":{\"connectionName\":\"lzqocark\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"dtsnxawqytllhdyz\",\"type\":\"Derived\",\"functionName\":\"zexnxakcky\",\"expression\":\"mxgaabjkdtfohfao\",\"attributeReference\":{},\"attributeReferences\":[{},{}]},{\"name\":\"wrsiwdyjqurykc\",\"type\":\"Direct\",\"functionName\":\"eek\",\"expression\":\"uehogdd\",\"attributeReference\":{},\"attributeReferences\":[{},{}]}]},\"sourceDenormalizeInfo\":\"datagydlqidywm\"},{\"targetEntityName\":\"ptyrilkfbnrqqxv\",\"sourceEntityName\":\"pbnfnqtx\",\"sourceConnectionReference\":{\"connectionName\":\"m\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"ddepldwqjns\",\"type\":\"Direct\",\"functionName\":\"leexahvmywh\",\"expression\":\"rcarycsjj\",\"attributeReference\":{},\"attributeReferences\":[{}]},{\"name\":\"qajuvehzptdmk\",\"type\":\"Derived\",\"functionName\":\"mpfu\",\"expression\":\"be\",\"attributeReference\":{},\"attributeReferences\":[{},{},{},{}]},{\"name\":\"mfbfununmpzkrvfy\",\"type\":\"Aggregate\",\"functionName\":\"schlzvfictnkj\",\"expression\":\"gcwn\",\"attributeReference\":{},\"attributeReferences\":[{}]},{\"name\":\"fyrtogmhmjp\",\"type\":\"Direct\",\"functionName\":\"fp\",\"expression\":\"wtyg\",\"attributeReference\":{},\"attributeReferences\":[{},{},{},{}]}]},\"sourceDenormalizeInfo\":\"datasehar\"},{\"targetEntityName\":\"fv\",\"sourceEntityName\":\"rxtmbpjptnvw\",\"sourceConnectionReference\":{\"connectionName\":\"sidqpxlbtpakft\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMap
pings\":[{\"name\":\"kyutrymdwmfjhpyc\",\"type\":\"Derived\",\"functionName\":\"vdwkqpldr\",\"expression\":\"fgnaavuagnteta\",\"attributeReference\":{},\"attributeReferences\":[{}]},{\"name\":\"dc\",\"type\":\"Aggregate\",\"functionName\":\"pfe\",\"expression\":\"i\",\"attributeReference\":{},\"attributeReferences\":[{},{}]},{\"name\":\"uyld\",\"type\":\"Aggregate\",\"functionName\":\"ybkcgsuthhllnmwy\",\"expression\":\"fxexlfciatxtjrr\",\"attributeReference\":{},\"attributeReferences\":[{},{},{}]},{\"name\":\"kjhhxdlajf\",\"type\":\"Aggregate\",\"functionName\":\"scv\",\"expression\":\"xlhuavkrmuk\",\"attributeReference\":{},\"attributeReferences\":[{},{},{}]}]},\"sourceDenormalizeInfo\":\"dataxett\"}],\"relationships\":[\"dataojfkq\",\"datadnqtoqxjhqx\",\"datas\"]}") .toObject(MapperTargetConnectionsInfo.class); - Assertions.assertEquals("slzoyov", model.targetEntities().get(0).name()); - Assertions.assertEquals("qvybefg", model.targetEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("x", model.targetEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("uykbbmn", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("ah", model.connection().linkedService().referenceName()); - Assertions.assertEquals("snuyemlowuowhl", model.connection().linkedServiceType()); + Assertions.assertEquals("usuv", model.targetEntities().get(0).name()); + Assertions.assertEquals("wci", model.targetEntities().get(0).schema().get(0).name()); + Assertions.assertEquals("jsllfryvd", model.targetEntities().get(0).schema().get(0).dataType()); + Assertions.assertEquals("xnyqgxhlusr", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); + Assertions.assertEquals("buyvp", model.connection().linkedService().referenceName()); + Assertions.assertEquals("vkskmqozzk", model.connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(false, model.connection().isInlineDataset()); - Assertions.assertEquals("ouvblgmo", model.connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("wyojbfqzdkfnjyi", model.dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("afr", model.dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("xmbjroum", + Assertions.assertEquals(true, model.connection().isInlineDataset()); + Assertions.assertEquals("iizjixlqfhefkwa", model.connection().commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals("mgnlqxsjxtel", model.dataMapperMappings().get(0).targetEntityName()); + Assertions.assertEquals("hvuqbo", model.dataMapperMappings().get(0).sourceEntityName()); + Assertions.assertEquals("lzqocark", model.dataMapperMappings().get(0).sourceConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("jrhuzgf", + Assertions.assertEquals("dtsnxawqytllhdyz", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, + Assertions.assertEquals(MappingType.DERIVED, model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("tpusllywp", + Assertions.assertEquals("zexnxakcky", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).functionName()); - 
Assertions.assertEquals("iotzbpdbollgryfq", + Assertions.assertEquals("mxgaabjkdtfohfao", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).expression()); } @@ -58,100 +58,166 @@ public void testDeserialize() throws Exception { public void testSerialize() throws Exception { MapperTargetConnectionsInfo model = new MapperTargetConnectionsInfo() .withTargetEntities(Arrays.asList( - new MapperTable().withName("slzoyov") - .withSchema(Arrays.asList(new MapperTableSchema().withName("qvybefg").withDataType("x"), - new MapperTableSchema().withName("kcvtl").withDataType("seskvcuar"), - new MapperTableSchema().withName("hunlpirykycnd").withDataType("qi"))) + new MapperTable().withName("usuv") + .withSchema(Arrays.asList(new MapperTableSchema().withName("wci").withDataType("jsllfryvd"), + new MapperTableSchema().withName("x").withDataType("q"), + new MapperTableSchema().withName("frgnawbabgfbktyj").withDataType("czlfsy"), + new MapperTableSchema().withName("frbzgowo").withDataType("mj"))) .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("uykbbmn").withValue("datagltbxoeeo"), - new MapperDslConnectorProperties().withName("lnf").withValue("datay"), - new MapperDslConnectorProperties().withName("vqdbpbhfck").withValue("dataezcrcssbzhddubb"), - new MapperDslConnectorProperties().withName("fblhkalehp").withValue("dataawugiqjti"))), - new MapperTable().withName("qgdm") - .withSchema(Arrays.asList(new MapperTableSchema().withName("teajohiyg").withDataType("n"), - new MapperTableSchema().withName("n").withDataType("czykmktpvw"), - new MapperTableSchema().withName("csehchkhufm").withDataType("umqy"))) - .withDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("zulo").withValue("dataaeuzanh"), - new MapperDslConnectorProperties().withName("nhsenwphpzfng").withValue("dataclid"), - new MapperDslConnectorProperties().withName("u").withValue("datajj"), - new MapperDslConnectorProperties().withName("wbeqrkuor").withValue("datassruqnmdvhazcvj"))), - new MapperTable().withName("iqswbqer") - .withSchema(Arrays.asList(new MapperTableSchema().withName("txtd").withDataType("kvlbpktgdstyoua"), - new MapperTableSchema().withName("ewres").withDataType("owegmmutey"))) + Arrays.asList(new MapperDslConnectorProperties().withName("xnyqgxhlusr").withValue("datai"))), + new MapperTable().withName("mjceagbjqvls") + .withSchema(Arrays.asList(new MapperTableSchema().withName("ashxgonoyjfqi").withDataType("b"))) + .withDslConnectorProperties( + Arrays.asList(new MapperDslConnectorProperties().withName("lkfk").withValue("databgvopemt"), + new MapperDslConnectorProperties().withName("qujlyegqa").withValue("dataigflqqbtnyjp"), + new MapperDslConnectorProperties().withName("xdb").withValue("dataabmvmsxbaevwjc"))), + new MapperTable().withName("ot") + .withSchema( + Arrays.asList(new MapperTableSchema().withName("vajmailfem").withDataType("zakzwjiqullqxbd"), + new MapperTableSchema().withName("rscmqerndbrnye").withDataType("ltfnnxrkad"), + new MapperTableSchema().withName("ynnfmuiii").withDataType("pfoh"), + new MapperTableSchema().withName("fkx").withDataType("cbrwjiutgnjizbe"))) + .withDslConnectorProperties( + Arrays.asList(new MapperDslConnectorProperties().withName("ymrvzbju").withValue("datasrziuct"), + new MapperDslConnectorProperties().withName("gbdsuifrev").withValue("dataapezkiswqjmdghs"), + new MapperDslConnectorProperties().withName("arybjufptbjc") + .withValue("datanciuiyqvldaswvpp"), + new 
MapperDslConnectorProperties().withName("qqzlgcndhz").withValue("datarfc"))), + new MapperTable().withName("r") + .withSchema(Arrays.asList(new MapperTableSchema().withName("nxu").withDataType("kpphefsbzx"), + new MapperTableSchema().withName("zxomeikjclwz").withDataType("nmwpf"))) .withDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("uqi").withValue("datajiitnspxlzdesygr"), - new MapperDslConnectorProperties().withName("waiufanra").withValue("datafueqfrojs"), - new MapperDslConnectorProperties().withName("grhydk").withValue("dataywezskiecafyg"), - new MapperDslConnectorProperties().withName("xieqv").withValue("datamakli"))))) + new MapperDslConnectorProperties().withName("aazyqbxyxoyfpuqq").withValue("dataezxlhdjzqdca"), + new MapperDslConnectorProperties().withName("vpsozjiihjri").withValue("datamrzoep"), + new MapperDslConnectorProperties().withName("wdvwnjkgvfnmx").withValue("datarsqftib"))))) .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("ah") - .withParameters(mapOf("tblxpkkwjdjodqhy", "dataalybxawoijpo", "mehllizhceu", "dataincnr", - "ibngqladyw", "dataoqodkadpp", "ds", "dataxwhydtluvv"))) - .withLinkedServiceType("snuyemlowuowhl").withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false).withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("ouvblgmo").withValue("datakltrfow"), - new MapperDslConnectorProperties().withName("vrfmvlihcvjd").withValue("datacrjidhftukv")))) + .withLinkedService(new LinkedServiceReference().withReferenceName("buyvp") + .withParameters(mapOf("nrexkxbhxv", "dataqjpnqnoowsbed", "nfa", "datacnulgmnhjevdy"))) + .withLinkedServiceType("vkskmqozzk") + .withType(ConnectionType.LINKEDSERVICETYPE) + .withIsInlineDataset(true) + .withCommonDslConnectorProperties(Arrays + .asList(new MapperDslConnectorProperties().withName("iizjixlqfhefkwa").withValue("dataolro")))) .withDataMapperMappings( - Arrays - .asList( - new DataMapperMapping().withTargetEntityName("wyojbfqzdkfnjyi").withSourceEntityName("afr") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("xmbjroum").withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings().withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("jrhuzgf").withType(MappingType.AGGREGATE) - .withFunctionName("tpusllywp").withExpression("iotzbpdbollgryfq") + Arrays.asList( + new DataMapperMapping().withTargetEntityName("mgnlqxsjxtel") + .withSourceEntityName("hvuqbo") + .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("lzqocark") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo( + new MapperAttributeMappings() + .withAttributeMappings(Arrays.asList( + new MapperAttributeMapping().withName("dtsnxawqytllhdyz") + .withType(MappingType.DERIVED) + .withFunctionName("zexnxakcky") + .withExpression("mxgaabjkdtfohfao") + .withAttributeReference(new MapperAttributeReference()) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), + new MapperAttributeReference())), + new MapperAttributeMapping().withName("wrsiwdyjqurykc") + .withType(MappingType.DIRECT) + .withFunctionName("eek") + .withExpression("uehogdd") .withAttributeReference(new MapperAttributeReference()) .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), - new MapperAttributeReference(), new MapperAttributeReference(), new 
MapperAttributeReference()))))) - .withSourceDenormalizeInfo("datagrowsoc"), - new DataMapperMapping().withTargetEntityName("quygdjboqgrmtq").withSourceEntityName("qevadrmmw") - .withSourceConnectionReference(new MapperConnectionReference() - .withConnectionName("wvcmj").withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings().withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("scz").withType(MappingType.AGGREGATE) - .withFunctionName("woqiqazugamxzkrr").withExpression("iisb") + .withSourceDenormalizeInfo("datagydlqidywm"), + new DataMapperMapping().withTargetEntityName("ptyrilkfbnrqqxv") + .withSourceEntityName("pbnfnqtx") + .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("m") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo( + new MapperAttributeMappings().withAttributeMappings( + Arrays.asList( + new MapperAttributeMapping().withName("ddepldwqjns") + .withType(MappingType.DIRECT) + .withFunctionName("leexahvmywh") + .withExpression("rcarycsjj") .withAttributeReference(new MapperAttributeReference()) .withAttributeReferences(Arrays.asList(new MapperAttributeReference())), - new MapperAttributeMapping().withName("ccek").withType(MappingType.DERIVED) - .withFunctionName("sbezaxyfukzxuizh").withExpression("nepk") + new MapperAttributeMapping().withName("qajuvehzptdmk") + .withType(MappingType.DERIVED) + .withFunctionName("mpfu") + .withExpression("be") .withAttributeReference(new MapperAttributeReference()) .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), new MapperAttributeReference(), new MapperAttributeReference(), new MapperAttributeReference())), - new MapperAttributeMapping().withName("rx").withType(MappingType.DIRECT) - .withFunctionName("xdukecpxd").withExpression("v") + new MapperAttributeMapping().withName("mfbfununmpzkrvfy") + .withType(MappingType.AGGREGATE) + .withFunctionName("schlzvfictnkj") + .withExpression("gcwn") + .withAttributeReference(new MapperAttributeReference()) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference())), + new MapperAttributeMapping().withName("fyrtogmhmjp") + .withType(MappingType.DIRECT) + .withFunctionName("fp") + .withExpression("wtyg") .withAttributeReference(new MapperAttributeReference()) .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), + new MapperAttributeReference(), new MapperAttributeReference(), new MapperAttributeReference()))))) - .withSourceDenormalizeInfo("datamkoszudbl"))) - .withRelationships(Arrays.asList("datatrpc", "dataqkio", "datakb")); + .withSourceDenormalizeInfo("datasehar"), + new DataMapperMapping().withTargetEntityName("fv") + .withSourceEntityName("rxtmbpjptnvw") + .withSourceConnectionReference( + new MapperConnectionReference().withConnectionName("sidqpxlbtpakft") + .withType(ConnectionType.LINKEDSERVICETYPE)) + .withAttributeMappingInfo( + new MapperAttributeMappings() + .withAttributeMappings(Arrays.asList( + new MapperAttributeMapping().withName("kyutrymdwmfjhpyc") + .withType(MappingType.DERIVED) + .withFunctionName("vdwkqpldr") + .withExpression("fgnaavuagnteta") + .withAttributeReference(new MapperAttributeReference()) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference())), + new MapperAttributeMapping().withName("dc") + .withType(MappingType.AGGREGATE) + .withFunctionName("pfe") + .withExpression("i") + .withAttributeReference(new MapperAttributeReference()) + 
.withAttributeReferences(Arrays.asList(new MapperAttributeReference(), + new MapperAttributeReference())), + new MapperAttributeMapping().withName("uyld") + .withType(MappingType.AGGREGATE) + .withFunctionName("ybkcgsuthhllnmwy") + .withExpression("fxexlfciatxtjrr") + .withAttributeReference(new MapperAttributeReference()) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), + new MapperAttributeReference(), new MapperAttributeReference())), + new MapperAttributeMapping().withName("kjhhxdlajf") + .withType(MappingType.AGGREGATE) + .withFunctionName("scv") + .withExpression("xlhuavkrmuk") + .withAttributeReference(new MapperAttributeReference()) + .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), + new MapperAttributeReference(), new MapperAttributeReference()))))) + .withSourceDenormalizeInfo("dataxett"))) + .withRelationships(Arrays.asList("dataojfkq", "datadnqtoqxjhqx", "datas")); model = BinaryData.fromObject(model).toObject(MapperTargetConnectionsInfo.class); - Assertions.assertEquals("slzoyov", model.targetEntities().get(0).name()); - Assertions.assertEquals("qvybefg", model.targetEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("x", model.targetEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("uykbbmn", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("ah", model.connection().linkedService().referenceName()); - Assertions.assertEquals("snuyemlowuowhl", model.connection().linkedServiceType()); + Assertions.assertEquals("usuv", model.targetEntities().get(0).name()); + Assertions.assertEquals("wci", model.targetEntities().get(0).schema().get(0).name()); + Assertions.assertEquals("jsllfryvd", model.targetEntities().get(0).schema().get(0).dataType()); + Assertions.assertEquals("xnyqgxhlusr", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); + Assertions.assertEquals("buyvp", model.connection().linkedService().referenceName()); + Assertions.assertEquals("vkskmqozzk", model.connection().linkedServiceType()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(false, model.connection().isInlineDataset()); - Assertions.assertEquals("ouvblgmo", model.connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("wyojbfqzdkfnjyi", model.dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("afr", model.dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("xmbjroum", + Assertions.assertEquals(true, model.connection().isInlineDataset()); + Assertions.assertEquals("iizjixlqfhefkwa", model.connection().commonDslConnectorProperties().get(0).name()); + Assertions.assertEquals("mgnlqxsjxtel", model.dataMapperMappings().get(0).targetEntityName()); + Assertions.assertEquals("hvuqbo", model.dataMapperMappings().get(0).sourceEntityName()); + Assertions.assertEquals("lzqocark", model.dataMapperMappings().get(0).sourceConnectionReference().connectionName()); Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("jrhuzgf", + Assertions.assertEquals("dtsnxawqytllhdyz", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, + Assertions.assertEquals(MappingType.DERIVED, 
model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("tpusllywp", + Assertions.assertEquals("zexnxakcky", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("iotzbpdbollgryfq", + Assertions.assertEquals("mxgaabjkdtfohfao", model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).expression()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java index efffbd4ab68b7..fd6744361800d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java @@ -23,192 +23,203 @@ public final class MappingDataFlowTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MappingDataFlow model = BinaryData.fromString( - "{\"type\":\"MappingDataFlow\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"wdyjqurykcrrauee\",\"parameters\":{\"cbcbgydlqidy\":\"datauehogdd\"}},\"name\":\"mhmpty\",\"description\":\"lkfbnrqqxvztpb\",\"dataset\":{\"referenceName\":\"nqtxjtomalswbnf\",\"parameters\":{\"qjn\":\"datapld\"}},\"linkedService\":{\"referenceName\":\"zygleexahvm\",\"parameters\":{\"sjjzyvoaqajuveh\":\"datasbrcary\",\"be\":\"dataptdmkrrbhmpful\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bpmfbfununmpzkrv\",\"datasetParameters\":\"dataifkdschlzvf\",\"parameters\":{\"rtogmhmj\":\"datankjjwgcwnphbkgf\",\"fp\":\"datajsc\",\"fv\":\"dataqwtygevgwmseharx\",\"x\":\"datan\"},\"\":{\"wjhrsidqpxlbtpa\":\"datapjptn\",\"ngatwmy\":\"dataf\",\"mfjhpycvjqdvdwkq\":\"datayutrymd\",\"n\":\"dataldrlefgnaavua\"}}},{\"schemaLinkedService\":{\"referenceName\":\"taoutnpdct\",\"parameters\":{\"y\":\"datapfe\",\"tybkcgs\":\"datahduyeuyldph\",\"x\":\"datathhllnmwyne\",\"fciatxtjrr\":\"datax\"}},\"name\":\"kmdskjhhxd\",\"description\":\"jfoxcxscvslxl\",\"dataset\":{\"referenceName\":\"a\",\"parameters\":{\"yjmkxettc\":\"datamuk\",\"xjhqxcsqhtkb\":\"datalojfkqidnqto\",\"dmbi\":\"datanqlrng\",\"qkzn\":\"datapsnaww\"}},\"linkedService\":{\"referenceName\":\"hllxricctkw\",\"parameters\":{\"xhdctrceqnk\":\"dataqoajxeiyglesrwva\",\"lj\":\"datarupobehd\",\"bibnzpphepifex\":\"dataacvumepj\",\"cjclykcgxv\":\"dataeqir\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"lvczu\",\"datasetParameters\":\"dataac\",\"parameters\":{\"koynuiylpckae\":\"dataettepdjxqe\",\"nzhctmjtsgh\":\"datasedveskwxegqphrg\",\"rpzeqac\":\"databcbcpz\",\"zshnuqndaizup\":\"dataldtzmpypefcp\"},\"\":{\"gw\":\"datauytuszxhmtvtv\",\"haokgkskjiv\":\"dataiukvzwydwt\"}}},{\"schemaLinkedService\":{\"referenceName\":\"shajqf\",\"parameters\":{\"hwu\":\"dataeexpgeumi\",\"dbzsx\":\"datatrdexyionofnin\",\"bzbcyksiv\":\"datawqqrsmpcbbprtuga\",\"rftsjcwjjxs\":\"datafogdrtbfcm\"}},\"name\":\"mb\",\"description\":\"vifdxkecifhocjx\",\"dataset\":{\"referenceName\":\"loozrvt\",\"parameters\":{\"cpxxvirye\":\"datamufun\",\"lpmcrdc\":\"datangjgvrquvpyg\",\"x\":\"dataeljtiahxmfqryarv\"}},\"linkedService\":{\"referenceName\":\"bglcjkayspthzodu\",\"parameters\":{\"djxyxgbkkqvj
cteo\":\"datamjtgblioskkfmkm\",\"pxvjnzd\":\"datadlrslskk\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"cojhpcnabx\",\"datasetParameters\":\"datasnggytexvzilmhiv\",\"parameters\":{\"cknrzda\":\"dataww\",\"eucyrth\":\"datalskzptjxul\"},\"\":{\"n\":\"dataehmcgcje\",\"qnttmbq\":\"dataehokamvfej\",\"kpysthhzagjf\":\"dataabzfivf\",\"ejgvkvebaqszllrz\":\"datayyrlhgenu\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"dqgmih\",\"parameters\":{\"inklogxs\":\"datamcqrhnxt\",\"bjwzzos\":\"datatzarhzvqnsqktc\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"kybtglwkzpgajsqj\",\"parameters\":{\"uqrebluimmbwx\":\"dataqbmfuvqarwz\",\"kraokq\":\"datafgtdmbvx\",\"aokbavlyttaaknwf\":\"databudbt\"}},\"name\":\"ke\",\"description\":\"mhpdu\",\"dataset\":{\"referenceName\":\"igatolekscbctna\",\"parameters\":{\"dpkawnsnl\":\"datamwbzxpdc\",\"bicziuswswj\":\"dataimouxwksqmudmfco\",\"fwbivqvo\":\"datakbqsjhbtqqvyfscy\",\"wvbhlimbyq\":\"datafuy\"}},\"linkedService\":{\"referenceName\":\"r\",\"parameters\":{\"asaxxo\":\"datalikcdrd\",\"kwiy\":\"datasm\",\"ukosrn\":\"datav\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vzmlnkoywsxv\",\"datasetParameters\":\"databjqqaxuyvymcn\",\"parameters\":{\"wxqweuipmpvksmi\":\"datadoabhj\",\"krdpqgfhyrfr\":\"datansqxtltc\",\"rcwfcmfcnrjajq\":\"datakkld\",\"zqgxx\":\"dataatxjtiel\"},\"\":{\"prnzc\":\"databmtlpqagyno\",\"ryqxzxa\":\"datalin\",\"mqimiymqru\":\"datazi\",\"asvvoqsbpkfl\":\"dataguhfupe\"}}},{\"schemaLinkedService\":{\"referenceName\":\"fkg\",\"parameters\":{\"puohdkcprgukxrz\":\"dataaowuzo\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"ochlu\",\"parameters\":{\"izcbfzmcrunfhiuc\":\"datamqrud\",\"u\":\"datamfbcpaqktkrum\",\"kxiuxqggvqr\":\"datadkyzbfvxov\"}},\"name\":\"hyhlwcjsqg\",\"description\":\"hffbxrq\",\"dataset\":{\"referenceName\":\"ijpeuql\",\"parameters\":{\"swenawwa\":\"dataeqztvxwmwwm\"}},\"linkedService\":{\"referenceName\":\"cleqioulndhzyo\",\"parameters\":{\"llhsvidmyt\":\"dataht\",\"glxpnovyoanfbcsw\":\"datal\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ywv\",\"datasetParameters\":\"dataigvjrktp\",\"parameters\":{\"mwhqnucsklh\":\"dataukyawoh\",\"sjt\":\"datai\"},\"\":{\"uoeedwjcci\":\"databninjgazlsvbzfc\",\"yehqbeivdlhydwb\":\"datalhsyekrdrenxolr\",\"mpathubtah\":\"databfgrlpunytjlkes\",\"niiwllbvgwz\":\"datae\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ft\",\"parameters\":{\"ktjtgra\":\"dataus\",\"fkbebauzl\":\"dataaqo\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"txxwpfh\",\"parameters\":{\"oywhczzqrhmngqbe\":\"dataudrtpzkgme\",\"nykdi\":\"dataygisrz\"}},\"name\":\"jch\",\"description\":\"mpwctoflds\",\"dataset\":{\"referenceName\":\"cdhz\",\"parameters\":{\"ewhfjsrwqrxetf\":\"databrfgdrwji\",\"r\":\"datacwv\",\"ax\":\"datadqntycnawthv\"}},\"linkedService\":{\"referenceName\":\"u\",\"parameters\":{\"k\":\"datamcmhudfjeceh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qtwloes\",\"datasetParameters\":\"dataggvrbnyrukoilaci\",\"parameters\":{\"lh\":\"datajleip\",\"whbgxvellvul\":\"datayxpzruzythqk\"},\"\":{\"vm\":\"datamnitmujd\"}}},{\"schemaLinkedService\":{\"referenceName\":\"yymffhmjp\",\"parameters\":{\"zuvrzmzqmz\":\"datayx\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"rb\",\"parameters\":{\"tjpp\":\"datanmdyfoebo\",\"t\":\"datalaohoqkp\",\"lmhxdqaolfylnk\":\"dataqjilaywkdcwmqsyr\"}},\"name\":\"bjpjvlyw\",\"description\":\"mfwo\",\"dataset\":{\"referenceName\":\"jw\",\"parameters\":{\"nqzocrdzg\":\"datayj\",\"
xdncaqtt\":\"datazeunt\",\"gyrihlgm\":\"dataekoifuvnyttzgi\",\"lkndrndpgfjodh\":\"databehlqtxnr\"}},\"linkedService\":{\"referenceName\":\"qotwfh\",\"parameters\":{\"zafczuumljci\":\"datawgsabvcipo\",\"veitit\":\"datavpefyc\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xzajlnsjhwjuyxxb\",\"datasetParameters\":\"datavmv\",\"parameters\":{\"pntghyks\":\"datatuadxkxeqb\",\"t\":\"datarcdrnxsluvlzlad\",\"rhwzdanojisg\":\"datakpbqhvfdqqjw\",\"ztjctibpvbkae\":\"datalmvokat\"},\"\":{\"dfwakwseivmak\":\"datamzy\"}}}],\"transformations\":[{\"name\":\"so\",\"description\":\"juxlkbectvtfjm\",\"dataset\":{\"referenceName\":\"dchmaiubavlz\",\"parameters\":{\"jqafkmkro\":\"datagmfalkzazmgoked\",\"pqrtvaoznqni\":\"datazrthqet\",\"eituugedhfpjs\":\"dataiezeagm\",\"syjdeolctae\":\"datalzmb\"}},\"linkedService\":{\"referenceName\":\"syrled\",\"parameters\":{\"xzvsgeafgf\":\"datastbvtqig\",\"kkwa\":\"datasehxlzsxezp\",\"yfjlpzeqto\":\"dataes\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"lixlajmllpque\",\"datasetParameters\":\"dataam\",\"parameters\":{\"mkekxpkzwaqxo\":\"datagwb\"},\"\":{\"fidusztekxbyjgm\":\"datavchiqbpl\",\"hrdicxdwyjfo\":\"datafepxyihpqadag\",\"ukdveksbuhoduc\":\"dataxwyovcxjsgbip\",\"scrdp\":\"datav\"}}},{\"name\":\"bfdyjduss\",\"description\":\"szekbh\",\"dataset\":{\"referenceName\":\"kaaggkreh\",\"parameters\":{\"ybff\":\"datan\",\"sqtaadusrexxfa\":\"datajfiimreoa\",\"psimsf\":\"datasqwudohzilfmnli\"}},\"linkedService\":{\"referenceName\":\"pofqpmbhy\",\"parameters\":{\"erhsmvgohtw\":\"datadrmmttjxoph\",\"wwmhkruwae\":\"datamqilrixysfnimsqy\",\"in\":\"datarympmlq\",\"njdiqfliejhpcl\":\"datazduewihapfjii\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"dfsbwceivb\",\"datasetParameters\":\"dataipbwxgooo\",\"parameters\":{\"s\":\"datarad\",\"g\":\"dataxknpdgz\",\"wwnbafoctohz\":\"datasugswhgsaod\",\"hoadhrsxqvzv\":\"dataaquvwsxbgnvkervq\"},\"\":{\"klrxhjnltce\":\"databdsrgfajglzrsu\",\"ie\":\"datajdvqy\"}}}],\"script\":\"kw\",\"scriptLines\":[\"wdxvqzxoebwg\",\"xbibanbaupw\",\"zvpaklozkxbzrpej\",\"lssan\"]},\"description\":\"ttkgsux\",\"annotations\":[\"dataswgkpjhboyikebh\",\"datahkslgwlokhueoij\",\"datazcqypzqzufgsyf\",\"datajyvdwtfxptpqayam\"],\"folder\":{\"name\":\"fgybmxs\"}}") + 
"{\"type\":\"qncjubkhjozfymcw\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"zipbwxgoooxzp\",\"parameters\":{\"gzigjsugswhgsaod\":\"datamskxknp\",\"aquvwsxbgnvkervq\":\"datawwnbafoctohz\",\"pabdsrg\":\"datahoadhrsxqvzv\",\"hjnlt\":\"dataajglzrsubklr\"}},\"name\":\"etjdvqydieqqkwa\",\"description\":\"wdxvqzxoebwg\",\"dataset\":{\"referenceName\":\"bibanbau\",\"parameters\":{\"lssan\":\"datazvpaklozkxbzrpej\"}},\"linkedService\":{\"referenceName\":\"ttkgsux\",\"parameters\":{\"hkslgwlokhueoij\":\"dataswgkpjhboyikebh\",\"jyvdwtfxptpqayam\":\"datazcqypzqzufgsyf\",\"ullojk\":\"datancfgybmxsnxoc\",\"g\":\"dataoy\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"j\",\"datasetParameters\":\"datadbdljz\",\"parameters\":{\"nq\":\"datarcvuqbsgzlrqhb\",\"jlf\":\"datagdxwbsfpyxx\",\"ilz\":\"dataecominxojjluxxd\",\"za\":\"datadzzqjmu\"},\"\":{\"vwta\":\"dataribqlotokh\",\"nkbtlwljss\":\"datankcqwwxwjyofgw\",\"isubxbteog\":\"datactsnldkpwol\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"ij\",\"parameters\":{\"m\":\"datawlefksxqceazfpxg\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"zvluyqqaiosss\",\"parameters\":{\"fxtjdhsoym\":\"dataaifppuacvfyeowp\",\"tkfvvdshx\":\"datapvtyqfttehdpbouj\",\"nquktrfnslnlrxs\":\"datadedsuenygnxcgjtf\"}},\"name\":\"ylt\",\"description\":\"ntf\",\"dataset\":{\"referenceName\":\"bgwjdxwnaz\",\"parameters\":{\"jwzzqseuzu\":\"datardreyzjwhsetw\",\"c\":\"datakykcyqhyqqzz\",\"ewfopazdazg\":\"datakeys\",\"mdpv\":\"datasqgpewqcfu\"}},\"linkedService\":{\"referenceName\":\"glq\",\"parameters\":{\"yowmndcovdw\":\"datanlzclctz\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"uxza\",\"datasetParameters\":\"datamk\",\"parameters\":{\"txudqyeme\":\"datauwkudrbcp\"},\"\":{\"mjauwcgx\":\"dataaucmcirtnee\",\"zerwgudas\":\"datafnohaitran\",\"ngfcocefhpriyl\":\"dataxubvf\"}}},{\"schemaLinkedService\":{\"referenceName\":\"pztraudsvhl\",\"parameters\":{\"egpqtmo\":\"dataul\",\"shqrdgrt\":\"datahvrztnvg\",\"fa\":\"datamewjzlpyk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"wjcayerzrr\",\"parameters\":{\"zrghsrleink\":\"dataybylpol\"}},\"name\":\"scjfncjwvuagf\",\"description\":\"tltng\",\"dataset\":{\"referenceName\":\"reuptrk\",\"parameters\":{\"s\":\"dataijajwolx\"}},\"linkedService\":{\"referenceName\":\"ghmp\",\"parameters\":{\"bxqla\":\"datalslrcigtzjc\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ssov\",\"datasetParameters\":\"datapavidnievwffc\",\"parameters\":{\"xgcbdsvalpnpt\":\"dataezslp\",\"x\":\"datatrkxgpazwu\"},\"\":{\"zz\":\"datanjobfelhldiu\",\"iudjp\":\"dataqlmfaewz\",\"qhnmhk\":\"datapqht\",\"gcruxspinym\":\"dataezsdsuxheq\"}}},{\"schemaLinkedService\":{\"referenceName\":\"gwokmikpazfbmjxu\",\"parameters\":{\"zaehpphthd\":\"datapfdvhaxdv\",\"tatlakfqoi\":\"datalmv\",\"rpogwphchg\":\"datawgiksbbvtoo\",\"htukfac\":\"datat\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mbf\",\"parameters\":{\"wcgasgom\":\"datameezbxvqxbnu\",\"qgo\":\"datamjzwx\",\"gfredmlscg\":\"datasxpwwztjfmkkh\"}},\"name\":\"ll\",\"description\":\"na\",\"dataset\":{\"referenceName\":\"jowazhpab\",\"parameters\":{\"wsxnsrqor\":\"datamlyotg\"}},\"linkedService\":{\"referenceName\":\"enmvcebxeetq\",\"parameters\":{\"jkedwqurcgojmrv\":\"datacxxqndc\",\"qqrsil\":\"dataxwjongzse\",\"sbvr\":\"datachskxxka\",\"rulfuct\":\"dataaqgvto\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"thcfj\",\"datasetParameters\":\"dataxlyubqjrostv\",\"parameters\":{\"qrztrxal\":\"dataqmt
zzb\",\"rsnrhp\":\"datardhabsr\",\"iwkkvya\":\"dataa\"},\"\":{\"qmmlivrjjxnwx\":\"datavvcsemsvuvdjkqxe\",\"jxlehzlx\":\"datachp\",\"w\":\"datagfquwz\",\"pjxljtxb\":\"dataibelwcerwkw\"}}},{\"schemaLinkedService\":{\"referenceName\":\"qtbxxniuisdzh\",\"parameters\":{\"cnadbuwqrgxf\":\"datagzpags\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mqiyn\",\"parameters\":{\"vmtumxpym\":\"dataellnkkii\",\"canlduwzorxs\":\"datajfuaxroqvqpilrgu\",\"kqv\":\"datamxaqklxym\",\"ltuubw\":\"dataqepdx\"}},\"name\":\"vpjbowcpjqduq\",\"description\":\"xe\",\"dataset\":{\"referenceName\":\"ydfbwlj\",\"parameters\":{\"iegftc\":\"datauerkjddvrg\",\"fgmwd\":\"databiiftksdwgdnk\",\"buvczldbglzoutb\":\"datac\",\"orbjg\":\"dataaqgzekajclyzgs\"}},\"linkedService\":{\"referenceName\":\"zjotvmrxkhlo\",\"parameters\":{\"yu\":\"datajbhvhdiqayflu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"uudtelv\",\"datasetParameters\":\"dataibdrqrswhbuubpyr\",\"parameters\":{\"nnctagfyvrt\":\"datajoxztfwfqchvczev\",\"cgkrepdqhqy\":\"dataqpemh\"},\"\":{\"xin\":\"datawemvxqabckmze\",\"eezn\":\"datagreohtwhlpuzjp\",\"bfaxyxzlbc\":\"dataangp\",\"ndktxfv\":\"dataphmsexroq\"}}}],\"transformations\":[{\"name\":\"eeqgpkri\",\"description\":\"bgnixxoww\",\"dataset\":{\"referenceName\":\"yfwnw\",\"parameters\":{\"icrmpepkldmaxxi\":\"dataxe\",\"ws\":\"datavs\",\"wrasekw\":\"datagkjgya\"}},\"linkedService\":{\"referenceName\":\"cvo\",\"parameters\":{\"wyxqiclad\":\"dataoqar\",\"ob\":\"dataatdavuqmcbyms\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"uvjezcjumvps\",\"datasetParameters\":\"dataioyoiglkmiq\",\"parameters\":{\"ydgnha\":\"dataraclibbfqpspkla\",\"gpmnmabe\":\"datatwukex\"},\"\":{\"gq\":\"datailwgdfpfqfpcvstc\",\"jj\":\"datavwerfwxbsmtb\",\"ci\":\"datah\",\"ekqhs\":\"datakwdvbtb\"}}},{\"name\":\"htfpwpqb\",\"description\":\"j\",\"dataset\":{\"referenceName\":\"yqwdqi\",\"parameters\":{\"nztxlujkh\":\"datahg\"}},\"linkedService\":{\"referenceName\":\"cmrnkfmkhcq\",\"parameters\":{\"eahjedv\":\"datalmhjnqt\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"pok\",\"datasetParameters\":\"dataxejhh\",\"parameters\":{\"tbkewkqynspg\":\"datauavt\",\"qokmyrljial\":\"datavoffbkkwvdxae\"},\"\":{\"cjtrpzu\":\"dataobrqlp\",\"lwgdhuruz\":\"dataudivbxnhsqeaeonq\"}}},{\"name\":\"tzarogatmoljiy\",\"description\":\"pinm\",\"dataset\":{\"referenceName\":\"fkneerzztrkn\",\"parameters\":{\"hkeizcp\":\"datalugdybnhrxlelf\",\"wjekptycaydbj\":\"datahtdm\",\"ymlcfnzhmhsurlg\":\"datac\",\"lawiubmomsgvvjhv\":\"dataqkpmmzpstau\"}},\"linkedService\":{\"referenceName\":\"rlohe\",\"parameters\":{\"lqionswaeqk\":\"datamajnkd\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qxjoshohtotryeg\",\"datasetParameters\":\"datah\",\"parameters\":{\"erufgjblcdrmy\":\"dataexznlwkbfokxkhup\",\"ekdfqnhttwd\":\"datacemftzgyykyalu\",\"mhmnulwemp\":\"datawrczfjjnnuxxrk\"},\"\":{\"lusnawmhhgzotfr\":\"datafrhjulrsulwzp\",\"wxxyxhighctx\":\"datayrgkoekv\",\"skiczd\":\"dataxmolpcqydeyk\"}}},{\"name\":\"rjeizik\",\"description\":\"aboohxbmsgyc\",\"dataset\":{\"referenceName\":\"xrmdvewuyqaeohpj\",\"parameters\":{\"xaex\":\"datajkbvhhdaurgho\",\"xjxjoe\":\"datak\",\"dknkobe\":\"datalqxr\",\"cdgzseznux\":\"datatmbozomtzamicbig\"}},\"linkedService\":{\"referenceName\":\"uairaabmdlqjb\",\"parameters\":{\"xlhupmomihzbdnp\":\"dataf\",\"cghf\":\"datapkcdpreyxely\",\"sfxrkbhammgm\":\"datarufssjyg\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"efgvqcp\",\"datasetParameters\":\"datajgquxweysla
nd\",\"parameters\":{\"nquoxsotireimse\":\"datadjhunhghcgawnr\",\"tcyilbvz\":\"databfsx\"},\"\":{\"b\":\"datacjzlquzexokjxebj\",\"v\":\"datainzabwmvoglj\",\"ehaqidoyzltgio\":\"datapgidnw\"}}}],\"script\":\"oqpe\",\"scriptLines\":[\"eapfse\",\"gdtpe\",\"nacyheqwb\"]},\"description\":\"upyvqyvliq\",\"annotations\":[\"datasejb\",\"datavsi\",\"dataieswhddzydisn\",\"dataepywyjlnldpxottd\"],\"folder\":{\"name\":\"ocqibz\"}}") .toObject(MappingDataFlow.class); - Assertions.assertEquals("ttkgsux", model.description()); - Assertions.assertEquals("fgybmxs", model.folder().name()); - Assertions.assertEquals("mhmpty", model.sources().get(0).name()); - Assertions.assertEquals("lkfbnrqqxvztpb", model.sources().get(0).description()); - Assertions.assertEquals("nqtxjtomalswbnf", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("zygleexahvm", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("upyvqyvliq", model.description()); + Assertions.assertEquals("ocqibz", model.folder().name()); + Assertions.assertEquals("etjdvqydieqqkwa", model.sources().get(0).name()); + Assertions.assertEquals("wdxvqzxoebwg", model.sources().get(0).description()); + Assertions.assertEquals("bibanbau", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("ttkgsux", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("bpmfbfununmpzkrv", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("wdyjqurykcrrauee", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ke", model.sinks().get(0).name()); - Assertions.assertEquals("mhpdu", model.sinks().get(0).description()); - Assertions.assertEquals("igatolekscbctna", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("r", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("j", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("zipbwxgoooxzp", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("ylt", model.sinks().get(0).name()); + Assertions.assertEquals("ntf", model.sinks().get(0).description()); + Assertions.assertEquals("bgwjdxwnaz", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("glq", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("vzmlnkoywsxv", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("dqgmih", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kybtglwkzpgajsqj", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("so", model.transformations().get(0).name()); - Assertions.assertEquals("juxlkbectvtfjm", model.transformations().get(0).description()); - Assertions.assertEquals("dchmaiubavlz", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("syrled", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("uxza", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("ij", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("zvluyqqaiosss", 
model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("eeqgpkri", model.transformations().get(0).name()); + Assertions.assertEquals("bgnixxoww", model.transformations().get(0).description()); + Assertions.assertEquals("yfwnw", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("cvo", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("lixlajmllpque", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("kw", model.script()); - Assertions.assertEquals("wdxvqzxoebwg", model.scriptLines().get(0)); + Assertions.assertEquals("uvjezcjumvps", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("oqpe", model.script()); + Assertions.assertEquals("eapfse", model.scriptLines().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MappingDataFlow model = new MappingDataFlow().withDescription("ttkgsux") - .withAnnotations(Arrays - .asList("dataswgkpjhboyikebh", "datahkslgwlokhueoij", "datazcqypzqzufgsyf", "datajyvdwtfxptpqayam")) - .withFolder(new DataFlowFolder().withName("fgybmxs")) - .withSources(Arrays.asList( - new DataFlowSource().withName("mhmpty").withDescription("lkfbnrqqxvztpb") - .withDataset(new DatasetReference().withReferenceName("nqtxjtomalswbnf") - .withParameters(mapOf("qjn", "datapld"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("zygleexahvm") - .withParameters(mapOf("sjjzyvoaqajuveh", "datasbrcary", "be", "dataptdmkrrbhmpful"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("bpmfbfununmpzkrv").withDatasetParameters("dataifkdschlzvf") - .withParameters(mapOf("rtogmhmj", "datankjjwgcwnphbkgf", "fp", "datajsc", "fv", - "dataqwtygevgwmseharx", "x", "datan")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("wdyjqurykcrrauee") - .withParameters(mapOf("cbcbgydlqidy", "datauehogdd"))), - new DataFlowSource().withName("kmdskjhhxd").withDescription("jfoxcxscvslxl") - .withDataset(new DatasetReference().withReferenceName("a") - .withParameters(mapOf("yjmkxettc", "datamuk", "xjhqxcsqhtkb", "datalojfkqidnqto", "dmbi", - "datanqlrng", "qkzn", "datapsnaww"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("hllxricctkw") - .withParameters(mapOf("xhdctrceqnk", "dataqoajxeiyglesrwva", "lj", "datarupobehd", - "bibnzpphepifex", "dataacvumepj", "cjclykcgxv", "dataeqir"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("lvczu").withDatasetParameters("dataac") - .withParameters(mapOf("koynuiylpckae", "dataettepdjxqe", "nzhctmjtsgh", "datasedveskwxegqphrg", - "rpzeqac", "databcbcpz", "zshnuqndaizup", "dataldtzmpypefcp")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("taoutnpdct") - .withParameters(mapOf("y", "datapfe", "tybkcgs", "datahduyeuyldph", "x", "datathhllnmwyne", - "fciatxtjrr", "datax"))), - new DataFlowSource().withName("mb").withDescription("vifdxkecifhocjx") - .withDataset(new DatasetReference().withReferenceName("loozrvt").withParameters( - mapOf("cpxxvirye", "datamufun", "lpmcrdc", "datangjgvrquvpyg", "x", "dataeljtiahxmfqryarv"))) - .withLinkedService(new 
LinkedServiceReference().withReferenceName("bglcjkayspthzodu") - .withParameters(mapOf("djxyxgbkkqvjcteo", "datamjtgblioskkfmkm", "pxvjnzd", "datadlrslskk"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("cojhpcnabx").withDatasetParameters("datasnggytexvzilmhiv") - .withParameters(mapOf("cknrzda", "dataww", "eucyrth", "datalskzptjxul")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("shajqf") - .withParameters(mapOf("hwu", "dataeexpgeumi", "dbzsx", "datatrdexyionofnin", "bzbcyksiv", - "datawqqrsmpcbbprtuga", "rftsjcwjjxs", "datafogdrtbfcm"))))) + MappingDataFlow model = new MappingDataFlow().withDescription("upyvqyvliq") + .withAnnotations(Arrays.asList("datasejb", "datavsi", "dataieswhddzydisn", "dataepywyjlnldpxottd")) + .withFolder(new DataFlowFolder().withName("ocqibz")) + .withSources(Arrays.asList(new DataFlowSource().withName("etjdvqydieqqkwa") + .withDescription("wdxvqzxoebwg") + .withDataset(new DatasetReference().withReferenceName("bibanbau") + .withParameters(mapOf("lssan", "datazvpaklozkxbzrpej"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("ttkgsux") + .withParameters(mapOf("hkslgwlokhueoij", "dataswgkpjhboyikebh", "jyvdwtfxptpqayam", + "datazcqypzqzufgsyf", "ullojk", "datancfgybmxsnxoc", "g", "dataoy"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("j") + .withDatasetParameters("datadbdljz") + .withParameters(mapOf("nq", "datarcvuqbsgzlrqhb", "jlf", "datagdxwbsfpyxx", "ilz", + "dataecominxojjluxxd", "za", "datadzzqjmu")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("zipbwxgoooxzp") + .withParameters(mapOf("gzigjsugswhgsaod", "datamskxknp", "aquvwsxbgnvkervq", "datawwnbafoctohz", + "pabdsrg", "datahoadhrsxqvzv", "hjnlt", "dataajglzrsubklr"))))) .withSinks(Arrays.asList( - new DataFlowSink().withName("ke").withDescription("mhpdu") - .withDataset(new DatasetReference().withReferenceName("igatolekscbctna") - .withParameters(mapOf("dpkawnsnl", "datamwbzxpdc", "bicziuswswj", "dataimouxwksqmudmfco", - "fwbivqvo", "datakbqsjhbtqqvyfscy", "wvbhlimbyq", "datafuy"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("r") - .withParameters(mapOf("asaxxo", "datalikcdrd", "kwiy", "datasm", "ukosrn", "datav"))) + new DataFlowSink().withName("ylt") + .withDescription("ntf") + .withDataset(new DatasetReference().withReferenceName("bgwjdxwnaz") + .withParameters(mapOf("jwzzqseuzu", "datardreyzjwhsetw", "c", "datakykcyqhyqqzz", "ewfopazdazg", + "datakeys", "mdpv", "datasqgpewqcfu"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("glq") + .withParameters(mapOf("yowmndcovdw", "datanlzclctz"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vzmlnkoywsxv").withDatasetParameters("databjqqaxuyvymcn") - .withParameters(mapOf("wxqweuipmpvksmi", "datadoabhj", "krdpqgfhyrfr", "datansqxtltc", - "rcwfcmfcnrjajq", "datakkld", "zqgxx", "dataatxjtiel")) + .withReferenceName("uxza") + .withDatasetParameters("datamk") + .withParameters(mapOf("txudqyeme", "datauwkudrbcp")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("dqgmih") - .withParameters(mapOf("inklogxs", "datamcqrhnxt", "bjwzzos", "datatzarhzvqnsqktc"))) - .withRejectedDataLinkedService(new 
LinkedServiceReference().withReferenceName("kybtglwkzpgajsqj") - .withParameters(mapOf("uqrebluimmbwx", "dataqbmfuvqarwz", "kraokq", "datafgtdmbvx", - "aokbavlyttaaknwf", "databudbt"))), - new DataFlowSink().withName("hyhlwcjsqg").withDescription("hffbxrq") - .withDataset(new DatasetReference().withReferenceName("ijpeuql") - .withParameters(mapOf("swenawwa", "dataeqztvxwmwwm"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("cleqioulndhzyo") - .withParameters(mapOf("llhsvidmyt", "dataht", "glxpnovyoanfbcsw", "datal"))) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ij") + .withParameters(mapOf("m", "datawlefksxqceazfpxg"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("zvluyqqaiosss") + .withParameters(mapOf("fxtjdhsoym", "dataaifppuacvfyeowp", "tkfvvdshx", "datapvtyqfttehdpbouj", + "nquktrfnslnlrxs", "datadedsuenygnxcgjtf"))), + new DataFlowSink().withName("scjfncjwvuagf") + .withDescription("tltng") + .withDataset( + new DatasetReference().withReferenceName("reuptrk").withParameters(mapOf("s", "dataijajwolx"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("ghmp") + .withParameters(mapOf("bxqla", "datalslrcigtzjc"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ywv").withDatasetParameters("dataigvjrktp") - .withParameters(mapOf("mwhqnucsklh", "dataukyawoh", "sjt", "datai")) + .withReferenceName("ssov") + .withDatasetParameters("datapavidnievwffc") + .withParameters(mapOf("xgcbdsvalpnpt", "dataezslp", "x", "datatrkxgpazwu")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("fkg") - .withParameters(mapOf("puohdkcprgukxrz", "dataaowuzo"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("ochlu") - .withParameters(mapOf("izcbfzmcrunfhiuc", "datamqrud", "u", "datamfbcpaqktkrum", "kxiuxqggvqr", - "datadkyzbfvxov"))), - new DataFlowSink().withName("jch").withDescription("mpwctoflds") - .withDataset(new DatasetReference().withReferenceName("cdhz").withParameters( - mapOf("ewhfjsrwqrxetf", "databrfgdrwji", "r", "datacwv", "ax", "datadqntycnawthv"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("u") - .withParameters(mapOf("k", "datamcmhudfjeceh"))) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("pztraudsvhl") + .withParameters(mapOf("egpqtmo", "dataul", "shqrdgrt", "datahvrztnvg", "fa", "datamewjzlpyk"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("wjcayerzrr") + .withParameters(mapOf("zrghsrleink", "dataybylpol"))), + new DataFlowSink().withName("ll") + .withDescription("na") + .withDataset(new DatasetReference().withReferenceName("jowazhpab") + .withParameters(mapOf("wsxnsrqor", "datamlyotg"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("enmvcebxeetq") + .withParameters(mapOf("jkedwqurcgojmrv", "datacxxqndc", "qqrsil", "dataxwjongzse", "sbvr", + "datachskxxka", "rulfuct", "dataaqgvto"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("qtwloes").withDatasetParameters("dataggvrbnyrukoilaci") - .withParameters(mapOf("lh", "datajleip", "whbgxvellvul", "datayxpzruzythqk")) + .withReferenceName("thcfj") + .withDatasetParameters("dataxlyubqjrostv") + .withParameters(mapOf("qrztrxal", "dataqmtzzb", "rsnrhp", "datardhabsr", "iwkkvya", "dataa")) 
.withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ft") - .withParameters(mapOf("ktjtgra", "dataus", "fkbebauzl", "dataaqo"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("txxwpfh") - .withParameters(mapOf("oywhczzqrhmngqbe", "dataudrtpzkgme", "nykdi", "dataygisrz"))), - new DataFlowSink().withName("bjpjvlyw").withDescription("mfwo") - .withDataset(new DatasetReference().withReferenceName("jw") - .withParameters(mapOf("nqzocrdzg", "datayj", "xdncaqtt", "datazeunt", "gyrihlgm", - "dataekoifuvnyttzgi", "lkndrndpgfjodh", "databehlqtxnr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("qotwfh") - .withParameters(mapOf("zafczuumljci", "datawgsabvcipo", "veitit", "datavpefyc"))) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("gwokmikpazfbmjxu") + .withParameters(mapOf("zaehpphthd", "datapfdvhaxdv", "tatlakfqoi", "datalmv", "rpogwphchg", + "datawgiksbbvtoo", "htukfac", "datat"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("mbf") + .withParameters(mapOf("wcgasgom", "datameezbxvqxbnu", "qgo", "datamjzwx", "gfredmlscg", + "datasxpwwztjfmkkh"))), + new DataFlowSink().withName("vpjbowcpjqduq") + .withDescription("xe") + .withDataset(new DatasetReference().withReferenceName("ydfbwlj") + .withParameters(mapOf("iegftc", "datauerkjddvrg", "fgmwd", "databiiftksdwgdnk", + "buvczldbglzoutb", "datac", "orbjg", "dataaqgzekajclyzgs"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("zjotvmrxkhlo") + .withParameters(mapOf("yu", "datajbhvhdiqayflu"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("xzajlnsjhwjuyxxb").withDatasetParameters("datavmv") - .withParameters(mapOf("pntghyks", "datatuadxkxeqb", "t", "datarcdrnxsluvlzlad", "rhwzdanojisg", - "datakpbqhvfdqqjw", "ztjctibpvbkae", "datalmvokat")) + .withReferenceName("uudtelv") + .withDatasetParameters("dataibdrqrswhbuubpyr") + .withParameters(mapOf("nnctagfyvrt", "datajoxztfwfqchvczev", "cgkrepdqhqy", "dataqpemh")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("yymffhmjp") - .withParameters(mapOf("zuvrzmzqmz", "datayx"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("rb") - .withParameters(mapOf("tjpp", "datanmdyfoebo", "t", "datalaohoqkp", "lmhxdqaolfylnk", - "dataqjilaywkdcwmqsyr"))))) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("qtbxxniuisdzh") + .withParameters(mapOf("cnadbuwqrgxf", "datagzpags"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("mqiyn") + .withParameters(mapOf("vmtumxpym", "dataellnkkii", "canlduwzorxs", "datajfuaxroqvqpilrgu", + "kqv", "datamxaqklxym", "ltuubw", "dataqepdx"))))) .withTransformations(Arrays.asList( - new Transformation().withName("so").withDescription("juxlkbectvtfjm") - .withDataset(new DatasetReference().withReferenceName("dchmaiubavlz") - .withParameters(mapOf("jqafkmkro", "datagmfalkzazmgoked", "pqrtvaoznqni", "datazrthqet", - "eituugedhfpjs", "dataiezeagm", "syjdeolctae", "datalzmb"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("syrled").withParameters( - mapOf("xzvsgeafgf", "datastbvtqig", "kkwa", "datasehxlzsxezp", "yfjlpzeqto", "dataes"))) + new Transformation().withName("eeqgpkri") + .withDescription("bgnixxoww") + .withDataset(new 
DatasetReference().withReferenceName("yfwnw") + .withParameters(mapOf("icrmpepkldmaxxi", "dataxe", "ws", "datavs", "wrasekw", "datagkjgya"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("cvo") + .withParameters(mapOf("wyxqiclad", "dataoqar", "ob", "dataatdavuqmcbyms"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("uvjezcjumvps") + .withDatasetParameters("dataioyoiglkmiq") + .withParameters(mapOf("ydgnha", "dataraclibbfqpspkla", "gpmnmabe", "datatwukex")) + .withAdditionalProperties(mapOf())), + new Transformation().withName("htfpwpqb") + .withDescription("j") + .withDataset( + new DatasetReference().withReferenceName("yqwdqi").withParameters(mapOf("nztxlujkh", "datahg"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("cmrnkfmkhcq") + .withParameters(mapOf("eahjedv", "datalmhjnqt"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("pok") + .withDatasetParameters("dataxejhh") + .withParameters(mapOf("tbkewkqynspg", "datauavt", "qokmyrljial", "datavoffbkkwvdxae")) + .withAdditionalProperties(mapOf())), + new Transformation().withName("tzarogatmoljiy") + .withDescription("pinm") + .withDataset(new DatasetReference().withReferenceName("fkneerzztrkn") + .withParameters(mapOf("hkeizcp", "datalugdybnhrxlelf", "wjekptycaydbj", "datahtdm", + "ymlcfnzhmhsurlg", "datac", "lawiubmomsgvvjhv", "dataqkpmmzpstau"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("rlohe") + .withParameters(mapOf("lqionswaeqk", "datamajnkd"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("lixlajmllpque").withDatasetParameters("dataam") - .withParameters(mapOf("mkekxpkzwaqxo", "datagwb")).withAdditionalProperties(mapOf())), - new Transformation().withName("bfdyjduss").withDescription("szekbh") - .withDataset(new DatasetReference().withReferenceName("kaaggkreh").withParameters( - mapOf("ybff", "datan", "sqtaadusrexxfa", "datajfiimreoa", "psimsf", "datasqwudohzilfmnli"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pofqpmbhy") - .withParameters(mapOf("erhsmvgohtw", "datadrmmttjxoph", "wwmhkruwae", "datamqilrixysfnimsqy", - "in", "datarympmlq", "njdiqfliejhpcl", "datazduewihapfjii"))) + .withReferenceName("qxjoshohtotryeg") + .withDatasetParameters("datah") + .withParameters(mapOf("erufgjblcdrmy", "dataexznlwkbfokxkhup", "ekdfqnhttwd", + "datacemftzgyykyalu", "mhmnulwemp", "datawrczfjjnnuxxrk")) + .withAdditionalProperties(mapOf())), + new Transformation().withName("rjeizik") + .withDescription("aboohxbmsgyc") + .withDataset(new DatasetReference().withReferenceName("xrmdvewuyqaeohpj") + .withParameters(mapOf("xaex", "datajkbvhhdaurgho", "xjxjoe", "datak", "dknkobe", "datalqxr", + "cdgzseznux", "datatmbozomtzamicbig"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("uairaabmdlqjb") + .withParameters(mapOf("xlhupmomihzbdnp", "dataf", "cghf", "datapkcdpreyxely", "sfxrkbhammgm", + "datarufssjyg"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("dfsbwceivb").withDatasetParameters("dataipbwxgooo") - .withParameters(mapOf("s", "datarad", "g", "dataxknpdgz", "wwnbafoctohz", "datasugswhgsaod", - "hoadhrsxqvzv", "dataaquvwsxbgnvkervq")) + .withReferenceName("efgvqcp") + .withDatasetParameters("datajgquxweysland") + .withParameters(mapOf("nquoxsotireimse", 
"datadjhunhghcgawnr", "tcyilbvz", "databfsx")) .withAdditionalProperties(mapOf())))) - .withScript("kw") - .withScriptLines(Arrays.asList("wdxvqzxoebwg", "xbibanbaupw", "zvpaklozkxbzrpej", "lssan")); + .withScript("oqpe") + .withScriptLines(Arrays.asList("eapfse", "gdtpe", "nacyheqwb")); model = BinaryData.fromObject(model).toObject(MappingDataFlow.class); - Assertions.assertEquals("ttkgsux", model.description()); - Assertions.assertEquals("fgybmxs", model.folder().name()); - Assertions.assertEquals("mhmpty", model.sources().get(0).name()); - Assertions.assertEquals("lkfbnrqqxvztpb", model.sources().get(0).description()); - Assertions.assertEquals("nqtxjtomalswbnf", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("zygleexahvm", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("upyvqyvliq", model.description()); + Assertions.assertEquals("ocqibz", model.folder().name()); + Assertions.assertEquals("etjdvqydieqqkwa", model.sources().get(0).name()); + Assertions.assertEquals("wdxvqzxoebwg", model.sources().get(0).description()); + Assertions.assertEquals("bibanbau", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("ttkgsux", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("bpmfbfununmpzkrv", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("wdyjqurykcrrauee", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ke", model.sinks().get(0).name()); - Assertions.assertEquals("mhpdu", model.sinks().get(0).description()); - Assertions.assertEquals("igatolekscbctna", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("r", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("j", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("zipbwxgoooxzp", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("ylt", model.sinks().get(0).name()); + Assertions.assertEquals("ntf", model.sinks().get(0).description()); + Assertions.assertEquals("bgwjdxwnaz", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("glq", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("vzmlnkoywsxv", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("dqgmih", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kybtglwkzpgajsqj", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("so", model.transformations().get(0).name()); - Assertions.assertEquals("juxlkbectvtfjm", model.transformations().get(0).description()); - Assertions.assertEquals("dchmaiubavlz", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("syrled", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("uxza", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("ij", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("zvluyqqaiosss", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("eeqgpkri", model.transformations().get(0).name()); + 
Assertions.assertEquals("bgnixxoww", model.transformations().get(0).description()); + Assertions.assertEquals("yfwnw", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("cvo", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("lixlajmllpque", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("kw", model.script()); - Assertions.assertEquals("wdxvqzxoebwg", model.scriptLines().get(0)); + Assertions.assertEquals("uvjezcjumvps", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("oqpe", model.script()); + Assertions.assertEquals("eapfse", model.scriptLines().get(0)); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java index 2d1c97fe72244..9a7c6a5208476 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java @@ -22,205 +22,142 @@ public final class MappingDataFlowTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MappingDataFlowTypeProperties model = BinaryData.fromString( - "{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"uullojkp\",\"parameters\":{\"wdjuxdbdljzgdy\":\"datag\"}},\"name\":\"cvuq\",\"description\":\"gzlrqhbj\",\"dataset\":{\"referenceName\":\"ogdxwbsfpyxxtjlf\",\"parameters\":{\"xdhilz\":\"dataominxojjlu\",\"za\":\"datadzzqjmu\",\"otokhtvwtaznk\":\"dataovribq\",\"wjyofgwhnkbtl\":\"dataqww\"}},\"linkedService\":{\"referenceName\":\"jssmctsnldkpwo\",\"parameters\":{\"bxbteogfgfiijry\":\"datas\",\"m\":\"datawlefksxqceazfpxg\",\"aiossscyvaifp\":\"datavzvluyq\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"cvfyeowpsfxtjdhs\",\"datasetParameters\":\"datamhpv\",\"parameters\":{\"pboujs\":\"dataftteh\",\"suenyg\":\"datakfvvdshxcde\",\"nquktrfnslnlrxs\":\"dataxcgjtf\",\"wntfmtbgwjdxwna\":\"dataylt\"},\"\":{\"wjwzzqseuzuukykc\":\"datarrdreyzjwhset\",\"tewfopazdazgbsq\":\"dataqhyqqzzdcykey\",\"c\":\"datapew\",\"qjbknl\":\"datautmdpvozg\"}}},{\"schemaLinkedService\":{\"referenceName\":\"lctzeyowmndcovd\",\"parameters\":{\"kvfruwkudr\":\"dataauxzanh\",\"udqyemeb\":\"datacpft\"}},\"name\":\"naucmcirtnee\",\"description\":\"jauwcgxefnohaitr\",\"dataset\":{\"referenceName\":\"izerw\",\"parameters\":{\"ocefhpriylfmpzt\":\"dataasmxubvfbngf\",\"vhl\":\"dataaud\"}},\"linkedService\":{\"referenceName\":\"culregpqt\",\"parameters\":{\"shqrdgrt\":\"datahvrztnvg\",\"fa\":\"datamewjzlpyk\",\"zrransyb\":\"datazwjcaye\",\"nkfscjfn\":\"datalpolwzrghsrle\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vuagfqwtl\",\"datasetParameters\":\"datagvmreuptrklzmi\",\"parameters\":{\"xfsv\":\"datawo\",\"nwlslrcigtzjcvbx\":\"dataghmp\",\"yxpavidnie\":\"datalapsnsso\",\"slpuxgcbdsva\":\"datawffcvvye\"},\"\":{\"vnjobfelhldiuhzz\":\"dataptwtrkxgpazwugxy\"}}},{\"schemaLinkedService\":{\"referenceName\":\"lmfaewzg
iudjp\",\"parameters\":{\"mhk\":\"datahttqh\",\"gcruxspinym\":\"dataezsdsuxheq\",\"zfbmjxuv\":\"dataqgwokmikp\"}},\"name\":\"ipfdvhaxdvwzaehp\",\"description\":\"thd\",\"dataset\":{\"referenceName\":\"mvetatlakfq\",\"parameters\":{\"rpogwphchg\":\"datawgiksbbvtoo\",\"htukfac\":\"datat\"}},\"linkedService\":{\"referenceName\":\"mbf\",\"parameters\":{\"wcgasgom\":\"datameezbxvqxbnu\",\"qgo\":\"datamjzwx\",\"gfredmlscg\":\"datasxpwwztjfmkkh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ccnaov\",\"datasetParameters\":\"datawazhpabaco\",\"parameters\":{\"nmvceb\":\"dataotgkwsxnsrqorcg\",\"dcqjkedwqurc\":\"dataeetqujxcxxq\",\"qqrsil\":\"dataojmrvvxwjongzse\",\"sbvr\":\"datachskxxka\"},\"\":{\"ojrulfuctejrt\":\"datagv\",\"ubqjro\":\"datacfjzhxl\",\"beqrztrx\":\"datatvrjeqmtz\"}}},{\"schemaLinkedService\":{\"referenceName\":\"xrd\",\"parameters\":{\"kkvyanxk\":\"datasrwrsnrhpqati\",\"qxetqmmlivrjjx\":\"datavcsemsvuvdj\",\"gfquwz\":\"datawxdchpojxlehzlx\",\"ibelwcerwkw\":\"dataw\"}},\"name\":\"pjxljtxb\",\"description\":\"qtbxxniuisdzh\",\"dataset\":{\"referenceName\":\"d\",\"parameters\":{\"r\":\"dataagsecnadbuw\",\"zoellnkkiiwvmtum\":\"dataxfllmqiyn\",\"oqvqpilr\":\"datapymdjfuax\"}},\"linkedService\":{\"referenceName\":\"ncanlduwzor\",\"parameters\":{\"kqv\":\"datamxaqklxym\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"pdxcltuubwy\",\"datasetParameters\":\"datajbowcpj\",\"parameters\":{\"exkydfb\":\"dataqgi\",\"vhuerkjddvrglieg\":\"datalj\"},\"\":{\"fgmwd\":\"datavbiiftksdwgdnk\",\"buvczldbglzoutb\":\"datac\",\"orbjg\":\"dataaqgzekajclyzgs\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"otvmrxk\",\"parameters\":{\"yfluiyuosnuudte\":\"databvvjbhvhdiq\",\"buubpyrowt\":\"datavhyibdrqrsw\",\"czevjnn\":\"dataoxztfwfqch\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"agfyvrtpqpe\",\"parameters\":{\"wqwemvxqabckmze\":\"datacgkrepdqhqy\"}},\"name\":\"xin\",\"description\":\"re\",\"dataset\":{\"referenceName\":\"twhlpuzjpce\",\"parameters\":{\"phmsexroq\":\"datazangprbfaxyxzlbc\",\"nfee\":\"datandktxfv\",\"bgnixxoww\":\"datagpkrie\"}},\"linkedService\":{\"referenceName\":\"yfwnw\",\"parameters\":{\"icrmpepkldmaxxi\":\"dataxe\",\"ws\":\"datavs\",\"wrasekw\":\"datagkjgya\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"voinwo\",\"datasetParameters\":\"datartwy\",\"parameters\":{\"msfobjlquvj\":\"datacladvatdavuqmcb\"},\"\":{\"mvpsimioyo\":\"dataj\",\"clibbfqpsp\":\"dataglkmiqwnnr\"}}},{\"schemaLinkedService\":{\"referenceName\":\"adydg\",\"parameters\":{\"mnmabeddqil\":\"datautwukexzg\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"d\",\"parameters\":{\"vstclg\":\"dataqfp\"}},\"name\":\"rvwerfwxbsmtb\",\"description\":\"jehhci\",\"dataset\":{\"referenceName\":\"wdv\",\"parameters\":{\"hsqhtf\":\"datarek\",\"yejuwyqwdqigmghg\":\"datawpq\",\"jcmrnkfm\":\"datanztxlujkh\"}},\"linkedService\":{\"referenceName\":\"cqtwmlmhjnqtq\",\"parameters\":{\"dvragpokddxejhh\":\"dataj\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"uavt\",\"datasetParameters\":\"databkew\",\"parameters\":{\"pgb\":\"datan\"},\"\":{\"kmyrljialzbnobr\":\"datafbkkwvdxaexq\",\"yudivbxnhsqeaeo\":\"datalpbcjtrpz\",\"ogatmoljiy\":\"dataqelwgdhuruzytza\",\"knsjulugd\":\"datampinmzvfkneerzzt\"}}},{\"schemaLinkedService\":{\"referenceName\":\"nhrxlel\",\"parameters\":{\"izcpihtdmiw\":\"datak\",\"caydbjzcqymlcfnz\":\"dataekpt\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"hsurlgw\",\"parameters\":{\"tauolawiubmom\":\"datammzp\",\"ohewj
j\":\"datagvvjhvvlr\"}},\"name\":\"ajnkdflqionswae\",\"description\":\"zfz\",\"dataset\":{\"referenceName\":\"jo\",\"parameters\":{\"otryegp\":\"datah\",\"rmexznlwkb\":\"datah\",\"fgjblcd\":\"dataokxkhupzer\"}},\"linkedService\":{\"referenceName\":\"yfcemftz\",\"parameters\":{\"ugekdfqn\":\"datakya\",\"owrczfjjnnuxxr\":\"datattw\",\"frhjulrsulwzp\":\"datakmhmnulwempdc\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"snawmhhgzotfri\",\"datasetParameters\":\"datagkoekvzwxxyxh\",\"parameters\":{\"y\":\"datactxbxmolpcqyd\",\"rjeizik\":\"datavskiczd\",\"ycqsxr\":\"dataqaboohxbms\",\"ewuyqa\":\"datad\"},\"\":{\"hhdau\":\"datapjhgejkb\",\"exbkhx\":\"dataghoox\"}}}],\"transformations\":[{\"name\":\"oez\",\"description\":\"xrkdknkobektm\",\"dataset\":{\"referenceName\":\"z\",\"parameters\":{\"gwcd\":\"datazamicb\",\"m\":\"datazseznuxkeuairaa\",\"ihzbdnpxpk\":\"datalqjbedpfixlhupmo\"}},\"linkedService\":{\"referenceName\":\"pre\",\"parameters\":{\"ssjyghsfx\":\"datalyicghflru\",\"ammgmqfmefgv\":\"datakb\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"dwj\",\"datasetParameters\":\"datauxweyslandkd\",\"parameters\":{\"hcgawn\":\"datahunh\",\"ireimseobf\":\"datarnquoxso\",\"xcjzlquze\":\"dataxstcyilbvzm\"},\"\":{\"b\":\"datajxebj\",\"v\":\"datainzabwmvoglj\"}}},{\"name\":\"pgidnw\",\"description\":\"haqidoyzltgiomqo\",\"dataset\":{\"referenceName\":\"epiaeapfsergd\",\"parameters\":{\"b\":\"dataqnacyheq\"}},\"linkedService\":{\"referenceName\":\"qncjubkhjozfymcw\",\"parameters\":{\"li\":\"datapyvqy\",\"hddzydisnuepy\":\"dataiipsejbsvsiaies\",\"dpxot\":\"datayjln\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"i\",\"datasetParameters\":\"datacqibzj\",\"parameters\":{\"iphryvcjwqwo\":\"dataee\",\"pijhfrzgdkk\":\"datasratjhdhzyb\",\"ukhsusmmorf\":\"datagv\"},\"\":{\"neyttl\":\"datawilzzhnijmriprlk\",\"bkut\":\"datacxiv\"}}},{\"name\":\"umltwjflu\",\"description\":\"nbpvzlq\",\"dataset\":{\"referenceName\":\"auyqnj\",\"parameters\":{\"u\":\"datamocgjshg\",\"xqqggljky\":\"datarhwv\",\"rbctbhpjhxpcvrd\":\"datasjrclrvtzq\"}},\"linkedService\":{\"referenceName\":\"eitaneqadynzjahw\",\"parameters\":{\"xwspcaxikhfjq\":\"dataomzczfkiceevsa\",\"ysemtmesrfsvpin\":\"databglcxkxgzzromvy\",\"swxspvckojaz\":\"datazpatqtd\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"spftesubzpv\",\"datasetParameters\":\"datadylytcovq\",\"parameters\":{\"lbmuos\":\"datasrfjbdxzfxnx\"},\"\":{\"yzlwhbwzjnufzrf\":\"datamdihdcy\",\"qgnnbz\":\"datam\",\"ubjtvgjsxmtyjjv\":\"datatftedz\",\"sffofwanmhksca\":\"datavdpwwobtdphti\"}}},{\"name\":\"w\",\"description\":\"cgwdfriwgybjp\",\"dataset\":{\"referenceName\":\"ok\",\"parameters\":{\"k\":\"datagllixdgbyfgwew\",\"xlcskltez\":\"datavxprwpxsoohu\",\"srtmdylperpiltt\":\"dataugggzlfbgrdcgu\"}},\"linkedService\":{\"referenceName\":\"gczfc\",\"parameters\":{\"uvftwaivmuqk\":\"datafbodetresrgvts\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"gjypanhxmpdxxze\",\"datasetParameters\":\"datawzjwotnxlkfhg\",\"parameters\":{\"pcs\":\"datafoxqwecrsn\"},\"\":{\"rmlccmet\":\"dataxovppqibukklvzr\",\"vfqbqna\":\"datascz\",\"yvdgxlyzk\":\"datadsyenzsieuscpl\"}}}],\"script\":\"tdsh\",\"scriptLines\":[\"vkolrupjovmo\",\"sayebra\"]}") + 
"{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"ee\",\"parameters\":{\"jhdhz\":\"datahryvcjwqwoqsra\"}},\"name\":\"b\",\"description\":\"ijh\",\"dataset\":{\"referenceName\":\"zgdkkagvwu\",\"parameters\":{\"mzhwilzzhni\":\"datausmmor\",\"neyttl\":\"datamriprlk\",\"bkut\":\"datacxiv\",\"ynbpvzlqywauy\":\"dataumltwjflu\"}},\"linkedService\":{\"referenceName\":\"jckhmocgj\",\"parameters\":{\"bsjrclrv\":\"dataouarhwvixqqggljk\",\"nrbctbhpjhxpcvrd\":\"dataz\",\"it\":\"datay\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qady\",\"datasetParameters\":\"datajahwriuomzczf\",\"parameters\":{\"xwspcaxikhfjq\":\"dataeevsa\",\"ysemtmesrfsvpin\":\"databglcxkxgzzromvy\"},\"\":{\"zbbgspftesubzpvp\":\"dataatqtdiswxspvckoj\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ylytcovqseusr\",\"parameters\":{\"zfxnxmlbmuos\":\"datad\",\"zlwhbwzjnufzrfgm\":\"datakjmdihdcyy\",\"tftedz\":\"dataqgnnbz\",\"vdpwwobtdphti\":\"dataubjtvgjsxmtyjjv\"}},\"name\":\"sffofwanmhksca\",\"description\":\"azcg\",\"dataset\":{\"referenceName\":\"friwgybjp\",\"parameters\":{\"wewqkj\":\"datakscvgllixdgbyf\"}},\"linkedService\":{\"referenceName\":\"prwpxsoohu\",\"parameters\":{\"lfb\":\"datacskltezuuggg\",\"srtmdylperpiltt\":\"datardcgu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"czfcmfpfbod\",\"datasetParameters\":\"dataresrgvtshuvft\",\"parameters\":{\"zgjypanhxmpdxxz\":\"datavmuqke\",\"tnxlkfhglhr\":\"datatwwzjw\",\"qxovppqibukk\":\"dataoxqwecrsnhpcse\",\"etjsczivfqbqna\":\"datavzrlrmlcc\"},\"\":{\"yvdgxlyzk\":\"datayenzsieuscpl\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"shezs\",\"parameters\":{\"sayebra\":\"datalrupjovmo\",\"ykykip\":\"datawzlpzbtz\",\"mbezacfpztg\":\"datasdyepfno\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"wyqejgaao\",\"parameters\":{\"ngdyfcixrhlcq\":\"datagkppgkqzkcyzmf\",\"goiutgw\":\"datahoe\",\"yntacihnco\":\"datamkahpqha\",\"mliqmvlb\":\"datamip\"}},\"name\":\"i\",\"description\":\"aqgrv\",\"dataset\":{\"referenceName\":\"omxp\",\"parameters\":{\"wiqbuoutm\":\"datatsdfjyieso\",\"gbww\":\"datayzgleofjs\"}},\"linkedService\":{\"referenceName\":\"dajfwnncfma\",\"parameters\":{\"l\":\"datagjj\",\"uxtndopgjttbasua\":\"dataiqlwixvtbou\",\"lndbeaqbkixv\":\"dataap\",\"spugzfeuzjljmph\":\"datalwynpbbfqvzfj\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ezolgjzm\",\"datasetParameters\":\"datauydoccnx\",\"parameters\":{\"hbatecaatsdohzni\":\"datanzbuia\",\"wiinjdllw\":\"datacbdaom\"},\"\":{\"owavvqxuajgcqw\":\"datae\"}}}],\"transformations\":[{\"name\":\"nkgfcfdr\",\"description\":\"si\",\"dataset\":{\"referenceName\":\"xtclhuulriqbyok\",\"parameters\":{\"cdjwsu\":\"databzsxebrslttfy\",\"holhujbfw\":\"dataardnagttpufpbpgn\",\"whdmcvhtbbz\":\"dataiplkysolsyjprxs\"}},\"linkedService\":{\"referenceName\":\"f\",\"parameters\":{\"zgihotje\":\"datawzbxpc\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"mx\",\"datasetParameters\":\"datalrrskap\",\"parameters\":{\"uyader\":\"dataiee\"},\"\":{\"yimyccgrvk\":\"dataokyk\",\"ersejeg\":\"dataxzznnui\"}}},{\"name\":\"rkjguwrjmwvvbt\",\"description\":\"kxxi\",\"dataset\":{\"referenceName\":\"gxql\",\"parameters\":{\"vjaqu\":\"dataotjgxieqfkyfhi\"}},\"linkedService\":{\"referenceName\":\"yynvskpa\",\"parameters\":{\"bxcbccwkqm\":\"datageumexm\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"p\",\"datasetParameters\":\"dataiscr\",\"parameters\":{\"dahlfxlmuifmuadj\":\"datagftrqrejdaahuqim\",\"skiioshjgczetybn\":\"datafsn\",\"j\":\"datagz
tlcgc\"},\"\":{\"mywj\":\"dataj\",\"cljkxpyl\":\"dataebecuvlbefv\"}}}],\"script\":\"oxz\",\"scriptLines\":[\"syxjijeyp\",\"vrbkerdkdkgaw\"]}") .toObject(MappingDataFlowTypeProperties.class); - Assertions.assertEquals("cvuq", model.sources().get(0).name()); - Assertions.assertEquals("gzlrqhbj", model.sources().get(0).description()); - Assertions.assertEquals("ogdxwbsfpyxxtjlf", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("jssmctsnldkpwo", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("b", model.sources().get(0).name()); + Assertions.assertEquals("ijh", model.sources().get(0).description()); + Assertions.assertEquals("zgdkkagvwu", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("jckhmocgj", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("cvfyeowpsfxtjdhs", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("uullojkp", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("xin", model.sinks().get(0).name()); - Assertions.assertEquals("re", model.sinks().get(0).description()); - Assertions.assertEquals("twhlpuzjpce", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("yfwnw", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("qady", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ee", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("i", model.sinks().get(0).name()); + Assertions.assertEquals("aqgrv", model.sinks().get(0).description()); + Assertions.assertEquals("omxp", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("dajfwnncfma", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("voinwo", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("otvmrxk", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("agfyvrtpqpe", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("oez", model.transformations().get(0).name()); - Assertions.assertEquals("xrkdknkobektm", model.transformations().get(0).description()); - Assertions.assertEquals("z", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("pre", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("ezolgjzm", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("shezs", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("wyqejgaao", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("nkgfcfdr", model.transformations().get(0).name()); + Assertions.assertEquals("si", model.transformations().get(0).description()); + Assertions.assertEquals("xtclhuulriqbyok", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("f", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("dwj", 
model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("tdsh", model.script()); - Assertions.assertEquals("vkolrupjovmo", model.scriptLines().get(0)); + Assertions.assertEquals("mx", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("oxz", model.script()); + Assertions.assertEquals("syxjijeyp", model.scriptLines().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MappingDataFlowTypeProperties model = new MappingDataFlowTypeProperties() - .withSources(Arrays.asList( - new DataFlowSource().withName("cvuq").withDescription("gzlrqhbj") - .withDataset(new DatasetReference().withReferenceName("ogdxwbsfpyxxtjlf") - .withParameters(mapOf("xdhilz", "dataominxojjlu", "za", "datadzzqjmu", "otokhtvwtaznk", - "dataovribq", "wjyofgwhnkbtl", "dataqww"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("jssmctsnldkpwo").withParameters( - mapOf("bxbteogfgfiijry", "datas", "m", "datawlefksxqceazfpxg", "aiossscyvaifp", "datavzvluyq"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("cvfyeowpsfxtjdhs").withDatasetParameters("datamhpv") - .withParameters(mapOf("pboujs", "dataftteh", "suenyg", "datakfvvdshxcde", "nquktrfnslnlrxs", - "dataxcgjtf", "wntfmtbgwjdxwna", "dataylt")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("uullojkp") - .withParameters(mapOf("wdjuxdbdljzgdy", "datag"))), - new DataFlowSource().withName("naucmcirtnee").withDescription("jauwcgxefnohaitr") - .withDataset(new DatasetReference().withReferenceName("izerw") - .withParameters(mapOf("ocefhpriylfmpzt", "dataasmxubvfbngf", "vhl", "dataaud"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("culregpqt") - .withParameters(mapOf("shqrdgrt", "datahvrztnvg", "fa", "datamewjzlpyk", "zrransyb", - "datazwjcaye", "nkfscjfn", "datalpolwzrghsrle"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vuagfqwtl").withDatasetParameters("datagvmreuptrklzmi") - .withParameters(mapOf("xfsv", "datawo", "nwlslrcigtzjcvbx", "dataghmp", "yxpavidnie", - "datalapsnsso", "slpuxgcbdsva", "datawffcvvye")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("lctzeyowmndcovd") - .withParameters(mapOf("kvfruwkudr", "dataauxzanh", "udqyemeb", "datacpft"))), - new DataFlowSource().withName("ipfdvhaxdvwzaehp").withDescription("thd") - .withDataset(new DatasetReference().withReferenceName("mvetatlakfq") - .withParameters(mapOf("rpogwphchg", "datawgiksbbvtoo", "htukfac", "datat"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("mbf").withParameters( - mapOf("wcgasgom", "datameezbxvqxbnu", "qgo", "datamjzwx", "gfredmlscg", "datasxpwwztjfmkkh"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ccnaov").withDatasetParameters("datawazhpabaco") - .withParameters(mapOf("nmvceb", "dataotgkwsxnsrqorcg", "dcqjkedwqurc", "dataeetqujxcxxq", - "qqrsil", "dataojmrvvxwjongzse", "sbvr", "datachskxxka")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("lmfaewzgiudjp") - .withParameters(mapOf("mhk", "datahttqh", "gcruxspinym", "dataezsdsuxheq", "zfbmjxuv", - "dataqgwokmikp"))), - new 
DataFlowSource().withName("pjxljtxb").withDescription("qtbxxniuisdzh") - .withDataset(new DatasetReference().withReferenceName("d") - .withParameters(mapOf("r", "dataagsecnadbuw", "zoellnkkiiwvmtum", "dataxfllmqiyn", "oqvqpilr", - "datapymdjfuax"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("ncanlduwzor") - .withParameters(mapOf("kqv", "datamxaqklxym"))) - .withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("pdxcltuubwy").withDatasetParameters("datajbowcpj") - .withParameters( - mapOf("exkydfb", "dataqgi", "vhuerkjddvrglieg", "datalj")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("xrd") - .withParameters(mapOf("kkvyanxk", "datasrwrsnrhpqati", "qxetqmmlivrjjx", "datavcsemsvuvdj", - "gfquwz", "datawxdchpojxlehzlx", "ibelwcerwkw", "dataw"))))) - .withSinks( - Arrays.asList( - new DataFlowSink().withName("xin").withDescription("re") - .withDataset(new DatasetReference().withReferenceName("twhlpuzjpce") - .withParameters(mapOf("phmsexroq", "datazangprbfaxyxzlbc", "nfee", "datandktxfv", - "bgnixxoww", "datagpkrie"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yfwnw").withParameters( - mapOf("icrmpepkldmaxxi", "dataxe", "ws", "datavs", "wrasekw", "datagkjgya"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("voinwo").withDatasetParameters("datartwy") - .withParameters(mapOf("msfobjlquvj", "datacladvatdavuqmcb")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("otvmrxk") - .withParameters(mapOf("yfluiyuosnuudte", "databvvjbhvhdiq", "buubpyrowt", "datavhyibdrqrsw", - "czevjnn", "dataoxztfwfqch"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("agfyvrtpqpe") - .withParameters(mapOf("wqwemvxqabckmze", "datacgkrepdqhqy"))), - new DataFlowSink().withName("rvwerfwxbsmtb").withDescription("jehhci") - .withDataset(new DatasetReference().withReferenceName("wdv").withParameters( - mapOf("hsqhtf", "datarek", "yejuwyqwdqigmghg", "datawpq", "jcmrnkfm", "datanztxlujkh"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("cqtwmlmhjnqtq") - .withParameters(mapOf("dvragpokddxejhh", "dataj"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("uavt").withDatasetParameters("databkew") - .withParameters(mapOf("pgb", "datan")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("adydg") - .withParameters(mapOf("mnmabeddqil", "datautwukexzg"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("d") - .withParameters(mapOf("vstclg", "dataqfp"))), - new DataFlowSink().withName("ajnkdflqionswae").withDescription("zfz") - .withDataset(new DatasetReference().withReferenceName("jo").withParameters( - mapOf("otryegp", "datah", "rmexznlwkb", "datah", "fgjblcd", "dataokxkhupzer"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yfcemftz") - .withParameters(mapOf("ugekdfqn", "datakya", "owrczfjjnnuxxr", "datattw", "frhjulrsulwzp", - "datakmhmnulwempdc"))) + MappingDataFlowTypeProperties model + = new MappingDataFlowTypeProperties() + .withSources( + Arrays.asList( + new DataFlowSource().withName("b") + .withDescription("ijh") + .withDataset(new DatasetReference().withReferenceName("zgdkkagvwu") + 
.withParameters(mapOf("mzhwilzzhni", "datausmmor", "neyttl", "datamriprlk", "bkut", + "datacxiv", "ynbpvzlqywauy", "dataumltwjflu"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("jckhmocgj") + .withParameters(mapOf("bsjrclrv", "dataouarhwvixqqggljk", "nrbctbhpjhxpcvrd", "dataz", + "it", "datay"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("qady") + .withDatasetParameters("datajahwriuomzczf") + .withParameters( + mapOf("xwspcaxikhfjq", "dataeevsa", "ysemtmesrfsvpin", "databglcxkxgzzromvy")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ee") + .withParameters(mapOf("jhdhz", "datahryvcjwqwoqsra"))), + new DataFlowSource().withName("sffofwanmhksca") + .withDescription("azcg") + .withDataset(new DatasetReference().withReferenceName("friwgybjp") + .withParameters(mapOf("wewqkj", "datakscvgllixdgbyf"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("prwpxsoohu") + .withParameters(mapOf("lfb", "datacskltezuuggg", "srtmdylperpiltt", "datardcgu"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("czfcmfpfbod") + .withDatasetParameters("dataresrgvtshuvft") + .withParameters(mapOf("zgjypanhxmpdxxz", "datavmuqke", "tnxlkfhglhr", "datatwwzjw", + "qxovppqibukk", "dataoxqwecrsnhpcse", "etjsczivfqbqna", "datavzrlrmlcc")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ylytcovqseusr") + .withParameters(mapOf("zfxnxmlbmuos", "datad", "zlwhbwzjnufzrfgm", "datakjmdihdcyy", + "tftedz", "dataqgnnbz", "vdpwwobtdphti", "dataubjtvgjsxmtyjjv"))))) + .withSinks( + Arrays.asList(new DataFlowSink().withName("i") + .withDescription("aqgrv") + .withDataset(new DatasetReference().withReferenceName("omxp") + .withParameters(mapOf("wiqbuoutm", "datatsdfjyieso", "gbww", "datayzgleofjs"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("dajfwnncfma") + .withParameters(mapOf("l", "datagjj", "uxtndopgjttbasua", "dataiqlwixvtbou", "lndbeaqbkixv", + "dataap", "spugzfeuzjljmph", "datalwynpbbfqvzfj"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("snawmhhgzotfri").withDatasetParameters("datagkoekvzwxxyxh") - .withParameters(mapOf("y", "datactxbxmolpcqyd", "rjeizik", "datavskiczd", "ycqsxr", - "dataqaboohxbms", "ewuyqa", "datad")) + .withReferenceName("ezolgjzm") + .withDatasetParameters("datauydoccnx") + .withParameters(mapOf("hbatecaatsdohzni", "datanzbuia", "wiinjdllw", "datacbdaom")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("nhrxlel") - .withParameters(mapOf("izcpihtdmiw", "datak", "caydbjzcqymlcfnz", "dataekpt"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("hsurlgw") - .withParameters(mapOf("tauolawiubmom", "datammzp", "ohewjj", "datagvvjhvvlr"))))) - .withTransformations( - Arrays.asList( - new Transformation().withName("oez").withDescription("xrkdknkobektm") - .withDataset(new DatasetReference().withReferenceName("z") - .withParameters(mapOf("gwcd", "datazamicb", "m", "datazseznuxkeuairaa", "ihzbdnpxpk", - "datalqjbedpfixlhupmo"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pre") - .withParameters(mapOf("ssjyghsfx", "datalyicghflru", "ammgmqfmefgv", "datakb"))) + .withSchemaLinkedService(new 
LinkedServiceReference().withReferenceName("shezs") + .withParameters(mapOf("sayebra", "datalrupjovmo", "ykykip", "datawzlpzbtz", "mbezacfpztg", + "datasdyepfno"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("wyqejgaao") + .withParameters(mapOf("ngdyfcixrhlcq", "datagkppgkqzkcyzmf", "goiutgw", "datahoe", + "yntacihnco", "datamkahpqha", "mliqmvlb", "datamip"))))) + .withTransformations(Arrays.asList( + new Transformation().withName("nkgfcfdr") + .withDescription("si") + .withDataset(new DatasetReference().withReferenceName("xtclhuulriqbyok") + .withParameters(mapOf("cdjwsu", "databzsxebrslttfy", "holhujbfw", "dataardnagttpufpbpgn", + "whdmcvhtbbz", "dataiplkysolsyjprxs"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("f") + .withParameters(mapOf("zgihotje", "datawzbxpc"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("dwj").withDatasetParameters("datauxweyslandkd") - .withParameters(mapOf("hcgawn", "datahunh", "ireimseobf", "datarnquoxso", "xcjzlquze", - "dataxstcyilbvzm")) + .withReferenceName("mx") + .withDatasetParameters("datalrrskap") + .withParameters(mapOf("uyader", "dataiee")) .withAdditionalProperties(mapOf())), - new Transformation().withName("pgidnw").withDescription("haqidoyzltgiomqo") - .withDataset(new DatasetReference().withReferenceName("epiaeapfsergd") - .withParameters(mapOf("b", "dataqnacyheq"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("qncjubkhjozfymcw").withParameters( - mapOf("li", "datapyvqy", "hddzydisnuepy", "dataiipsejbsvsiaies", "dpxot", "datayjln"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("i").withDatasetParameters("datacqibzj") - .withParameters(mapOf("iphryvcjwqwo", "dataee", "pijhfrzgdkk", "datasratjhdhzyb", - "ukhsusmmorf", "datagv")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("umltwjflu").withDescription("nbpvzlq") - .withDataset(new DatasetReference().withReferenceName("auyqnj").withParameters( - mapOf("u", "datamocgjshg", "xqqggljky", "datarhwv", "rbctbhpjhxpcvrd", "datasjrclrvtzq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("eitaneqadynzjahw") - .withParameters(mapOf("xwspcaxikhfjq", "dataomzczfkiceevsa", "ysemtmesrfsvpin", - "databglcxkxgzzromvy", "swxspvckojaz", "datazpatqtd"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("spftesubzpv").withDatasetParameters("datadylytcovq") - .withParameters(mapOf("lbmuos", "datasrfjbdxzfxnx")).withAdditionalProperties(mapOf())), - new Transformation().withName("w").withDescription("cgwdfriwgybjp") - .withDataset(new DatasetReference().withReferenceName("ok") - .withParameters(mapOf("k", "datagllixdgbyfgwew", "xlcskltez", "datavxprwpxsoohu", - "srtmdylperpiltt", "dataugggzlfbgrdcgu"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("gczfc") - .withParameters(mapOf("uvftwaivmuqk", "datafbodetresrgvts"))) + new Transformation().withName("rkjguwrjmwvvbt") + .withDescription("kxxi") + .withDataset(new DatasetReference().withReferenceName("gxql") + .withParameters(mapOf("vjaqu", "dataotjgxieqfkyfhi"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("yynvskpa") + .withParameters(mapOf("bxcbccwkqm", "datageumexm"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - 
.withReferenceName("gjypanhxmpdxxze").withDatasetParameters("datawzjwotnxlkfhg") - .withParameters(mapOf("pcs", "datafoxqwecrsn")).withAdditionalProperties(mapOf())))) - .withScript("tdsh").withScriptLines(Arrays.asList("vkolrupjovmo", "sayebra")); + .withReferenceName("p") + .withDatasetParameters("dataiscr") + .withParameters(mapOf("dahlfxlmuifmuadj", "datagftrqrejdaahuqim", "skiioshjgczetybn", + "datafsn", "j", "datagztlcgc")) + .withAdditionalProperties(mapOf())))) + .withScript("oxz") + .withScriptLines(Arrays.asList("syxjijeyp", "vrbkerdkdkgaw")); model = BinaryData.fromObject(model).toObject(MappingDataFlowTypeProperties.class); - Assertions.assertEquals("cvuq", model.sources().get(0).name()); - Assertions.assertEquals("gzlrqhbj", model.sources().get(0).description()); - Assertions.assertEquals("ogdxwbsfpyxxtjlf", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("jssmctsnldkpwo", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("b", model.sources().get(0).name()); + Assertions.assertEquals("ijh", model.sources().get(0).description()); + Assertions.assertEquals("zgdkkagvwu", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("jckhmocgj", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("cvfyeowpsfxtjdhs", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("uullojkp", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("xin", model.sinks().get(0).name()); - Assertions.assertEquals("re", model.sinks().get(0).description()); - Assertions.assertEquals("twhlpuzjpce", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("yfwnw", model.sinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("qady", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ee", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("i", model.sinks().get(0).name()); + Assertions.assertEquals("aqgrv", model.sinks().get(0).description()); + Assertions.assertEquals("omxp", model.sinks().get(0).dataset().referenceName()); + Assertions.assertEquals("dajfwnncfma", model.sinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("voinwo", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("otvmrxk", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("agfyvrtpqpe", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("oez", model.transformations().get(0).name()); - Assertions.assertEquals("xrkdknkobektm", model.transformations().get(0).description()); - Assertions.assertEquals("z", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("pre", model.transformations().get(0).linkedService().referenceName()); + Assertions.assertEquals("ezolgjzm", model.sinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("shezs", model.sinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("wyqejgaao", model.sinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("nkgfcfdr", model.transformations().get(0).name()); + 
Assertions.assertEquals("si", model.transformations().get(0).description()); + Assertions.assertEquals("xtclhuulriqbyok", model.transformations().get(0).dataset().referenceName()); + Assertions.assertEquals("f", model.transformations().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("dwj", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("tdsh", model.script()); - Assertions.assertEquals("vkolrupjovmo", model.scriptLines().get(0)); + Assertions.assertEquals("mx", model.transformations().get(0).flowlet().referenceName()); + Assertions.assertEquals("oxz", model.script()); + Assertions.assertEquals("syxjijeyp", model.scriptLines().get(0)); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java index 497b40587514d..4fb7ab264e898 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java @@ -11,15 +11,19 @@ public final class MariaDBSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MariaDBSource model = BinaryData.fromString( - "{\"type\":\"MariaDBSource\",\"query\":\"datamezfyelf\",\"queryTimeout\":\"databkbhjdkqfj\",\"additionalColumns\":\"datayzj\",\"sourceRetryCount\":\"dataa\",\"sourceRetryWait\":\"datagatynkihb\",\"maxConcurrentConnections\":\"dataxybtowjz\",\"disableMetricsCollection\":\"datapzaenlzjxztg\",\"\":{\"tczzv\":\"dataunvwvaolfg\"}}") + "{\"type\":\"s\",\"query\":\"databbwtagxhriru\",\"queryTimeout\":\"datad\",\"additionalColumns\":\"dataxjxl\",\"sourceRetryCount\":\"datarolagbellp\",\"sourceRetryWait\":\"datafbrsmyisndf\",\"maxConcurrentConnections\":\"datahgowhnvcqhmuv\",\"disableMetricsCollection\":\"datastohurktod\",\"\":{\"phnxxwble\":\"datatyijlvc\",\"ebw\":\"databdkwzbkhvlsahj\",\"ihfxtbvhmsvcmce\":\"dataqnluszilkrcpxl\"}}") .toObject(MariaDBSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MariaDBSource model = new MariaDBSource().withSourceRetryCount("dataa").withSourceRetryWait("datagatynkihb") - .withMaxConcurrentConnections("dataxybtowjz").withDisableMetricsCollection("datapzaenlzjxztg") - .withQueryTimeout("databkbhjdkqfj").withAdditionalColumns("datayzj").withQuery("datamezfyelf"); + MariaDBSource model = new MariaDBSource().withSourceRetryCount("datarolagbellp") + .withSourceRetryWait("datafbrsmyisndf") + .withMaxConcurrentConnections("datahgowhnvcqhmuv") + .withDisableMetricsCollection("datastohurktod") + .withQueryTimeout("datad") + .withAdditionalColumns("dataxjxl") + .withQuery("databbwtagxhriru"); model = BinaryData.fromObject(model).toObject(MariaDBSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java index a09a5bbbce662..97a4b4dcaec0e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java @@ -19,32 +19,35 @@ public final class MariaDBTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MariaDBTableDataset model = BinaryData.fromString( - "{\"type\":\"MariaDBTable\",\"typeProperties\":{\"tableName\":\"datarmrfyyqjcni\"},\"description\":\"qrsdd\",\"structure\":\"dataqddldao\",\"schema\":\"datafztqewq\",\"linkedServiceName\":{\"referenceName\":\"ojesxjhtyzzwqocy\",\"parameters\":{\"trgu\":\"dataineuaxpmez\",\"oyxfoafzdypzlx\":\"datalw\",\"jzqx\":\"datamndhgwhlbpju\",\"lyhbujys\":\"datavmitn\"}},\"parameters\":{\"gnqtjtnnrjewih\":{\"type\":\"Bool\",\"defaultValue\":\"dataddbhatmabt\"},\"hmdfspkdn\":{\"type\":\"String\",\"defaultValue\":\"dataa\"},\"tertnzrrwsc\":{\"type\":\"Int\",\"defaultValue\":\"dataz\"}},\"annotations\":[\"datahdwi\",\"datanvtolzj\"],\"folder\":{\"name\":\"ryxsg\"},\"\":{\"wppvihbmwrv\":\"datanklth\",\"ob\":\"datavdrohu\"}}") + "{\"type\":\"kmzv\",\"typeProperties\":{\"tableName\":\"datatq\"},\"description\":\"qxfblsxy\",\"structure\":\"datag\",\"schema\":\"datad\",\"linkedServiceName\":{\"referenceName\":\"kllefmizdcsrvb\",\"parameters\":{\"cgqbmxbpqcn\":\"datasgfyxhs\"}},\"parameters\":{\"lfpiuuf\":{\"type\":\"String\",\"defaultValue\":\"dataojvmazu\"},\"qrgivbhmnimj\":{\"type\":\"Float\",\"defaultValue\":\"datanco\"},\"fnmmib\":{\"type\":\"Float\",\"defaultValue\":\"datab\"},\"aygumqeobr\":{\"type\":\"String\",\"defaultValue\":\"dataduyrg\"}},\"annotations\":[\"dataudgsawhamigs\"],\"folder\":{\"name\":\"fkiaagwkrx\"},\"\":{\"enubrf\":\"datag\"}}") .toObject(MariaDBTableDataset.class); - Assertions.assertEquals("qrsdd", model.description()); - Assertions.assertEquals("ojesxjhtyzzwqocy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gnqtjtnnrjewih").type()); - Assertions.assertEquals("ryxsg", model.folder().name()); + Assertions.assertEquals("qxfblsxy", model.description()); + Assertions.assertEquals("kllefmizdcsrvb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("lfpiuuf").type()); + Assertions.assertEquals("fkiaagwkrx", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MariaDBTableDataset model - = new MariaDBTableDataset().withDescription("qrsdd").withStructure("dataqddldao").withSchema("datafztqewq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ojesxjhtyzzwqocy") - .withParameters(mapOf("trgu", "dataineuaxpmez", "oyxfoafzdypzlx", "datalw", "jzqx", - "datamndhgwhlbpju", "lyhbujys", "datavmitn"))) - .withParameters(mapOf("gnqtjtnnrjewih", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataddbhatmabt"), - "hmdfspkdn", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataa"), - "tertnzrrwsc", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataz"))) - .withAnnotations(Arrays.asList("datahdwi", "datanvtolzj")) - .withFolder(new DatasetFolder().withName("ryxsg")).withTableName("datarmrfyyqjcni"); + MariaDBTableDataset model = new MariaDBTableDataset().withDescription("qxfblsxy") + .withStructure("datag") + .withSchema("datad") + 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("kllefmizdcsrvb") + .withParameters(mapOf("cgqbmxbpqcn", "datasgfyxhs"))) + .withParameters(mapOf("lfpiuuf", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataojvmazu"), + "qrgivbhmnimj", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datanco"), + "fnmmib", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datab"), + "aygumqeobr", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataduyrg"))) + .withAnnotations(Arrays.asList("dataudgsawhamigs")) + .withFolder(new DatasetFolder().withName("fkiaagwkrx")) + .withTableName("datatq"); model = BinaryData.fromObject(model).toObject(MariaDBTableDataset.class); - Assertions.assertEquals("qrsdd", model.description()); - Assertions.assertEquals("ojesxjhtyzzwqocy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gnqtjtnnrjewih").type()); - Assertions.assertEquals("ryxsg", model.folder().name()); + Assertions.assertEquals("qxfblsxy", model.description()); + Assertions.assertEquals("kllefmizdcsrvb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("lfpiuuf").type()); + Assertions.assertEquals("fkiaagwkrx", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java index 1237ecd4cdb3d..b919b6a1cfc80 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java @@ -19,29 +19,32 @@ public final class MarketoObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MarketoObjectDataset model = BinaryData.fromString( - "{\"type\":\"MarketoObject\",\"typeProperties\":{\"tableName\":\"dataewltono\"},\"description\":\"femiwfhhawbabhz\",\"structure\":\"datacdikqnxydgzfoiqz\",\"schema\":\"dataspa\",\"linkedServiceName\":{\"referenceName\":\"w\",\"parameters\":{\"eafkxfmuwdbvyt\":\"dataqeron\",\"u\":\"dataavouymkd\"}},\"parameters\":{\"tth\":{\"type\":\"Float\",\"defaultValue\":\"datapfdkaxgbiwpgopql\"}},\"annotations\":[\"datarmt\",\"datax\",\"datajmpdvrjzwaw\",\"dataewajccsdjuz\"],\"folder\":{\"name\":\"jtickzovguzpr\"},\"\":{\"qlrzhtocjzfp\":\"datahboigzxko\",\"jwgiitvjcmimbmsw\":\"dataexuvatzwn\"}}") + "{\"type\":\"vuylyumbwep\",\"typeProperties\":{\"tableName\":\"datafo\"},\"description\":\"nuomsx\",\"structure\":\"datakhmemxkahapesn\",\"schema\":\"dataoullyfzmnxrmxx\",\"linkedServiceName\":{\"referenceName\":\"vwbatjgzkmwfw\",\"parameters\":{\"i\":\"datapxfmdj\",\"cbdnpfcg\":\"datammdzphxulx\",\"bzxp\":\"datadttowqxxc\",\"wtc\":\"dataloovhati\"}},\"parameters\":{\"qpzksdpfvls\":{\"type\":\"Object\",\"defaultValue\":\"datakfeomotquqlqeyis\"}},\"annotations\":[\"dataetwtlafnkjt\",\"datafbdpn\",\"datavhgcgr\",\"datalyyfsmoc\"],\"folder\":{\"name\":\"rchmetvzhuugd\"},\"\":{\"cawwayqtsrn\":\"datadskwvb\"}}") .toObject(MarketoObjectDataset.class); - 
Assertions.assertEquals("femiwfhhawbabhz", model.description()); - Assertions.assertEquals("w", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tth").type()); - Assertions.assertEquals("jtickzovguzpr", model.folder().name()); + Assertions.assertEquals("nuomsx", model.description()); + Assertions.assertEquals("vwbatjgzkmwfw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("qpzksdpfvls").type()); + Assertions.assertEquals("rchmetvzhuugd", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MarketoObjectDataset model = new MarketoObjectDataset().withDescription("femiwfhhawbabhz") - .withStructure("datacdikqnxydgzfoiqz").withSchema("dataspa") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("w") - .withParameters(mapOf("eafkxfmuwdbvyt", "dataqeron", "u", "dataavouymkd"))) - .withParameters(mapOf("tth", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapfdkaxgbiwpgopql"))) - .withAnnotations(Arrays.asList("datarmt", "datax", "datajmpdvrjzwaw", "dataewajccsdjuz")) - .withFolder(new DatasetFolder().withName("jtickzovguzpr")).withTableName("dataewltono"); + MarketoObjectDataset model = new MarketoObjectDataset().withDescription("nuomsx") + .withStructure("datakhmemxkahapesn") + .withSchema("dataoullyfzmnxrmxx") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vwbatjgzkmwfw") + .withParameters(mapOf("i", "datapxfmdj", "cbdnpfcg", "datammdzphxulx", "bzxp", "datadttowqxxc", "wtc", + "dataloovhati"))) + .withParameters(mapOf("qpzksdpfvls", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakfeomotquqlqeyis"))) + .withAnnotations(Arrays.asList("dataetwtlafnkjt", "datafbdpn", "datavhgcgr", "datalyyfsmoc")) + .withFolder(new DatasetFolder().withName("rchmetvzhuugd")) + .withTableName("datafo"); model = BinaryData.fromObject(model).toObject(MarketoObjectDataset.class); - Assertions.assertEquals("femiwfhhawbabhz", model.description()); - Assertions.assertEquals("w", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tth").type()); - Assertions.assertEquals("jtickzovguzpr", model.folder().name()); + Assertions.assertEquals("nuomsx", model.description()); + Assertions.assertEquals("vwbatjgzkmwfw", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("qpzksdpfvls").type()); + Assertions.assertEquals("rchmetvzhuugd", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java index 5d4f393344149..3579e072ab8f4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java @@ -11,16 +11,19 @@ public final class MarketoSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MarketoSource model = BinaryData.fromString( - 
"{\"type\":\"MarketoSource\",\"query\":\"dataylvrofhhitjhh\",\"queryTimeout\":\"datavwrc\",\"additionalColumns\":\"datahllmblls\",\"sourceRetryCount\":\"datafdrimoopfr\",\"sourceRetryWait\":\"datajjrhxornuoqpob\",\"maxConcurrentConnections\":\"datarsdx\",\"disableMetricsCollection\":\"datamq\",\"\":{\"lseoixqp\":\"databqyavcxj\",\"fsuwcmzpwkca\":\"datamsfqntakroxku\",\"zq\":\"datafq\"}}") + "{\"type\":\"l\",\"query\":\"dataddpbt\",\"queryTimeout\":\"datacrmptjsixawipj\",\"additionalColumns\":\"datacyxnza\",\"sourceRetryCount\":\"datakhgdzrcq\",\"sourceRetryWait\":\"dataapohemine\",\"maxConcurrentConnections\":\"datajrdxhlovmxhztdca\",\"disableMetricsCollection\":\"datamvqgqmi\",\"\":{\"tcihupoeljfni\":\"dataagmhhwcyasziuh\"}}") .toObject(MarketoSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MarketoSource model - = new MarketoSource().withSourceRetryCount("datafdrimoopfr").withSourceRetryWait("datajjrhxornuoqpob") - .withMaxConcurrentConnections("datarsdx").withDisableMetricsCollection("datamq") - .withQueryTimeout("datavwrc").withAdditionalColumns("datahllmblls").withQuery("dataylvrofhhitjhh"); + MarketoSource model = new MarketoSource().withSourceRetryCount("datakhgdzrcq") + .withSourceRetryWait("dataapohemine") + .withMaxConcurrentConnections("datajrdxhlovmxhztdca") + .withDisableMetricsCollection("datamvqgqmi") + .withQueryTimeout("datacrmptjsixawipj") + .withAdditionalColumns("datacyxnza") + .withQuery("dataddpbt"); model = BinaryData.fromObject(model).toObject(MarketoSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java index f026640cb977c..625538f0d4a92 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java @@ -11,12 +11,12 @@ public final class MetadataItemTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MetadataItem model - = BinaryData.fromString("{\"name\":\"datagyxkg\",\"value\":\"datay\"}").toObject(MetadataItem.class); + = BinaryData.fromString("{\"name\":\"datavej\",\"value\":\"datayu\"}").toObject(MetadataItem.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MetadataItem model = new MetadataItem().withName("datagyxkg").withValue("datay"); + MetadataItem model = new MetadataItem().withName("datavej").withValue("datayu"); model = BinaryData.fromObject(model).toObject(MetadataItem.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java index de0cece932b05..bea45005a7237 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java @@ -11,16 +11,19 @@ public final class MicrosoftAccessSinkTests { 
@org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MicrosoftAccessSink model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessSink\",\"preCopyScript\":\"datailyxpqxnlifhjym\",\"writeBatchSize\":\"datajliivyatyzwy\",\"writeBatchTimeout\":\"dataaycjphozymcypdbu\",\"sinkRetryCount\":\"datanktlzngidgwsc\",\"sinkRetryWait\":\"datamhgzapcgdk\",\"maxConcurrentConnections\":\"dataa\",\"disableMetricsCollection\":\"datacpohlfvsb\",\"\":{\"bjbbyoud\":\"datag\",\"mcumuomdlspsb\":\"datatdlkucxtyufsouh\"}}") + "{\"type\":\"lgpepxbjjnxdgn\",\"preCopyScript\":\"datavjw\",\"writeBatchSize\":\"datakltlpbb\",\"writeBatchTimeout\":\"datamm\",\"sinkRetryCount\":\"datamvada\",\"sinkRetryWait\":\"datae\",\"maxConcurrentConnections\":\"datawtdzgngnuuz\",\"disableMetricsCollection\":\"datagfojdb\",\"\":{\"vqmxzdi\":\"datamnelqlqn\",\"nrpqsj\":\"datan\",\"reqbwa\":\"datagncyksb\"}}") .toObject(MicrosoftAccessSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MicrosoftAccessSink model = new MicrosoftAccessSink().withWriteBatchSize("datajliivyatyzwy") - .withWriteBatchTimeout("dataaycjphozymcypdbu").withSinkRetryCount("datanktlzngidgwsc") - .withSinkRetryWait("datamhgzapcgdk").withMaxConcurrentConnections("dataa") - .withDisableMetricsCollection("datacpohlfvsb").withPreCopyScript("datailyxpqxnlifhjym"); + MicrosoftAccessSink model = new MicrosoftAccessSink().withWriteBatchSize("datakltlpbb") + .withWriteBatchTimeout("datamm") + .withSinkRetryCount("datamvada") + .withSinkRetryWait("datae") + .withMaxConcurrentConnections("datawtdzgngnuuz") + .withDisableMetricsCollection("datagfojdb") + .withPreCopyScript("datavjw"); model = BinaryData.fromObject(model).toObject(MicrosoftAccessSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java index e460cd14fc394..1e5fd1de09e8f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java @@ -11,16 +11,18 @@ public final class MicrosoftAccessSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MicrosoftAccessSource model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessSource\",\"query\":\"datadljqjstncjwz\",\"additionalColumns\":\"datatezltlundkj\",\"sourceRetryCount\":\"datavhhxivshjuxm\",\"sourceRetryWait\":\"dataythxearlp\",\"maxConcurrentConnections\":\"datajjticly\",\"disableMetricsCollection\":\"dataduxbungmpn\",\"\":{\"goazzy\":\"datagucdfxglrcj\",\"nyhzestt\":\"datajcwuzanpoyrqjoni\",\"gzdbonep\":\"datacloq\"}}") + "{\"type\":\"hsodofsxjik\",\"query\":\"datawkwxdgcfcfkyyrj\",\"additionalColumns\":\"datahslrbwwkbyw\",\"sourceRetryCount\":\"datasquhu\",\"sourceRetryWait\":\"dataqwoggwcxdm\",\"maxConcurrentConnections\":\"datauwldfahkyft\",\"disableMetricsCollection\":\"datakbgsgopyckmncru\",\"\":{\"qkgixfnrneyav\":\"datadjmda\",\"iizbwfjumulhf\":\"datadovpwrqcfzokplz\",\"qcapbkfvowzbk\":\"datadgnchahldnrpt\"}}") .toObject(MicrosoftAccessSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MicrosoftAccessSource model 
- = new MicrosoftAccessSource().withSourceRetryCount("datavhhxivshjuxm").withSourceRetryWait("dataythxearlp") - .withMaxConcurrentConnections("datajjticly").withDisableMetricsCollection("dataduxbungmpn") - .withQuery("datadljqjstncjwz").withAdditionalColumns("datatezltlundkj"); + MicrosoftAccessSource model = new MicrosoftAccessSource().withSourceRetryCount("datasquhu") + .withSourceRetryWait("dataqwoggwcxdm") + .withMaxConcurrentConnections("datauwldfahkyft") + .withDisableMetricsCollection("datakbgsgopyckmncru") + .withQuery("datawkwxdgcfcfkyyrj") + .withAdditionalColumns("datahslrbwwkbyw"); model = BinaryData.fromObject(model).toObject(MicrosoftAccessSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java index bdda954ab2539..8f5b891c5c586 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java @@ -19,34 +19,32 @@ public final class MicrosoftAccessTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MicrosoftAccessTableDataset model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessTable\",\"typeProperties\":{\"tableName\":\"datanrkbnv\"},\"description\":\"cklzhznfgvlxy\",\"structure\":\"datanctigpksywi\",\"schema\":\"datalktgkdp\",\"linkedServiceName\":{\"referenceName\":\"tqjytdc\",\"parameters\":{\"gmlamoaxc\":\"datauhbdwbvjs\",\"kvbpbl\":\"dataytn\",\"exheeocnqo\":\"datacw\"}},\"parameters\":{\"xyfhxohzbzhhavzf\":{\"type\":\"Object\",\"defaultValue\":\"datavlryszfh\"},\"cofuvtfu\":{\"type\":\"Array\",\"defaultValue\":\"datavkds\"},\"subzsspmj\":{\"type\":\"Bool\",\"defaultValue\":\"datauisaklhjfddxqfu\"},\"wbztrt\":{\"type\":\"Int\",\"defaultValue\":\"datalfauyvxpqwlkqd\"}},\"annotations\":[\"datawvoglff\",\"datadhg\"],\"folder\":{\"name\":\"rmhbtofcv\"},\"\":{\"g\":\"datalhcnsdylmnqunk\"}}") + "{\"type\":\"suivmrf\",\"typeProperties\":{\"tableName\":\"databmseesacuicnvq\"},\"description\":\"tnd\",\"structure\":\"datamnlhnkmx\",\"schema\":\"datakekc\",\"linkedServiceName\":{\"referenceName\":\"aviiebeqrfz\",\"parameters\":{\"r\":\"dataymcwtsiucepl\",\"e\":\"datagkuorwpqbst\"}},\"parameters\":{\"noyjyflsm\":{\"type\":\"Float\",\"defaultValue\":\"datacruykkielay\"},\"gdfz\":{\"type\":\"Array\",\"defaultValue\":\"datalyoi\"}},\"annotations\":[\"datahykcvoevcwf\",\"dataotkxxlwwooxg\",\"datasdz\",\"datagcvypjhu\"],\"folder\":{\"name\":\"gobxehujcqg\"},\"\":{\"klqrljdc\":\"dataxwiuaoibm\",\"hgxuwudgcyqruvum\":\"datakylaxrjiqoqovqhg\",\"cqwnjz\":\"datayddnqivah\"}}") .toObject(MicrosoftAccessTableDataset.class); - Assertions.assertEquals("cklzhznfgvlxy", model.description()); - Assertions.assertEquals("tqjytdc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("xyfhxohzbzhhavzf").type()); - Assertions.assertEquals("rmhbtofcv", model.folder().name()); + Assertions.assertEquals("tnd", model.description()); + Assertions.assertEquals("aviiebeqrfz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, 
model.parameters().get("noyjyflsm").type()); + Assertions.assertEquals("gobxehujcqg", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MicrosoftAccessTableDataset model = new MicrosoftAccessTableDataset().withDescription("cklzhznfgvlxy") - .withStructure("datanctigpksywi").withSchema("datalktgkdp") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tqjytdc") - .withParameters(mapOf("gmlamoaxc", "datauhbdwbvjs", "kvbpbl", "dataytn", "exheeocnqo", "datacw"))) - .withParameters(mapOf("xyfhxohzbzhhavzf", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavlryszfh"), - "cofuvtfu", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavkds"), - "subzsspmj", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datauisaklhjfddxqfu"), - "wbztrt", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalfauyvxpqwlkqd"))) - .withAnnotations(Arrays.asList("datawvoglff", "datadhg")) - .withFolder(new DatasetFolder().withName("rmhbtofcv")).withTableName("datanrkbnv"); + MicrosoftAccessTableDataset model = new MicrosoftAccessTableDataset().withDescription("tnd") + .withStructure("datamnlhnkmx") + .withSchema("datakekc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aviiebeqrfz") + .withParameters(mapOf("r", "dataymcwtsiucepl", "e", "datagkuorwpqbst"))) + .withParameters(mapOf("noyjyflsm", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datacruykkielay"), "gdfz", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datalyoi"))) + .withAnnotations(Arrays.asList("datahykcvoevcwf", "dataotkxxlwwooxg", "datasdz", "datagcvypjhu")) + .withFolder(new DatasetFolder().withName("gobxehujcqg")) + .withTableName("databmseesacuicnvq"); model = BinaryData.fromObject(model).toObject(MicrosoftAccessTableDataset.class); - Assertions.assertEquals("cklzhznfgvlxy", model.description()); - Assertions.assertEquals("tqjytdc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("xyfhxohzbzhhavzf").type()); - Assertions.assertEquals("rmhbtofcv", model.folder().name()); + Assertions.assertEquals("tnd", model.description()); + Assertions.assertEquals("aviiebeqrfz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("noyjyflsm").type()); + Assertions.assertEquals("gobxehujcqg", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java index a58660ea54f07..c5f043af88930 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class MicrosoftAccessTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - 
MicrosoftAccessTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datatmsgkwedwl\"}") + MicrosoftAccessTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datapicciyoypoe\"}") .toObject(MicrosoftAccessTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MicrosoftAccessTableDatasetTypeProperties model - = new MicrosoftAccessTableDatasetTypeProperties().withTableName("datatmsgkwedwl"); + = new MicrosoftAccessTableDatasetTypeProperties().withTableName("datapicciyoypoe"); model = BinaryData.fromObject(model).toObject(MicrosoftAccessTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java index d4b969ab5dc01..e1484c55b8279 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java @@ -19,30 +19,32 @@ public final class MongoDbAtlasCollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasCollectionDataset model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlasCollection\",\"typeProperties\":{\"collection\":\"datafpohimgckycjpeeb\"},\"description\":\"bznxsuloutnpbm\",\"structure\":\"dataoqohgp\",\"schema\":\"datadmwk\",\"linkedServiceName\":{\"referenceName\":\"upf\",\"parameters\":{\"dzauiunyev\":\"datad\",\"uynfxkcgsfcmvh\":\"datayzdsytcikswhcam\",\"atvyrkljqkqws\":\"datadrp\",\"bypnkteiidlbov\":\"datajtvjkowggxawwd\"}},\"parameters\":{\"rekyjulskwwn\":{\"type\":\"String\",\"defaultValue\":\"datargeganihkjcn\"}},\"annotations\":[\"datalqgpwxtvceba\"],\"folder\":{\"name\":\"vxwve\"},\"\":{\"csmwevguy\":\"datalr\",\"rj\":\"datalnxe\",\"owwe\":\"datafzcde\",\"sfqbirtybcelfjn\":\"datahyfkdilbwqlqa\"}}") + "{\"type\":\"jrnogykugdl\",\"typeProperties\":{\"collection\":\"datalkyhtrrqwfyybpt\"},\"description\":\"sav\",\"structure\":\"datahk\",\"schema\":\"datageuufkb\",\"linkedServiceName\":{\"referenceName\":\"bfbxj\",\"parameters\":{\"sazdjmofsvpz\":\"datajybdnbycsbto\",\"mlfjymgw\":\"datagnywxu\",\"q\":\"datamszcfy\",\"re\":\"datae\"}},\"parameters\":{\"vrgihl\":{\"type\":\"Array\",\"defaultValue\":\"datapagknxma\"}},\"annotations\":[\"datafewvqk\"],\"folder\":{\"name\":\"cgeipqxxsdyaf\"},\"\":{\"clejqzhpv\":\"datadsmmabh\",\"eullgfyog\":\"dataxpbadj\"}}") .toObject(MongoDbAtlasCollectionDataset.class); - Assertions.assertEquals("bznxsuloutnpbm", model.description()); - Assertions.assertEquals("upf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("rekyjulskwwn").type()); - Assertions.assertEquals("vxwve", model.folder().name()); + Assertions.assertEquals("sav", model.description()); + Assertions.assertEquals("bfbxj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vrgihl").type()); + Assertions.assertEquals("cgeipqxxsdyaf", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - 
MongoDbAtlasCollectionDataset model = new MongoDbAtlasCollectionDataset().withDescription("bznxsuloutnpbm") - .withStructure("dataoqohgp").withSchema("datadmwk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("upf") - .withParameters(mapOf("dzauiunyev", "datad", "uynfxkcgsfcmvh", "datayzdsytcikswhcam", "atvyrkljqkqws", - "datadrp", "bypnkteiidlbov", "datajtvjkowggxawwd"))) - .withParameters(mapOf("rekyjulskwwn", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datargeganihkjcn"))) - .withAnnotations(Arrays.asList("datalqgpwxtvceba")).withFolder(new DatasetFolder().withName("vxwve")) - .withCollection("datafpohimgckycjpeeb"); + MongoDbAtlasCollectionDataset model = new MongoDbAtlasCollectionDataset().withDescription("sav") + .withStructure("datahk") + .withSchema("datageuufkb") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bfbxj") + .withParameters(mapOf("sazdjmofsvpz", "datajybdnbycsbto", "mlfjymgw", "datagnywxu", "q", "datamszcfy", + "re", "datae"))) + .withParameters(mapOf("vrgihl", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapagknxma"))) + .withAnnotations(Arrays.asList("datafewvqk")) + .withFolder(new DatasetFolder().withName("cgeipqxxsdyaf")) + .withCollection("datalkyhtrrqwfyybpt"); model = BinaryData.fromObject(model).toObject(MongoDbAtlasCollectionDataset.class); - Assertions.assertEquals("bznxsuloutnpbm", model.description()); - Assertions.assertEquals("upf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("rekyjulskwwn").type()); - Assertions.assertEquals("vxwve", model.folder().name()); + Assertions.assertEquals("sav", model.description()); + Assertions.assertEquals("bfbxj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vrgihl").type()); + Assertions.assertEquals("cgeipqxxsdyaf", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java index 7cc429c86e6ca..1fc2cece29caa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class MongoDbAtlasCollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collection\":\"dataodnjyhzfaxskdv\"}") + = BinaryData.fromString("{\"collection\":\"dataqscjpvqerqxk\"}") .toObject(MongoDbAtlasCollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MongoDbAtlasCollectionDatasetTypeProperties model - = new MongoDbAtlasCollectionDatasetTypeProperties().withCollection("dataodnjyhzfaxskdv"); + = new MongoDbAtlasCollectionDatasetTypeProperties().withCollection("dataqscjpvqerqxk"); model = 
BinaryData.fromObject(model).toObject(MongoDbAtlasCollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java index 30e9d8bb502d2..1354a17fac794 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java @@ -18,29 +18,34 @@ public final class MongoDbAtlasLinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasLinkedService model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlas\",\"typeProperties\":{\"connectionString\":\"datakubwkdi\",\"database\":\"dataslnbyi\",\"driverVersion\":\"dataetwwjvxycygcv\"},\"connectVia\":{\"referenceName\":\"bfeitya\",\"parameters\":{\"pd\":\"datacvttkg\",\"icucqtecie\":\"dataqn\",\"zhjjlypwgwbycfam\":\"datascx\",\"irhhbogxw\":\"dataikisppygothi\"}},\"description\":\"pymyerpus\",\"parameters\":{\"gbzjoyyptnok\":{\"type\":\"String\",\"defaultValue\":\"datatjliwbnwd\"},\"zxkflz\":{\"type\":\"Int\",\"defaultValue\":\"dataxkzeowizvje\"}},\"annotations\":[\"dataxxboauvkkc\",\"datawgnwhldctn\"],\"\":{\"rvhed\":\"datauqmdynmtnlcvl\",\"yag\":\"databzzb\"}}") + "{\"type\":\"fshhc\",\"typeProperties\":{\"connectionString\":\"datayvjispkgkhvpvbz\",\"database\":\"datayo\",\"driverVersion\":\"dataxstxsfztlvs\"},\"connectVia\":{\"referenceName\":\"sowyhxwhdyfgtwx\",\"parameters\":{\"im\":\"databzfiacmwmc\",\"tnolziohdxyuk\":\"datahrfmcjjxxwzdwmju\",\"sffpizef\":\"dataplfwykrpojen\",\"zcevf\":\"datajgblehxpeuahvxf\"}},\"description\":\"ekyd\",\"parameters\":{\"pejhyrzzxqtcg\":{\"type\":\"Bool\",\"defaultValue\":\"datafkneckvyrvdszri\"},\"juingnfunhtzgxs\":{\"type\":\"Bool\",\"defaultValue\":\"datahzcicitykzyi\"},\"dhjztbwzjbqzqwb\":{\"type\":\"Object\",\"defaultValue\":\"datamopk\"}},\"annotations\":[\"datahqzdbzlkd\",\"databekvprkwpvxieqc\"],\"\":{\"aeiuex\":\"datazshfafbeh\",\"ecrizkabafdlsiza\":\"datapgrmwdwlraeplpf\"}}") .toObject(MongoDbAtlasLinkedService.class); - Assertions.assertEquals("bfeitya", model.connectVia().referenceName()); - Assertions.assertEquals("pymyerpus", model.description()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gbzjoyyptnok").type()); + Assertions.assertEquals("sowyhxwhdyfgtwx", model.connectVia().referenceName()); + Assertions.assertEquals("ekyd", model.description()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pejhyrzzxqtcg").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MongoDbAtlasLinkedService model = new MongoDbAtlasLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("bfeitya") - .withParameters(mapOf("pd", "datacvttkg", "icucqtecie", "dataqn", "zhjjlypwgwbycfam", "datascx", - "irhhbogxw", "dataikisppygothi"))) - .withDescription("pymyerpus") - .withParameters(mapOf("gbzjoyyptnok", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatjliwbnwd"), "zxkflz", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataxkzeowizvje"))) - 
.withAnnotations(Arrays.asList("dataxxboauvkkc", "datawgnwhldctn")).withConnectionString("datakubwkdi") - .withDatabase("dataslnbyi").withDriverVersion("dataetwwjvxycygcv"); + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("sowyhxwhdyfgtwx") + .withParameters(mapOf("im", "databzfiacmwmc", "tnolziohdxyuk", "datahrfmcjjxxwzdwmju", "sffpizef", + "dataplfwykrpojen", "zcevf", "datajgblehxpeuahvxf"))) + .withDescription("ekyd") + .withParameters(mapOf("pejhyrzzxqtcg", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafkneckvyrvdszri"), + "juingnfunhtzgxs", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datahzcicitykzyi"), + "dhjztbwzjbqzqwb", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datamopk"))) + .withAnnotations(Arrays.asList("datahqzdbzlkd", "databekvprkwpvxieqc")) + .withConnectionString("datayvjispkgkhvpvbz") + .withDatabase("datayo") + .withDriverVersion("dataxstxsfztlvs"); model = BinaryData.fromObject(model).toObject(MongoDbAtlasLinkedService.class); - Assertions.assertEquals("bfeitya", model.connectVia().referenceName()); - Assertions.assertEquals("pymyerpus", model.description()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gbzjoyyptnok").type()); + Assertions.assertEquals("sowyhxwhdyfgtwx", model.connectVia().referenceName()); + Assertions.assertEquals("ekyd", model.description()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("pejhyrzzxqtcg").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java index 49f4785254adf..7ab54e0157d24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class MongoDbAtlasLinkedServiceTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"connectionString\":\"dataxwcndkdlxdlh\",\"database\":\"dataalz\",\"driverVersion\":\"dataawkyxownoypqu\"}") + "{\"connectionString\":\"databsgpdbhbdx\",\"database\":\"datajsoxuuwuungdvv\",\"driverVersion\":\"datarcpq\"}") .toObject(MongoDbAtlasLinkedServiceTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbAtlasLinkedServiceTypeProperties model = new MongoDbAtlasLinkedServiceTypeProperties() - .withConnectionString("dataxwcndkdlxdlh").withDatabase("dataalz").withDriverVersion("dataawkyxownoypqu"); + MongoDbAtlasLinkedServiceTypeProperties model + = new MongoDbAtlasLinkedServiceTypeProperties().withConnectionString("databsgpdbhbdx") + .withDatabase("datajsoxuuwuungdvv") + .withDriverVersion("datarcpq"); model = BinaryData.fromObject(model).toObject(MongoDbAtlasLinkedServiceTypeProperties.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java index 84eac473050a9..6923fa0a48a36 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java @@ -11,16 +11,19 @@ public final class MongoDbAtlasSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasSink model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlasSink\",\"writeBehavior\":\"datakhgdzrcq\",\"writeBatchSize\":\"dataapohemine\",\"writeBatchTimeout\":\"datajrdxhlovmxhztdca\",\"sinkRetryCount\":\"datamvqgqmi\",\"sinkRetryWait\":\"datapa\",\"maxConcurrentConnections\":\"datah\",\"disableMetricsCollection\":\"datacyasz\",\"\":{\"ni\":\"datamtcihupoelj\",\"awbsdeqqbdcbnrg\":\"datayoxajit\",\"mtgtnb\":\"datapnor\",\"rwldeinhnsd\":\"datasopuwesmxodyto\"}}") + "{\"type\":\"eftcinjcrayoasku\",\"writeBehavior\":\"datauxvujuxvl\",\"writeBatchSize\":\"dataqweoob\",\"writeBatchTimeout\":\"datagymbzaw\",\"sinkRetryCount\":\"datatzxqbqzplzyjktc\",\"sinkRetryWait\":\"datawvhiaxkmditcz\",\"maxConcurrentConnections\":\"datacqobujfxyfhlw\",\"disableMetricsCollection\":\"dataaaxpwkm\",\"\":{\"ccbduwsw\":\"dataccmjo\",\"gmewdmlk\":\"databqycubmeih\",\"pts\":\"datawchslb\"}}") .toObject(MongoDbAtlasSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbAtlasSink model - = new MongoDbAtlasSink().withWriteBatchSize("dataapohemine").withWriteBatchTimeout("datajrdxhlovmxhztdca") - .withSinkRetryCount("datamvqgqmi").withSinkRetryWait("datapa").withMaxConcurrentConnections("datah") - .withDisableMetricsCollection("datacyasz").withWriteBehavior("datakhgdzrcq"); + MongoDbAtlasSink model = new MongoDbAtlasSink().withWriteBatchSize("dataqweoob") + .withWriteBatchTimeout("datagymbzaw") + .withSinkRetryCount("datatzxqbqzplzyjktc") + .withSinkRetryWait("datawvhiaxkmditcz") + .withMaxConcurrentConnections("datacqobujfxyfhlw") + .withDisableMetricsCollection("dataaaxpwkm") + .withWriteBehavior("datauxvujuxvl"); model = BinaryData.fromObject(model).toObject(MongoDbAtlasSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java index c0479d76c1bca..abf8527469bc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java @@ -14,19 +14,25 @@ public final class MongoDbAtlasSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbAtlasSource model = BinaryData.fromString( - 
"{\"type\":\"MongoDbAtlasSource\",\"filter\":\"datawthvu\",\"cursorMethods\":{\"project\":\"datazznvdjnspy\",\"sort\":\"dataoygutqtjwiv\",\"skip\":\"datamavfzjwdww\",\"limit\":\"dataxehndcpiwcgcwmsh\",\"\":{\"xopzclka\":\"dataxjxhdwj\",\"mga\":\"datapu\"}},\"batchSize\":\"datac\",\"queryTimeout\":\"datajjfmzv\",\"additionalColumns\":\"databflyzc\",\"sourceRetryCount\":\"datamlybsy\",\"sourceRetryWait\":\"dataon\",\"maxConcurrentConnections\":\"datavbfpu\",\"disableMetricsCollection\":\"dataobtdhum\",\"\":{\"jefclih\":\"datawckapoetdfzj\",\"lqzopvhwmtdbfrj\":\"datanawipdqozv\",\"uv\":\"dataq\",\"feagordbs\":\"dataps\"}}") + "{\"type\":\"f\",\"filter\":\"datavufqukjuosajq\",\"cursorMethods\":{\"project\":\"datacqdthmlqamdlcu\",\"sort\":\"datamrvryakc\",\"skip\":\"datasnprda\",\"limit\":\"dataqgabbxexacgmt\",\"\":{\"jlsztpygqwkdlx\":\"databwobovexsnmwwhbm\"}},\"batchSize\":\"datakmkcimksfejzmyv\",\"queryTimeout\":\"datazmngxzp\",\"additionalColumns\":\"databjovhddvtnbtvlg\",\"sourceRetryCount\":\"dataoewimyiz\",\"sourceRetryWait\":\"datalzzauf\",\"maxConcurrentConnections\":\"datawvyxy\",\"disableMetricsCollection\":\"datakngnt\",\"\":{\"gsjyjnhwbbhw\":\"datayv\",\"wzuerrvpamfpini\":\"datanc\"}}") .toObject(MongoDbAtlasSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbAtlasSource model = new MongoDbAtlasSource().withSourceRetryCount("datamlybsy") - .withSourceRetryWait("dataon").withMaxConcurrentConnections("datavbfpu") - .withDisableMetricsCollection("dataobtdhum").withFilter("datawthvu") - .withCursorMethods( - new MongoDbCursorMethodsProperties().withProject("datazznvdjnspy").withSort("dataoygutqtjwiv") - .withSkip("datamavfzjwdww").withLimit("dataxehndcpiwcgcwmsh").withAdditionalProperties(mapOf())) - .withBatchSize("datac").withQueryTimeout("datajjfmzv").withAdditionalColumns("databflyzc"); + MongoDbAtlasSource model = new MongoDbAtlasSource().withSourceRetryCount("dataoewimyiz") + .withSourceRetryWait("datalzzauf") + .withMaxConcurrentConnections("datawvyxy") + .withDisableMetricsCollection("datakngnt") + .withFilter("datavufqukjuosajq") + .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("datacqdthmlqamdlcu") + .withSort("datamrvryakc") + .withSkip("datasnprda") + .withLimit("dataqgabbxexacgmt") + .withAdditionalProperties(mapOf())) + .withBatchSize("datakmkcimksfejzmyv") + .withQueryTimeout("datazmngxzp") + .withAdditionalColumns("databjovhddvtnbtvlg"); model = BinaryData.fromObject(model).toObject(MongoDbAtlasSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java index 91a9af3d6b2cb..82b59a1f07993 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java @@ -19,38 +19,34 @@ public final class MongoDbCollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbCollectionDataset model = BinaryData.fromString( - 
"{\"type\":\"MongoDbCollection\",\"typeProperties\":{\"collectionName\":\"datad\"},\"description\":\"sjqb\",\"structure\":\"dataqmxv\",\"schema\":\"datariwbwggij\",\"linkedServiceName\":{\"referenceName\":\"spzjn\",\"parameters\":{\"htrgz\":\"dataikwsbzrhdugq\",\"jfhrjhiycbause\":\"dataru\",\"ihvtuwyjsqw\":\"datanczk\",\"oszjgz\":\"datas\"}},\"parameters\":{\"hczavojmsl\":{\"type\":\"String\",\"defaultValue\":\"datayskwwun\"},\"uqalpcufjjfxt\":{\"type\":\"Array\",\"defaultValue\":\"datacukvbljpxprrvchy\"},\"rcwbaae\":{\"type\":\"Object\",\"defaultValue\":\"dataqdstahhhsaaxxsri\"},\"xwoqotiiqbgpasr\":{\"type\":\"Bool\",\"defaultValue\":\"dataef\"}},\"annotations\":[\"datatistyikjhorlx\",\"datapypkennycntrq\",\"dataxwtdmbqjtsuhqh\"],\"folder\":{\"name\":\"tdyqav\"},\"\":{\"npaami\":\"dataqmzxsyaks\",\"hvwt\":\"datawb\",\"kiy\":\"datapbgchcgsfzhb\",\"cfferznzc\":\"dataqbjsdjpgxeysgw\"}}") + "{\"type\":\"cjomipvwkaujtt\",\"typeProperties\":{\"collectionName\":\"datar\"},\"description\":\"k\",\"structure\":\"datavbwofxxdplre\",\"schema\":\"datakvgahcbtu\",\"linkedServiceName\":{\"referenceName\":\"lbpxrhrfjenrazwe\",\"parameters\":{\"tostjixyzsecig\":\"datatlhqas\",\"fomcsau\":\"datazdwjtacfvvtdpcbp\"}},\"parameters\":{\"tkfysunt\":{\"type\":\"Int\",\"defaultValue\":\"dataiw\"},\"hcvasyy\":{\"type\":\"Array\",\"defaultValue\":\"dataklx\"},\"ixyxxhwrlqomaqs\":{\"type\":\"Array\",\"defaultValue\":\"dataokjbmsr\"}},\"annotations\":[\"datapzzbrwn\"],\"folder\":{\"name\":\"zsxagysokli\"},\"\":{\"bhujcydyl\":\"dataybvrrbnhyl\"}}") .toObject(MongoDbCollectionDataset.class); - Assertions.assertEquals("sjqb", model.description()); - Assertions.assertEquals("spzjn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hczavojmsl").type()); - Assertions.assertEquals("tdyqav", model.folder().name()); + Assertions.assertEquals("k", model.description()); + Assertions.assertEquals("lbpxrhrfjenrazwe", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("tkfysunt").type()); + Assertions.assertEquals("zsxagysokli", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbCollectionDataset model - = new MongoDbCollectionDataset().withDescription("sjqb").withStructure("dataqmxv") - .withSchema("datariwbwggij") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("spzjn") - .withParameters(mapOf("htrgz", "dataikwsbzrhdugq", "jfhrjhiycbause", "dataru", "ihvtuwyjsqw", - "datanczk", "oszjgz", "datas"))) - .withParameters(mapOf("hczavojmsl", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datayskwwun"), - "uqalpcufjjfxt", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datacukvbljpxprrvchy"), - "rcwbaae", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("dataqdstahhhsaaxxsri"), - "xwoqotiiqbgpasr", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataef"))) - .withAnnotations(Arrays.asList("datatistyikjhorlx", "datapypkennycntrq", "dataxwtdmbqjtsuhqh")) - .withFolder(new DatasetFolder().withName("tdyqav")).withCollectionName("datad"); + MongoDbCollectionDataset model = new MongoDbCollectionDataset().withDescription("k") + .withStructure("datavbwofxxdplre") + .withSchema("datakvgahcbtu") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lbpxrhrfjenrazwe") + 
.withParameters(mapOf("tostjixyzsecig", "datatlhqas", "fomcsau", "datazdwjtacfvvtdpcbp"))) + .withParameters( + mapOf("tkfysunt", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataiw"), + "hcvasyy", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataklx"), + "ixyxxhwrlqomaqs", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataokjbmsr"))) + .withAnnotations(Arrays.asList("datapzzbrwn")) + .withFolder(new DatasetFolder().withName("zsxagysokli")) + .withCollectionName("datar"); model = BinaryData.fromObject(model).toObject(MongoDbCollectionDataset.class); - Assertions.assertEquals("sjqb", model.description()); - Assertions.assertEquals("spzjn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hczavojmsl").type()); - Assertions.assertEquals("tdyqav", model.folder().name()); + Assertions.assertEquals("k", model.description()); + Assertions.assertEquals("lbpxrhrfjenrazwe", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("tkfysunt").type()); + Assertions.assertEquals("zsxagysokli", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java index 8813dd10a5cb6..f9acabab71697 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java @@ -10,15 +10,14 @@ public final class MongoDbCollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MongoDbCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collectionName\":\"dataivoveomkhfeqcoop\"}") - .toObject(MongoDbCollectionDatasetTypeProperties.class); + MongoDbCollectionDatasetTypeProperties model = BinaryData.fromString("{\"collectionName\":\"datamxvps\"}") + .toObject(MongoDbCollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MongoDbCollectionDatasetTypeProperties model - = new MongoDbCollectionDatasetTypeProperties().withCollectionName("dataivoveomkhfeqcoop"); + = new MongoDbCollectionDatasetTypeProperties().withCollectionName("datamxvps"); model = BinaryData.fromObject(model).toObject(MongoDbCollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java index 660c8ad6f093d..d28f2a912b1a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java @@ -13,15 +13,17 @@ public final class MongoDbCursorMethodsPropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbCursorMethodsProperties model = BinaryData.fromString( - "{\"project\":\"datacntdwijx\",\"sort\":\"dataltowdwiffagfe\",\"skip\":\"datambpgcbltthsuzx\",\"limit\":\"datavifl\",\"\":{\"scobhhblj\":\"datak\",\"us\":\"datavpokvhobygffuzh\"}}") + "{\"project\":\"databfkmfbruuh\",\"sort\":\"dataqgeovnlbjfsollr\",\"skip\":\"dataulnhxrcjshicvrm\",\"limit\":\"datagpcalqbxpp\",\"\":{\"pboaevtxi\":\"datasrfshkjg\",\"hqnprbvruhdjziv\":\"datarooogijiqwxwpub\",\"xmksxxbdtjvvngn\":\"dataaxiimqnqmbfptz\",\"ic\":\"datakkzulmqx\"}}") .toObject(MongoDbCursorMethodsProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbCursorMethodsProperties model - = new MongoDbCursorMethodsProperties().withProject("datacntdwijx").withSort("dataltowdwiffagfe") - .withSkip("datambpgcbltthsuzx").withLimit("datavifl").withAdditionalProperties(mapOf()); + MongoDbCursorMethodsProperties model = new MongoDbCursorMethodsProperties().withProject("databfkmfbruuh") + .withSort("dataqgeovnlbjfsollr") + .withSkip("dataulnhxrcjshicvrm") + .withLimit("datagpcalqbxpp") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(MongoDbCursorMethodsProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java index 10e349dcc9b9e..1975c0f67f726 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java @@ -11,15 +11,18 @@ public final class MongoDbSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbSource model = BinaryData.fromString( - "{\"type\":\"MongoDbSource\",\"query\":\"datarbgnyvypu\",\"additionalColumns\":\"dataxhowwe\",\"sourceRetryCount\":\"datayw\",\"sourceRetryWait\":\"datahiuwv\",\"maxConcurrentConnections\":\"datagejytqnzrcbh\",\"disableMetricsCollection\":\"datahctjvlwfnzgz\",\"\":{\"fuhsmuclxgcedus\":\"datayvytydrdcwbaiaq\",\"vykagsxhzhervv\":\"datayq\",\"zrvf\":\"dataibrolqxloed\",\"fgwuj\":\"datafsyq\"}}") + "{\"type\":\"fhhayfxkfgx\",\"query\":\"datawc\",\"additionalColumns\":\"datazztv\",\"sourceRetryCount\":\"datafzlig\",\"sourceRetryWait\":\"dataqoszcmfmynljig\",\"maxConcurrentConnections\":\"datank\",\"disableMetricsCollection\":\"datam\",\"\":{\"wpnpunr\":\"datavskn\"}}") .toObject(MongoDbSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbSource model = new MongoDbSource().withSourceRetryCount("datayw").withSourceRetryWait("datahiuwv") - .withMaxConcurrentConnections("datagejytqnzrcbh").withDisableMetricsCollection("datahctjvlwfnzgz") - .withQuery("datarbgnyvypu").withAdditionalColumns("dataxhowwe"); + MongoDbSource model = new MongoDbSource().withSourceRetryCount("datafzlig") + .withSourceRetryWait("dataqoszcmfmynljig") + .withMaxConcurrentConnections("datank") + 
.withDisableMetricsCollection("datam") + .withQuery("datawc") + .withAdditionalColumns("datazztv"); model = BinaryData.fromObject(model).toObject(MongoDbSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java index b9c84d529d24b..42240c6a4fbe2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java @@ -19,30 +19,36 @@ public final class MongoDbV2CollectionDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbV2CollectionDataset model = BinaryData.fromString( - "{\"type\":\"MongoDbV2Collection\",\"typeProperties\":{\"collection\":\"datael\"},\"description\":\"odpegq\",\"structure\":\"dataorchazrqoxzyh\",\"schema\":\"dataeqvhskbmpw\",\"linkedServiceName\":{\"referenceName\":\"slajgg\",\"parameters\":{\"hawkmibuydwi\":\"dataae\"}},\"parameters\":{\"u\":{\"type\":\"Int\",\"defaultValue\":\"dataupdyttqm\"},\"s\":{\"type\":\"Array\",\"defaultValue\":\"datal\"}},\"annotations\":[\"datahhtuqmtxynof\",\"dataqobfixngxebihe\"],\"folder\":{\"name\":\"kingiqcdolrpgu\"},\"\":{\"dafbncuy\":\"datalbsm\",\"fzxjzi\":\"dataeykcnhpplzh\",\"wnuwkkfzzetl\":\"dataucrln\",\"vwywjvrlgqpwwlzp\":\"datahdyxz\"}}") + "{\"type\":\"pwwvmbjecfw\",\"typeProperties\":{\"collection\":\"datacuex\"},\"description\":\"ghnkfrwxohlydsn\",\"structure\":\"datapchiypbfhmih\",\"schema\":\"datatqozewbrsrjzgkbr\",\"linkedServiceName\":{\"referenceName\":\"uxboufqnnqbjxgj\",\"parameters\":{\"ri\":\"dataerukbuu\",\"ssybzbe\":\"datawkwkjxlaacedikqe\",\"zyjj\":\"datagbnrommkiqhypwt\",\"omzq\":\"datau\"}},\"parameters\":{\"ialezaydpu\":{\"type\":\"Bool\",\"defaultValue\":\"datawiijcfqiywhxpsba\"},\"smhssfnwh\":{\"type\":\"Object\",\"defaultValue\":\"dataegefxlieggot\"},\"ckn\":{\"type\":\"Array\",\"defaultValue\":\"datahh\"},\"vhzfkdnwy\":{\"type\":\"Array\",\"defaultValue\":\"datacufthdgwuzrono\"}},\"annotations\":[\"datacikgxkk\",\"datazfzdjekeb\"],\"folder\":{\"name\":\"xz\"},\"\":{\"ixirgcjfaiw\":\"datay\",\"akrxi\":\"datalpj\"}}") .toObject(MongoDbV2CollectionDataset.class); - Assertions.assertEquals("odpegq", model.description()); - Assertions.assertEquals("slajgg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("u").type()); - Assertions.assertEquals("kingiqcdolrpgu", model.folder().name()); + Assertions.assertEquals("ghnkfrwxohlydsn", model.description()); + Assertions.assertEquals("uxboufqnnqbjxgj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ialezaydpu").type()); + Assertions.assertEquals("xz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbV2CollectionDataset model = new MongoDbV2CollectionDataset().withDescription("odpegq") - .withStructure("dataorchazrqoxzyh").withSchema("dataeqvhskbmpw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("slajgg") - .withParameters(mapOf("hawkmibuydwi", "dataae"))) - .withParameters( - mapOf("u", new 
ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataupdyttqm"), - "s", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datal"))) - .withAnnotations(Arrays.asList("datahhtuqmtxynof", "dataqobfixngxebihe")) - .withFolder(new DatasetFolder().withName("kingiqcdolrpgu")).withCollection("datael"); + MongoDbV2CollectionDataset model = new MongoDbV2CollectionDataset().withDescription("ghnkfrwxohlydsn") + .withStructure("datapchiypbfhmih") + .withSchema("datatqozewbrsrjzgkbr") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uxboufqnnqbjxgj") + .withParameters(mapOf("ri", "dataerukbuu", "ssybzbe", "datawkwkjxlaacedikqe", "zyjj", + "datagbnrommkiqhypwt", "omzq", "datau"))) + .withParameters(mapOf("ialezaydpu", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawiijcfqiywhxpsba"), + "smhssfnwh", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataegefxlieggot"), "ckn", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datahh"), "vhzfkdnwy", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datacufthdgwuzrono"))) + .withAnnotations(Arrays.asList("datacikgxkk", "datazfzdjekeb")) + .withFolder(new DatasetFolder().withName("xz")) + .withCollection("datacuex"); model = BinaryData.fromObject(model).toObject(MongoDbV2CollectionDataset.class); - Assertions.assertEquals("odpegq", model.description()); - Assertions.assertEquals("slajgg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("u").type()); - Assertions.assertEquals("kingiqcdolrpgu", model.folder().name()); + Assertions.assertEquals("ghnkfrwxohlydsn", model.description()); + Assertions.assertEquals("uxboufqnnqbjxgj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ialezaydpu").type()); + Assertions.assertEquals("xz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java index 87b8bec07e641..b4250bd320b56 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java @@ -10,14 +10,15 @@ public final class MongoDbV2CollectionDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MongoDbV2CollectionDatasetTypeProperties model = BinaryData.fromString("{\"collection\":\"datadarcb\"}") - .toObject(MongoDbV2CollectionDatasetTypeProperties.class); + MongoDbV2CollectionDatasetTypeProperties model + = BinaryData.fromString("{\"collection\":\"dataqnfforxsqtzngxbs\"}") + .toObject(MongoDbV2CollectionDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MongoDbV2CollectionDatasetTypeProperties model - = new MongoDbV2CollectionDatasetTypeProperties().withCollection("datadarcb"); + = new 
MongoDbV2CollectionDatasetTypeProperties().withCollection("dataqnfforxsqtzngxbs"); model = BinaryData.fromObject(model).toObject(MongoDbV2CollectionDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java index 49ab06f618350..e10421a590cdf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java @@ -18,29 +18,31 @@ public final class MongoDbV2LinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbV2LinkedService model = BinaryData.fromString( - "{\"type\":\"MongoDbV2\",\"typeProperties\":{\"connectionString\":\"datasxiaj\",\"database\":\"datacrsdgtj\"},\"connectVia\":{\"referenceName\":\"opnccaxtp\",\"parameters\":{\"feabzjp\":\"datazehijlw\",\"ceqaklsfxlrx\":\"datahjmg\"}},\"description\":\"ymfqmvnhsfjxtusb\",\"parameters\":{\"onalpwa\":{\"type\":\"Float\",\"defaultValue\":\"datakkdptsppgtp\"},\"wnwnjdrnfgtmupbl\":{\"type\":\"Array\",\"defaultValue\":\"datawuyas\"}},\"annotations\":[\"dataqvz\",\"dataikxufuwcajyezlk\",\"dataiveftugiwsvlfp\",\"databpnrgnxwrfu\"],\"\":{\"fzuvuoxzy\":\"datay\"}}") + "{\"type\":\"ev\",\"typeProperties\":{\"connectionString\":\"datarpeli\",\"database\":\"datamitmtkcqixgqxs\"},\"connectVia\":{\"referenceName\":\"uvupdsafqaghw\",\"parameters\":{\"vkodkqffhuxoxq\":\"dataecqyianjm\",\"p\":\"datagzvzcfmwfogjrhmt\"}},\"description\":\"joclvfz\",\"parameters\":{\"ixhulfjlmwhv\":{\"type\":\"String\",\"defaultValue\":\"dataptapyqeesgin\"},\"izviswixlvnwznf\":{\"type\":\"Bool\",\"defaultValue\":\"datawrvtflot\"},\"bdtrwkpely\":{\"type\":\"Object\",\"defaultValue\":\"dataylsl\"}},\"annotations\":[\"datawmajllatbl\",\"datahcfhw\",\"dataithxna\",\"datanssvrkzslyliog\"],\"\":{\"bblwkqztqrnreyjf\":\"datahrmcpbmyghqtth\",\"jdclugv\":\"datanrweevtu\"}}") .toObject(MongoDbV2LinkedService.class); - Assertions.assertEquals("opnccaxtp", model.connectVia().referenceName()); - Assertions.assertEquals("ymfqmvnhsfjxtusb", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("onalpwa").type()); + Assertions.assertEquals("uvupdsafqaghw", model.connectVia().referenceName()); + Assertions.assertEquals("joclvfz", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("ixhulfjlmwhv").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MongoDbV2LinkedService model = new MongoDbV2LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("opnccaxtp") - .withParameters(mapOf("feabzjp", "datazehijlw", "ceqaklsfxlrx", "datahjmg"))) - .withDescription("ymfqmvnhsfjxtusb") - .withParameters(mapOf("onalpwa", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakkdptsppgtp"), - "wnwnjdrnfgtmupbl", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datawuyas"))) - .withAnnotations(Arrays.asList("dataqvz", "dataikxufuwcajyezlk", "dataiveftugiwsvlfp", "databpnrgnxwrfu")) - .withConnectionString("datasxiaj").withDatabase("datacrsdgtj"); + 
.withConnectVia(new IntegrationRuntimeReference().withReferenceName("uvupdsafqaghw") + .withParameters(mapOf("vkodkqffhuxoxq", "dataecqyianjm", "p", "datagzvzcfmwfogjrhmt"))) + .withDescription("joclvfz") + .withParameters(mapOf("ixhulfjlmwhv", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataptapyqeesgin"), + "izviswixlvnwznf", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawrvtflot"), + "bdtrwkpely", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataylsl"))) + .withAnnotations(Arrays.asList("datawmajllatbl", "datahcfhw", "dataithxna", "datanssvrkzslyliog")) + .withConnectionString("datarpeli") + .withDatabase("datamitmtkcqixgqxs"); model = BinaryData.fromObject(model).toObject(MongoDbV2LinkedService.class); - Assertions.assertEquals("opnccaxtp", model.connectVia().referenceName()); - Assertions.assertEquals("ymfqmvnhsfjxtusb", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("onalpwa").type()); + Assertions.assertEquals("uvupdsafqaghw", model.connectVia().referenceName()); + Assertions.assertEquals("joclvfz", model.description()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("ixhulfjlmwhv").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java index d9e0892f549aa..f1dc6aef47f42 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class MongoDbV2LinkedServiceTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbV2LinkedServiceTypeProperties model - = BinaryData.fromString("{\"connectionString\":\"dataqcjclvbqovkz\",\"database\":\"dataeytphnazpgvfcubx\"}") + = BinaryData.fromString("{\"connectionString\":\"datajfpsyqvgaaymfk\",\"database\":\"dataxhiwmkljhk\"}") .toObject(MongoDbV2LinkedServiceTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbV2LinkedServiceTypeProperties model = new MongoDbV2LinkedServiceTypeProperties() - .withConnectionString("dataqcjclvbqovkz").withDatabase("dataeytphnazpgvfcubx"); + MongoDbV2LinkedServiceTypeProperties model + = new MongoDbV2LinkedServiceTypeProperties().withConnectionString("datajfpsyqvgaaymfk") + .withDatabase("dataxhiwmkljhk"); model = BinaryData.fromObject(model).toObject(MongoDbV2LinkedServiceTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java index 4094a15d65678..d7230f8cce105 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java @@ -11,16 +11,19 @@ public final class MongoDbV2SinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbV2Sink model = BinaryData.fromString( - "{\"type\":\"MongoDbV2Sink\",\"writeBehavior\":\"datauaqtqnqm\",\"writeBatchSize\":\"dataptzgomuju\",\"writeBatchTimeout\":\"datankuyombkgkyobu\",\"sinkRetryCount\":\"dataprvokodrpyxkzx\",\"sinkRetryWait\":\"datamoycufkxygxoubek\",\"maxConcurrentConnections\":\"datadxgtgcfk\",\"disableMetricsCollection\":\"datae\",\"\":{\"fpqebbqetx\":\"datahtlk\"}}") + "{\"type\":\"mazdgeablknqnq\",\"writeBehavior\":\"datanjegom\",\"writeBatchSize\":\"datagiy\",\"writeBatchTimeout\":\"datadeo\",\"sinkRetryCount\":\"datawlpopjlg\",\"sinkRetryWait\":\"dataswqxeva\",\"maxConcurrentConnections\":\"dataoxmxtcnmo\",\"disableMetricsCollection\":\"datakpgnagncguq\",\"\":{\"wdez\":\"datammved\",\"lmvvbw\":\"datatpbezlucxbudaj\",\"myvbiuvxlhf\":\"dataunr\",\"hqzvfzxseqscoy\":\"datakllxoahfvkyhfd\"}}") .toObject(MongoDbV2Sink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbV2Sink model = new MongoDbV2Sink().withWriteBatchSize("dataptzgomuju") - .withWriteBatchTimeout("datankuyombkgkyobu").withSinkRetryCount("dataprvokodrpyxkzx") - .withSinkRetryWait("datamoycufkxygxoubek").withMaxConcurrentConnections("datadxgtgcfk") - .withDisableMetricsCollection("datae").withWriteBehavior("datauaqtqnqm"); + MongoDbV2Sink model = new MongoDbV2Sink().withWriteBatchSize("datagiy") + .withWriteBatchTimeout("datadeo") + .withSinkRetryCount("datawlpopjlg") + .withSinkRetryWait("dataswqxeva") + .withMaxConcurrentConnections("dataoxmxtcnmo") + .withDisableMetricsCollection("datakpgnagncguq") + .withWriteBehavior("datanjegom"); model = BinaryData.fromObject(model).toObject(MongoDbV2Sink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java index 74065089bbb48..7ff23a4ecd171 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java @@ -14,19 +14,25 @@ public final class MongoDbV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MongoDbV2Source model = BinaryData.fromString( - "{\"type\":\"MongoDbV2Source\",\"filter\":\"dataff\",\"cursorMethods\":{\"project\":\"dataovfwzys\",\"sort\":\"datavrfjgbxupvhgo\",\"skip\":\"datavwuje\",\"limit\":\"dataow\",\"\":{\"aomy\":\"datasziubkyvcgkoufw\",\"vfmsxamncuhxz\":\"datalxrwdjbya\"}},\"batchSize\":\"dataakxyhuetztorh\",\"queryTimeout\":\"datau\",\"additionalColumns\":\"datasszhsewjqgzlo\",\"sourceRetryCount\":\"datahxd\",\"sourceRetryWait\":\"dataegljqpyxi\",\"maxConcurrentConnections\":\"datakgdkanm\",\"disableMetricsCollection\":\"datawgchgpb\",\"\":{\"axno\":\"datawgoomap\",\"jzt\":\"datanjfvjqvectoo\",\"vsrvkzv\":\"dataalsnm\"}}") + 
"{\"type\":\"frxlsypwu\",\"filter\":\"datatobosjxbnyt\",\"cursorMethods\":{\"project\":\"dataruditumyycvtya\",\"sort\":\"datayimhspjqhi\",\"skip\":\"datablqvwhjgtbh\",\"limit\":\"dataoutq\",\"\":{\"drdxoutkgezuln\":\"databtqibqbougcwzgd\",\"arejxjhl\":\"datatp\"}},\"batchSize\":\"datalj\",\"queryTimeout\":\"dataoeoywy\",\"additionalColumns\":\"datahavwhrivvzrc\",\"sourceRetryCount\":\"dataearbbxan\",\"sourceRetryWait\":\"dataiqkjupvidzh\",\"maxConcurrentConnections\":\"datappqcgbp\",\"disableMetricsCollection\":\"datani\",\"\":{\"arjbakpasuugcng\":\"datadlxuptbtl\"}}") .toObject(MongoDbV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MongoDbV2Source model = new MongoDbV2Source().withSourceRetryCount("datahxd") - .withSourceRetryWait("dataegljqpyxi").withMaxConcurrentConnections("datakgdkanm") - .withDisableMetricsCollection("datawgchgpb").withFilter("dataff") - .withCursorMethods( - new MongoDbCursorMethodsProperties().withProject("dataovfwzys").withSort("datavrfjgbxupvhgo") - .withSkip("datavwuje").withLimit("dataow").withAdditionalProperties(mapOf())) - .withBatchSize("dataakxyhuetztorh").withQueryTimeout("datau").withAdditionalColumns("datasszhsewjqgzlo"); + MongoDbV2Source model = new MongoDbV2Source().withSourceRetryCount("dataearbbxan") + .withSourceRetryWait("dataiqkjupvidzh") + .withMaxConcurrentConnections("datappqcgbp") + .withDisableMetricsCollection("datani") + .withFilter("datatobosjxbnyt") + .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("dataruditumyycvtya") + .withSort("datayimhspjqhi") + .withSkip("datablqvwhjgtbh") + .withLimit("dataoutq") + .withAdditionalProperties(mapOf())) + .withBatchSize("datalj") + .withQueryTimeout("dataoeoywy") + .withAdditionalColumns("datahavwhrivvzrc"); model = BinaryData.fromObject(model).toObject(MongoDbV2Source.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java index 55e4220ec7295..86636628f95d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java @@ -17,31 +17,34 @@ public final class MultiplePipelineTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MultiplePipelineTrigger model = BinaryData.fromString( - "{\"type\":\"MultiplePipelineTrigger\",\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"oxj\",\"name\":\"ha\"},\"parameters\":{\"fcr\":\"dataqktbgu\",\"xergclmqkufq\":\"datatcupo\",\"kqezeee\":\"datamylrtnzyosd\",\"fk\":\"dataligunw\"}},{\"pipelineReference\":{\"referenceName\":\"vwzywxzx\",\"name\":\"htqcwidspegxdeai\"},\"parameters\":{\"yjnmdc\":\"datacmcqslngmsip\",\"cxacgeiyfpfaaah\":\"datatj\",\"fjld\":\"dataphuplfopqgcadnt\"}}],\"description\":\"f\",\"runtimeState\":\"Stopped\",\"annotations\":[\"dataoygcofh\"],\"\":{\"wuwbnngcdtxxyz\":\"datahhirbgmxmvxbaazn\",\"uqtjcyllpas\":\"dataybndiqpadhrij\"}}") + 
"{\"type\":\"fmxomupdqpc\",\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"qgfimllra\",\"name\":\"wpudvuphizztklk\"},\"parameters\":{\"kphsk\":\"dataeehtjmde\",\"yafopc\":\"datai\",\"fxz\":\"datanpjulpwwmxwlwcur\",\"juki\":\"dataazep\"}},{\"pipelineReference\":{\"referenceName\":\"cvruxmp\",\"name\":\"guj\"},\"parameters\":{\"sjobanxshltfghyk\":\"dataduns\",\"pyb\":\"dataxruqrobkne\",\"gkqudxvj\":\"dataskvjb\"}},{\"pipelineReference\":{\"referenceName\":\"dbinqqrkkgawnae\",\"name\":\"ui\"},\"parameters\":{\"iskqxeclw\":\"datavaxyitnzpfdoete\",\"r\":\"dataso\"}}],\"description\":\"vktdvwmefjpoelly\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datarltrztrloyrjvr\",\"datayr\",\"datahfrsyckqwefmq\"],\"\":{\"ffrzg\":\"datazlvfncphhlnba\",\"mxzraihlzgroj\":\"dataob\",\"jrccyysyceykvml\":\"datanx\",\"pp\":\"datahymceg\"}}") .toObject(MultiplePipelineTrigger.class); - Assertions.assertEquals("f", model.description()); - Assertions.assertEquals("oxj", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("ha", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("vktdvwmefjpoelly", model.description()); + Assertions.assertEquals("qgfimllra", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("wpudvuphizztklk", model.pipelines().get(0).pipelineReference().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MultiplePipelineTrigger model - = new MultiplePipelineTrigger().withDescription("f").withAnnotations(Arrays.asList("dataoygcofh")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("oxj").withName("ha")) - .withParameters(mapOf("fcr", "dataqktbgu", "xergclmqkufq", "datatcupo", "kqezeee", - "datamylrtnzyosd", "fk", "dataligunw")), - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("vwzywxzx").withName("htqcwidspegxdeai")) - .withParameters(mapOf("yjnmdc", "datacmcqslngmsip", "cxacgeiyfpfaaah", "datatj", "fjld", - "dataphuplfopqgcadnt")))); + MultiplePipelineTrigger model = new MultiplePipelineTrigger().withDescription("vktdvwmefjpoelly") + .withAnnotations(Arrays.asList("datarltrztrloyrjvr", "datayr", "datahfrsyckqwefmq")) + .withPipelines(Arrays.asList( + new TriggerPipelineReference() + .withPipelineReference( + new PipelineReference().withReferenceName("qgfimllra").withName("wpudvuphizztklk")) + .withParameters(mapOf("kphsk", "dataeehtjmde", "yafopc", "datai", "fxz", "datanpjulpwwmxwlwcur", + "juki", "dataazep")), + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("cvruxmp").withName("guj")) + .withParameters( + mapOf("sjobanxshltfghyk", "dataduns", "pyb", "dataxruqrobkne", "gkqudxvj", "dataskvjb")), + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("dbinqqrkkgawnae").withName("ui")) + .withParameters(mapOf("iskqxeclw", "datavaxyitnzpfdoete", "r", "dataso")))); model = BinaryData.fromObject(model).toObject(MultiplePipelineTrigger.class); - Assertions.assertEquals("f", model.description()); - Assertions.assertEquals("oxj", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("ha", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals("vktdvwmefjpoelly", model.description()); + Assertions.assertEquals("qgfimllra", 
model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("wpudvuphizztklk", model.pipelines().get(0).pipelineReference().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java index 18b4d393403ff..1d87865fc86d6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java @@ -11,16 +11,19 @@ public final class MySqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MySqlSource model = BinaryData.fromString( - "{\"type\":\"MySqlSource\",\"query\":\"databgljcy\",\"queryTimeout\":\"datarzxipxhlxxkviyj\",\"additionalColumns\":\"dataqyejyavxgm\",\"sourceRetryCount\":\"datacnwxkqqxpnj\",\"sourceRetryWait\":\"datazdahvethn\",\"maxConcurrentConnections\":\"dataeggyqlvnhmuut\",\"disableMetricsCollection\":\"datawt\",\"\":{\"fbpfdsatrzqmt\":\"dataccmwsyfsgikgcbjc\"}}") + "{\"type\":\"nucgw\",\"query\":\"datawvpnbgyxo\",\"queryTimeout\":\"dataqpzqivfgemvuicxw\",\"additionalColumns\":\"datarydl\",\"sourceRetryCount\":\"datajjatjmnd\",\"sourceRetryWait\":\"datacdum\",\"maxConcurrentConnections\":\"datahatckomdyfjss\",\"disableMetricsCollection\":\"datazvkddaeiepvjrmk\",\"\":{\"awokr\":\"dataymkbfd\",\"idtxmbnmjimggz\":\"datahjyahrmuwvsdyruo\"}}") .toObject(MySqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MySqlSource model = new MySqlSource().withSourceRetryCount("datacnwxkqqxpnj") - .withSourceRetryWait("datazdahvethn").withMaxConcurrentConnections("dataeggyqlvnhmuut") - .withDisableMetricsCollection("datawt").withQueryTimeout("datarzxipxhlxxkviyj") - .withAdditionalColumns("dataqyejyavxgm").withQuery("databgljcy"); + MySqlSource model = new MySqlSource().withSourceRetryCount("datajjatjmnd") + .withSourceRetryWait("datacdum") + .withMaxConcurrentConnections("datahatckomdyfjss") + .withDisableMetricsCollection("datazvkddaeiepvjrmk") + .withQueryTimeout("dataqpzqivfgemvuicxw") + .withAdditionalColumns("datarydl") + .withQuery("datawvpnbgyxo"); model = BinaryData.fromObject(model).toObject(MySqlSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java index aa88249395986..863c24ec46f4f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java @@ -19,33 +19,32 @@ public final class MySqlTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { MySqlTableDataset model = BinaryData.fromString( - 
"{\"type\":\"MySqlTable\",\"typeProperties\":{\"tableName\":\"databtownoljdkx\"},\"description\":\"ewy\",\"structure\":\"datalclzxkrdpuy\",\"schema\":\"databpkrpk\",\"linkedServiceName\":{\"referenceName\":\"qetp\",\"parameters\":{\"fpc\":\"dataefno\",\"yrxowv\":\"datarx\"}},\"parameters\":{\"ozfrfawtnnsv\":{\"type\":\"Int\",\"defaultValue\":\"datauajwblxph\"},\"gzqzhluc\":{\"type\":\"Array\",\"defaultValue\":\"datajynihtibu\"},\"cgyo\":{\"type\":\"Float\",\"defaultValue\":\"datafehb\"},\"ebldxagmdfjwc\":{\"type\":\"String\",\"defaultValue\":\"datameqljxdumhycxo\"}},\"annotations\":[\"datawxjsjquv\"],\"folder\":{\"name\":\"fzdtsrpjuvgz\"},\"\":{\"huqczouanbfulv\":\"datazhnsbylgmg\"}}") + "{\"type\":\"uubxcwojtupqdvnp\",\"typeProperties\":{\"tableName\":\"dataryibrb\"},\"description\":\"evffifu\",\"structure\":\"datatdowlxmwefcbyb\",\"schema\":\"datahrtczwjcujyz\",\"linkedServiceName\":{\"referenceName\":\"vyrjqdjlgk\",\"parameters\":{\"n\":\"dataxxeuwiiirc\"}},\"parameters\":{\"fujdbqjnyexbvx\":{\"type\":\"Float\",\"defaultValue\":\"dataiwxohktxa\"},\"asfeooq\":{\"type\":\"Float\",\"defaultValue\":\"dataq\"}},\"annotations\":[\"datavev\",\"dataarp\",\"dataklqlii\",\"dataeanuwg\"],\"folder\":{\"name\":\"fgijydgs\"},\"\":{\"mwywhrjkejva\":\"datauymtevaeb\",\"gcphivfhrmte\":\"datadogzougxbxxgj\",\"usrjzhdtrsyfezf\":\"datafdvdoeary\"}}") .toObject(MySqlTableDataset.class); - Assertions.assertEquals("ewy", model.description()); - Assertions.assertEquals("qetp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ozfrfawtnnsv").type()); - Assertions.assertEquals("fzdtsrpjuvgz", model.folder().name()); + Assertions.assertEquals("evffifu", model.description()); + Assertions.assertEquals("vyrjqdjlgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fujdbqjnyexbvx").type()); + Assertions.assertEquals("fgijydgs", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - MySqlTableDataset model = new MySqlTableDataset().withDescription("ewy").withStructure("datalclzxkrdpuy") - .withSchema("databpkrpk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qetp") - .withParameters(mapOf("fpc", "dataefno", "yrxowv", "datarx"))) - .withParameters(mapOf("ozfrfawtnnsv", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datauajwblxph"), "gzqzhluc", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datajynihtibu"), "cgyo", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafehb"), - "ebldxagmdfjwc", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datameqljxdumhycxo"))) - .withAnnotations(Arrays.asList("datawxjsjquv")).withFolder(new DatasetFolder().withName("fzdtsrpjuvgz")) - .withTableName("databtownoljdkx"); + MySqlTableDataset model = new MySqlTableDataset().withDescription("evffifu") + .withStructure("datatdowlxmwefcbyb") + .withSchema("datahrtczwjcujyz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vyrjqdjlgk") + .withParameters(mapOf("n", "dataxxeuwiiirc"))) + .withParameters(mapOf("fujdbqjnyexbvx", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataiwxohktxa"), "asfeooq", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataq"))) + .withAnnotations(Arrays.asList("datavev", "dataarp", "dataklqlii", 
"dataeanuwg")) + .withFolder(new DatasetFolder().withName("fgijydgs")) + .withTableName("dataryibrb"); model = BinaryData.fromObject(model).toObject(MySqlTableDataset.class); - Assertions.assertEquals("ewy", model.description()); - Assertions.assertEquals("qetp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ozfrfawtnnsv").type()); - Assertions.assertEquals("fzdtsrpjuvgz", model.folder().name()); + Assertions.assertEquals("evffifu", model.description()); + Assertions.assertEquals("vyrjqdjlgk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fujdbqjnyexbvx").type()); + Assertions.assertEquals("fgijydgs", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java index 00b2c522cba2c..65c8a2aacefec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class MySqlTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - MySqlTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"dataglxoqwbztilqb\"}") + MySqlTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datayljdzyyrwnmwt\"}") .toObject(MySqlTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { MySqlTableDatasetTypeProperties model - = new MySqlTableDatasetTypeProperties().withTableName("dataglxoqwbztilqb"); + = new MySqlTableDatasetTypeProperties().withTableName("datayljdzyyrwnmwt"); model = BinaryData.fromObject(model).toObject(MySqlTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java index aa61d20258429..efaf58af83125 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java @@ -11,14 +11,15 @@ public final class NetezzaPartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { NetezzaPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"datavvrhoqyvqd\",\"partitionUpperBound\":\"dataruoduexhskh\",\"partitionLowerBound\":\"datalvo\"}") + "{\"partitionColumnName\":\"databl\",\"partitionUpperBound\":\"datakqubnnmzz\",\"partitionLowerBound\":\"datacxbvn\"}") .toObject(NetezzaPartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - NetezzaPartitionSettings model = new 
NetezzaPartitionSettings().withPartitionColumnName("datavvrhoqyvqd") - .withPartitionUpperBound("dataruoduexhskh").withPartitionLowerBound("datalvo"); + NetezzaPartitionSettings model = new NetezzaPartitionSettings().withPartitionColumnName("databl") + .withPartitionUpperBound("datakqubnnmzz") + .withPartitionLowerBound("datacxbvn"); model = BinaryData.fromObject(model).toObject(NetezzaPartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java index e42da672f1c36..dae441198ee6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java @@ -12,18 +12,23 @@ public final class NetezzaSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { NetezzaSource model = BinaryData.fromString( - "{\"type\":\"NetezzaSource\",\"query\":\"datacq\",\"partitionOption\":\"datazrflo\",\"partitionSettings\":{\"partitionColumnName\":\"datavem\",\"partitionUpperBound\":\"datayddfqfnftrrhh\",\"partitionLowerBound\":\"dataawizhanvcfx\"},\"queryTimeout\":\"datakdnfgctxultxhqqv\",\"additionalColumns\":\"datadyya\",\"sourceRetryCount\":\"dataxnepub\",\"sourceRetryWait\":\"datainfauytmqvsdyqyj\",\"maxConcurrentConnections\":\"datafotwmxedlcxmyxt\",\"disableMetricsCollection\":\"dataapoj\",\"\":{\"pfgdnq\":\"dataxantlpspi\"}}") + "{\"type\":\"lvxdpopubbwps\",\"query\":\"datauwmbdtfe\",\"partitionOption\":\"datagzybp\",\"partitionSettings\":{\"partitionColumnName\":\"datapghloemqa\",\"partitionUpperBound\":\"datanqszqk\",\"partitionLowerBound\":\"datanxuezwrbq\"},\"queryTimeout\":\"datamtgkrivcxxejp\",\"additionalColumns\":\"datahkiyiqpiiodvzjk\",\"sourceRetryCount\":\"dataxbkwmhjqakacbc\",\"sourceRetryWait\":\"datasnnvlmmgfgt\",\"maxConcurrentConnections\":\"datamtyfqutmjkbis\",\"disableMetricsCollection\":\"datarilq\",\"\":{\"fogockbizqqajsua\":\"datazdorb\",\"qcyvrbginbrde\":\"datawojgvpqzvtgwl\"}}") .toObject(NetezzaSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - NetezzaSource model = new NetezzaSource().withSourceRetryCount("dataxnepub") - .withSourceRetryWait("datainfauytmqvsdyqyj").withMaxConcurrentConnections("datafotwmxedlcxmyxt") - .withDisableMetricsCollection("dataapoj").withQueryTimeout("datakdnfgctxultxhqqv") - .withAdditionalColumns("datadyya").withQuery("datacq").withPartitionOption("datazrflo") - .withPartitionSettings(new NetezzaPartitionSettings().withPartitionColumnName("datavem") - .withPartitionUpperBound("datayddfqfnftrrhh").withPartitionLowerBound("dataawizhanvcfx")); + NetezzaSource model = new NetezzaSource().withSourceRetryCount("dataxbkwmhjqakacbc") + .withSourceRetryWait("datasnnvlmmgfgt") + .withMaxConcurrentConnections("datamtyfqutmjkbis") + .withDisableMetricsCollection("datarilq") + .withQueryTimeout("datamtgkrivcxxejp") + .withAdditionalColumns("datahkiyiqpiiodvzjk") + .withQuery("datauwmbdtfe") + .withPartitionOption("datagzybp") + .withPartitionSettings(new NetezzaPartitionSettings().withPartitionColumnName("datapghloemqa") + .withPartitionUpperBound("datanqszqk") + .withPartitionLowerBound("datanxuezwrbq")); model = 
BinaryData.fromObject(model).toObject(NetezzaSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java index d5ef1080d11d6..e798d47a79148 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java @@ -19,32 +19,36 @@ public final class NetezzaTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { NetezzaTableDataset model = BinaryData.fromString( - "{\"type\":\"NetezzaTable\",\"typeProperties\":{\"tableName\":\"datadvfjd\",\"table\":\"dataephtoshqtuar\",\"schema\":\"datagujrcnxaeypyq\"},\"description\":\"zfyasyddqbws\",\"structure\":\"datawyyeomiflrvfe\",\"schema\":\"datactshwfrhhasabvau\",\"linkedServiceName\":{\"referenceName\":\"nwwumkbpg\",\"parameters\":{\"rpdgitenyuksli\":\"databwtpwbjlpfwuq\",\"amrplanch\":\"datampnxg\",\"z\":\"dataotmmxlmxejwyv\",\"sbeqieiuxhj\":\"datajwvtuekbbypqsm\"}},\"parameters\":{\"zyxvta\":{\"type\":\"String\",\"defaultValue\":\"datalnjjhrgkjjpcpih\"},\"urdgc\":{\"type\":\"Float\",\"defaultValue\":\"dataatoidne\"}},\"annotations\":[\"datanaqve\",\"datagnpuelrnanbrpkoc\",\"dataxfbagegjtjltcki\"],\"folder\":{\"name\":\"gfagijxmdbo\"},\"\":{\"invzsod\":\"datahxhahuq\"}}") + "{\"type\":\"zrinlpxngzzx\",\"typeProperties\":{\"tableName\":\"datanlsyia\",\"table\":\"dataistcjhatclpp\",\"schema\":\"datastmrycpanapfa\"},\"description\":\"gqnzmzctbxzjkpif\",\"structure\":\"datacvbdozwbsk\",\"schema\":\"dataafzsq\",\"linkedServiceName\":{\"referenceName\":\"binpxmiwtkqifpv\",\"parameters\":{\"buxtwgbawshra\":\"datakcdnzoxl\",\"ugqcglmadfztof\":\"dataq\",\"ayfmcerfxfeiqbas\":\"datavqlauuag\"}},\"parameters\":{\"o\":{\"type\":\"Int\",\"defaultValue\":\"dataipssv\"},\"pdqqbtok\":{\"type\":\"Int\",\"defaultValue\":\"datacqcjozzjku\"},\"rhwetto\":{\"type\":\"SecureString\",\"defaultValue\":\"datauzxllbpw\"}},\"annotations\":[\"dataz\",\"dataxyvtkzbhizxp\"],\"folder\":{\"name\":\"dmwnfhmjusuqn\"},\"\":{\"nirmidtvhjc\":\"datahvlxudheka\"}}") .toObject(NetezzaTableDataset.class); - Assertions.assertEquals("zfyasyddqbws", model.description()); - Assertions.assertEquals("nwwumkbpg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("zyxvta").type()); - Assertions.assertEquals("gfagijxmdbo", model.folder().name()); + Assertions.assertEquals("gqnzmzctbxzjkpif", model.description()); + Assertions.assertEquals("binpxmiwtkqifpv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("o").type()); + Assertions.assertEquals("dmwnfhmjusuqn", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - NetezzaTableDataset model = new NetezzaTableDataset().withDescription("zfyasyddqbws") - .withStructure("datawyyeomiflrvfe").withSchema("datactshwfrhhasabvau") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nwwumkbpg") - .withParameters(mapOf("rpdgitenyuksli", "databwtpwbjlpfwuq", "amrplanch", "datampnxg", "z", - "dataotmmxlmxejwyv", "sbeqieiuxhj", 
"datajwvtuekbbypqsm"))) - .withParameters(mapOf("zyxvta", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datalnjjhrgkjjpcpih"), - "urdgc", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataatoidne"))) - .withAnnotations(Arrays.asList("datanaqve", "datagnpuelrnanbrpkoc", "dataxfbagegjtjltcki")) - .withFolder(new DatasetFolder().withName("gfagijxmdbo")).withTableName("datadvfjd") - .withTable("dataephtoshqtuar").withSchemaTypePropertiesSchema("datagujrcnxaeypyq"); + NetezzaTableDataset model = new NetezzaTableDataset().withDescription("gqnzmzctbxzjkpif") + .withStructure("datacvbdozwbsk") + .withSchema("dataafzsq") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("binpxmiwtkqifpv") + .withParameters(mapOf("buxtwgbawshra", "datakcdnzoxl", "ugqcglmadfztof", "dataq", "ayfmcerfxfeiqbas", + "datavqlauuag"))) + .withParameters(mapOf("o", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataipssv"), "pdqqbtok", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datacqcjozzjku"), "rhwetto", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datauzxllbpw"))) + .withAnnotations(Arrays.asList("dataz", "dataxyvtkzbhizxp")) + .withFolder(new DatasetFolder().withName("dmwnfhmjusuqn")) + .withTableName("datanlsyia") + .withTable("dataistcjhatclpp") + .withSchemaTypePropertiesSchema("datastmrycpanapfa"); model = BinaryData.fromObject(model).toObject(NetezzaTableDataset.class); - Assertions.assertEquals("zfyasyddqbws", model.description()); - Assertions.assertEquals("nwwumkbpg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("zyxvta").type()); - Assertions.assertEquals("gfagijxmdbo", model.folder().name()); + Assertions.assertEquals("gqnzmzctbxzjkpif", model.description()); + Assertions.assertEquals("binpxmiwtkqifpv", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("o").type()); + Assertions.assertEquals("dmwnfhmjusuqn", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java index 766adb9b643cf..8880020495cee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class NetezzaTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { NetezzaTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datakrqdbsgkqy\",\"table\":\"dataotypcjxh\",\"schema\":\"datazlocjhzppdbr\"}") + .fromString("{\"tableName\":\"datazfbqygkxrlfojl\",\"table\":\"datapum\",\"schema\":\"dataybodhrv\"}") .toObject(NetezzaTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - NetezzaTableDatasetTypeProperties model = new NetezzaTableDatasetTypeProperties() - 
.withTableName("datakrqdbsgkqy").withTable("dataotypcjxh").withSchema("datazlocjhzppdbr"); + NetezzaTableDatasetTypeProperties model + = new NetezzaTableDatasetTypeProperties().withTableName("datazfbqygkxrlfojl") + .withTable("datapum") + .withSchema("dataybodhrv"); model = BinaryData.fromObject(model).toObject(NetezzaTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java index ea9a655e2d66e..4b48b6dc8c9b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java @@ -12,16 +12,16 @@ public final class NotebookParameterTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - NotebookParameter model = BinaryData.fromString("{\"value\":\"dataxyawtdsnvxhx\",\"type\":\"bool\"}") - .toObject(NotebookParameter.class); - Assertions.assertEquals(NotebookParameterType.BOOL, model.type()); + NotebookParameter model + = BinaryData.fromString("{\"value\":\"datafkgzgveud\",\"type\":\"int\"}").toObject(NotebookParameter.class); + Assertions.assertEquals(NotebookParameterType.INT, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { NotebookParameter model - = new NotebookParameter().withValue("dataxyawtdsnvxhx").withType(NotebookParameterType.BOOL); + = new NotebookParameter().withValue("datafkgzgveud").withType(NotebookParameterType.INT); model = BinaryData.fromObject(model).toObject(NotebookParameter.class); - Assertions.assertEquals(NotebookParameterType.BOOL, model.type()); + Assertions.assertEquals(NotebookParameterType.INT, model.type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java index f3c177051cd76..3aa0a852ae314 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java @@ -19,32 +19,31 @@ public final class ODataResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ODataResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"ODataResource\",\"typeProperties\":{\"path\":\"dataicovj\"},\"description\":\"vwrmjx\",\"structure\":\"datauod\",\"schema\":\"dataczbassqfyylwpp\",\"linkedServiceName\":{\"referenceName\":\"ygkbzb\",\"parameters\":{\"pmhttuvsqjsrvjnq\":\"datasybxhqvov\",\"qbfkceincnrecjbi\":\"dataaqg\",\"sqsvzvmxtc\":\"datawevsfgdrmnszdosm\",\"hgsulwvgs\":\"dataghndae\"}},\"parameters\":{\"jjuzk\":{\"type\":\"Float\",\"defaultValue\":\"datav\"},\"vljlbzdlby\":{\"type\":\"Bool\",\"defaultValue\":\"dataciwuhyzekypy\"},\"ov\":{\"type\":\"String\",\"defaultValue\":\"dataxhpzy\"}},\"annotations\":[\"databhanz\"],\"folder\":{\"name\":\"fhsh\"},\"\":{\"zpbyfyvynpmggq\":\"dataahn\",\"izorbloejzs\":\"dataagenvqbugihcdvf\"}}") + "{\"type\":\"kkfku\",\"typeProperties\":{\"path\":\"datavqiiasbt\"},\"description\":\"kmxhhqsx\",\"structure\":\"datajkewriglbqtrefe\",\"schema\":\"dataflxcjffzwncvde\",\"linkedServiceName\":{\"referenceName\":\"xonzt\",\"parameters\":{\"pjrrhpgsjbioag\":\"dataptnntqrcjqpzjv\",\"dxdfsfvk\":\"dataviqehmdqvaol\"}},\"parameters\":{\"rkd\":{\"type\":\"Array\",\"defaultValue\":\"datarsvxphtjnhptj\"}},\"annotations\":[\"datahimmydtdtftmi\"],\"folder\":{\"name\":\"jdurgyzcslazpr\"},\"\":{\"cnqpjuytvudeylp\":\"datayimxpggktteagbg\",\"ifm\":\"dataybkisb\",\"urfshzn\":\"dataglpwdjr\",\"xdcdjmdkqt\":\"datarttuhaaa\"}}") .toObject(ODataResourceDataset.class); - Assertions.assertEquals("vwrmjx", model.description()); - Assertions.assertEquals("ygkbzb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jjuzk").type()); - Assertions.assertEquals("fhsh", model.folder().name()); + Assertions.assertEquals("kmxhhqsx", model.description()); + Assertions.assertEquals("xonzt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rkd").type()); + Assertions.assertEquals("jdurgyzcslazpr", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ODataResourceDataset model = new ODataResourceDataset().withDescription("vwrmjx").withStructure("datauod") - .withSchema("dataczbassqfyylwpp") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ygkbzb") - .withParameters(mapOf("pmhttuvsqjsrvjnq", "datasybxhqvov", "qbfkceincnrecjbi", "dataaqg", "sqsvzvmxtc", - "datawevsfgdrmnszdosm", "hgsulwvgs", "dataghndae"))) - .withParameters(mapOf("jjuzk", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datav"), "vljlbzdlby", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataciwuhyzekypy"), "ov", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxhpzy"))) - .withAnnotations(Arrays.asList("databhanz")).withFolder(new DatasetFolder().withName("fhsh")) - .withPath("dataicovj"); + ODataResourceDataset model = new ODataResourceDataset().withDescription("kmxhhqsx") + .withStructure("datajkewriglbqtrefe") + .withSchema("dataflxcjffzwncvde") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xonzt") + .withParameters(mapOf("pjrrhpgsjbioag", "dataptnntqrcjqpzjv", "dxdfsfvk", "dataviqehmdqvaol"))) + .withParameters(mapOf("rkd", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datarsvxphtjnhptj"))) + .withAnnotations(Arrays.asList("datahimmydtdtftmi")) + .withFolder(new DatasetFolder().withName("jdurgyzcslazpr")) + .withPath("datavqiiasbt"); model = 
BinaryData.fromObject(model).toObject(ODataResourceDataset.class); - Assertions.assertEquals("vwrmjx", model.description()); - Assertions.assertEquals("ygkbzb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jjuzk").type()); - Assertions.assertEquals("fhsh", model.folder().name()); + Assertions.assertEquals("kmxhhqsx", model.description()); + Assertions.assertEquals("xonzt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rkd").type()); + Assertions.assertEquals("jdurgyzcslazpr", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java index 14d75bcc71a20..4bd4ef7a74fb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java @@ -11,12 +11,12 @@ public final class ODataResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ODataResourceDatasetTypeProperties model - = BinaryData.fromString("{\"path\":\"datazgkqwvde\"}").toObject(ODataResourceDatasetTypeProperties.class); + = BinaryData.fromString("{\"path\":\"datarmbecx\"}").toObject(ODataResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ODataResourceDatasetTypeProperties model = new ODataResourceDatasetTypeProperties().withPath("datazgkqwvde"); + ODataResourceDatasetTypeProperties model = new ODataResourceDatasetTypeProperties().withPath("datarmbecx"); model = BinaryData.fromObject(model).toObject(ODataResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java index 5010746838d78..abb519d6f0e30 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java @@ -11,16 +11,19 @@ public final class ODataSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ODataSource model = BinaryData.fromString( - "{\"type\":\"ODataSource\",\"query\":\"dataqyllcckgfo\",\"httpRequestTimeout\":\"datarbfyjmenq\",\"additionalColumns\":\"datajfxqtvsfsvqy\",\"sourceRetryCount\":\"dataaweixnoblazwhda\",\"sourceRetryWait\":\"dataixfdu\",\"maxConcurrentConnections\":\"datas\",\"disableMetricsCollection\":\"dataitpcsmax\",\"\":{\"a\":\"dataubhmiuxypvua\"}}") + 
"{\"type\":\"gd\",\"query\":\"datajjprd\",\"httpRequestTimeout\":\"datablonlhtgexwjhicu\",\"additionalColumns\":\"dataavimxnhylwogtvl\",\"sourceRetryCount\":\"datat\",\"sourceRetryWait\":\"datadxlfn\",\"maxConcurrentConnections\":\"dataclkmggnzlfyxaiaf\",\"disableMetricsCollection\":\"datamxekfvycvhwduo\",\"\":{\"sbahcassqeybd\":\"datapzzcxku\",\"cxtumqinawctaarb\":\"datazeyakgrkohfqmxzk\"}}") .toObject(ODataSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ODataSource model = new ODataSource().withSourceRetryCount("dataaweixnoblazwhda") - .withSourceRetryWait("dataixfdu").withMaxConcurrentConnections("datas") - .withDisableMetricsCollection("dataitpcsmax").withQuery("dataqyllcckgfo") - .withHttpRequestTimeout("datarbfyjmenq").withAdditionalColumns("datajfxqtvsfsvqy"); + ODataSource model = new ODataSource().withSourceRetryCount("datat") + .withSourceRetryWait("datadxlfn") + .withMaxConcurrentConnections("dataclkmggnzlfyxaiaf") + .withDisableMetricsCollection("datamxekfvycvhwduo") + .withQuery("datajjprd") + .withHttpRequestTimeout("datablonlhtgexwjhicu") + .withAdditionalColumns("dataavimxnhylwogtvl"); model = BinaryData.fromObject(model).toObject(ODataSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java index ca8c8940af845..0b02d794df535 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java @@ -11,16 +11,19 @@ public final class OdbcSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OdbcSink model = BinaryData.fromString( - "{\"type\":\"OdbcSink\",\"preCopyScript\":\"datam\",\"writeBatchSize\":\"databfzaaiihyl\",\"writeBatchTimeout\":\"datazhlbpmplethek\",\"sinkRetryCount\":\"datanamtvooaace\",\"sinkRetryWait\":\"dataonsvjc\",\"maxConcurrentConnections\":\"datatytyrv\",\"disableMetricsCollection\":\"dataxvzywimmmmg\",\"\":{\"nvahpxdgy\":\"datavoytdtvkfq\",\"ygc\":\"dataowxcptxvxfwwv\",\"jri\":\"dataaztoias\"}}") + "{\"type\":\"tupjqfupoam\",\"preCopyScript\":\"dataacn\",\"writeBatchSize\":\"datadzqxkgr\",\"writeBatchTimeout\":\"datanqipskpynrsacdc\",\"sinkRetryCount\":\"datautahlhiqodx\",\"sinkRetryWait\":\"datacirgqjnf\",\"maxConcurrentConnections\":\"datahhkeyhf\",\"disableMetricsCollection\":\"dataif\",\"\":{\"scx\":\"dataojkikgbhkvhld\"}}") .toObject(OdbcSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OdbcSink model = new OdbcSink().withWriteBatchSize("databfzaaiihyl").withWriteBatchTimeout("datazhlbpmplethek") - .withSinkRetryCount("datanamtvooaace").withSinkRetryWait("dataonsvjc") - .withMaxConcurrentConnections("datatytyrv").withDisableMetricsCollection("dataxvzywimmmmg") - .withPreCopyScript("datam"); + OdbcSink model = new OdbcSink().withWriteBatchSize("datadzqxkgr") + .withWriteBatchTimeout("datanqipskpynrsacdc") + .withSinkRetryCount("datautahlhiqodx") + .withSinkRetryWait("datacirgqjnf") + .withMaxConcurrentConnections("datahhkeyhf") + .withDisableMetricsCollection("dataif") + .withPreCopyScript("dataacn"); model = BinaryData.fromObject(model).toObject(OdbcSink.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java index e58e2e0e853c0..b5d2aa68b90ae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java @@ -11,15 +11,19 @@ public final class OdbcSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OdbcSource model = BinaryData.fromString( - "{\"type\":\"OdbcSource\",\"query\":\"datakpwltozxdzold\",\"queryTimeout\":\"datafnpn\",\"additionalColumns\":\"dataterjjuzarege\",\"sourceRetryCount\":\"dataozpudal\",\"sourceRetryWait\":\"datagdhdtt\",\"maxConcurrentConnections\":\"datakeculxvkuxvccpda\",\"disableMetricsCollection\":\"dataasi\",\"\":{\"ejh\":\"datayvvg\",\"ybneuzueikadhusg\":\"dataoswjwbhtawbc\"}}") + "{\"type\":\"otodprrq\",\"query\":\"dataydmeu\",\"queryTimeout\":\"datalqgyu\",\"additionalColumns\":\"dataoelyjduzapn\",\"sourceRetryCount\":\"dataglytxjcbdpc\",\"sourceRetryWait\":\"datazu\",\"maxConcurrentConnections\":\"datacehfw\",\"disableMetricsCollection\":\"dataclea\",\"\":{\"fccnwmdpb\":\"dataiqskylwpqrc\",\"uckrzwraqaptqy\":\"dataotknhfxpver\",\"b\":\"datanlyuyopwwnqoub\",\"osyl\":\"datasvfzjjflcx\"}}") .toObject(OdbcSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OdbcSource model = new OdbcSource().withSourceRetryCount("dataozpudal").withSourceRetryWait("datagdhdtt") - .withMaxConcurrentConnections("datakeculxvkuxvccpda").withDisableMetricsCollection("dataasi") - .withQueryTimeout("datafnpn").withAdditionalColumns("dataterjjuzarege").withQuery("datakpwltozxdzold"); + OdbcSource model = new OdbcSource().withSourceRetryCount("dataglytxjcbdpc") + .withSourceRetryWait("datazu") + .withMaxConcurrentConnections("datacehfw") + .withDisableMetricsCollection("dataclea") + .withQueryTimeout("datalqgyu") + .withAdditionalColumns("dataoelyjduzapn") + .withQuery("dataydmeu"); model = BinaryData.fromObject(model).toObject(OdbcSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java index 9ce5d79fe61de..c61af03839585 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java @@ -19,30 +19,33 @@ public final class OdbcTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OdbcTableDataset model = BinaryData.fromString( - 
"{\"type\":\"OdbcTable\",\"typeProperties\":{\"tableName\":\"dataizp\"},\"description\":\"fxzspfyvslazippl\",\"structure\":\"datatdumjtycildrzn\",\"schema\":\"dataxozqthkwxfugfziz\",\"linkedServiceName\":{\"referenceName\":\"xduyjnqzbrqcakm\",\"parameters\":{\"nsbqoitwhmuc\":\"dataviyjuca\",\"xy\":\"dataiuh\",\"ycudus\":\"dataehyklelyqdvpqfbx\",\"vfopkyl\":\"datamtxqlefnohey\"}},\"parameters\":{\"w\":{\"type\":\"SecureString\",\"defaultValue\":\"datanj\"}},\"annotations\":[\"datafwtwrsvevc\",\"datae\",\"dataswxhqhgkhtbzv\"],\"folder\":{\"name\":\"evvjncpmyhtxg\"},\"\":{\"bcyjrtalqee\":\"dataghcmixmlwkfe\",\"tomsgoihlqwbywaa\":\"dataudfyimooaez\"}}") + "{\"type\":\"eujywdalisdeqngc\",\"typeProperties\":{\"tableName\":\"datamunwjivtbuszbdjr\"},\"description\":\"dz\",\"structure\":\"datalooulpozm\",\"schema\":\"datahyclxrsidoebldp\",\"linkedServiceName\":{\"referenceName\":\"iaffj\",\"parameters\":{\"x\":\"datanhrevimxm\",\"wdsoqtbfkvuozbzc\":\"datajppitygv\",\"urlcydjhtkjs\":\"datanqekwankl\",\"lr\":\"datarwiyndurdonkgobx\"}},\"parameters\":{\"froefq\":{\"type\":\"SecureString\",\"defaultValue\":\"datarswknpdrgnmza\"}},\"annotations\":[\"dataevyrejyoybkqftu\"],\"folder\":{\"name\":\"m\"},\"\":{\"wsicvwqzoc\":\"datavvbucnnrovomep\"}}") .toObject(OdbcTableDataset.class); - Assertions.assertEquals("fxzspfyvslazippl", model.description()); - Assertions.assertEquals("xduyjnqzbrqcakm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("w").type()); - Assertions.assertEquals("evvjncpmyhtxg", model.folder().name()); + Assertions.assertEquals("dz", model.description()); + Assertions.assertEquals("iaffj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("froefq").type()); + Assertions.assertEquals("m", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OdbcTableDataset model = new OdbcTableDataset().withDescription("fxzspfyvslazippl") - .withStructure("datatdumjtycildrzn").withSchema("dataxozqthkwxfugfziz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xduyjnqzbrqcakm") - .withParameters(mapOf("nsbqoitwhmuc", "dataviyjuca", "xy", "dataiuh", "ycudus", "dataehyklelyqdvpqfbx", - "vfopkyl", "datamtxqlefnohey"))) - .withParameters(mapOf("w", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datanj"))) - .withAnnotations(Arrays.asList("datafwtwrsvevc", "datae", "dataswxhqhgkhtbzv")) - .withFolder(new DatasetFolder().withName("evvjncpmyhtxg")).withTableName("dataizp"); + OdbcTableDataset model = new OdbcTableDataset().withDescription("dz") + .withStructure("datalooulpozm") + .withSchema("datahyclxrsidoebldp") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("iaffj") + .withParameters(mapOf("x", "datanhrevimxm", "wdsoqtbfkvuozbzc", "datajppitygv", "urlcydjhtkjs", + "datanqekwankl", "lr", "datarwiyndurdonkgobx"))) + .withParameters(mapOf("froefq", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datarswknpdrgnmza"))) + .withAnnotations(Arrays.asList("dataevyrejyoybkqftu")) + .withFolder(new DatasetFolder().withName("m")) + .withTableName("datamunwjivtbuszbdjr"); model = BinaryData.fromObject(model).toObject(OdbcTableDataset.class); - Assertions.assertEquals("fxzspfyvslazippl", model.description()); - Assertions.assertEquals("xduyjnqzbrqcakm", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("w").type()); - Assertions.assertEquals("evvjncpmyhtxg", model.folder().name()); + Assertions.assertEquals("dz", model.description()); + Assertions.assertEquals("iaffj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("froefq").type()); + Assertions.assertEquals("m", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java index 20bcabc6baf52..8c46bdd79ad08 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java @@ -10,13 +10,13 @@ public final class OdbcTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - OdbcTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"dataaeeekfztvna\"}") + OdbcTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datashennmsgpywdib\"}") .toObject(OdbcTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OdbcTableDatasetTypeProperties model = new OdbcTableDatasetTypeProperties().withTableName("dataaeeekfztvna"); + OdbcTableDatasetTypeProperties model = new OdbcTableDatasetTypeProperties().withTableName("datashennmsgpywdib"); model = BinaryData.fromObject(model).toObject(OdbcTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java index 7ccb481637c5a..e4685c24b6692 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java @@ -19,34 +19,35 @@ public final class Office365DatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Office365Dataset model = BinaryData.fromString( - 
"{\"type\":\"Office365Table\",\"typeProperties\":{\"tableName\":\"datavovoa\",\"predicate\":\"datagjsmbc\"},\"description\":\"oygsabdgdheronsd\",\"structure\":\"datarkzvz\",\"schema\":\"datatqhgz\",\"linkedServiceName\":{\"referenceName\":\"yxtrvfdbqsk\",\"parameters\":{\"ptpvsffavdhpiw\":\"databvi\",\"bwxyldqtmggcpd\":\"datamuwkgjwbyfdw\",\"zctwymzsk\":\"datamegaj\"}},\"parameters\":{\"gliupqscoob\":{\"type\":\"Object\",\"defaultValue\":\"dataeseip\"},\"incev\":{\"type\":\"Object\",\"defaultValue\":\"datacaxsqcomjiq\"},\"duvtvod\":{\"type\":\"Int\",\"defaultValue\":\"datadevpximziizmeq\"},\"hm\":{\"type\":\"SecureString\",\"defaultValue\":\"datap\"}},\"annotations\":[\"datab\",\"datablmcvrjaznotdof\",\"datavpbqsdqkpsbqs\",\"databmitaftazgcxsvq\"],\"folder\":{\"name\":\"ufylamxowbg\"},\"\":{\"xiknsgofuns\":\"datayutehlkarvtipquk\",\"xn\":\"datahpcekggvmfnnb\"}}") + "{\"type\":\"uqmtxynof\",\"typeProperties\":{\"tableName\":\"datarnicupdyttqmi\",\"predicate\":\"datajplosebmh\"},\"description\":\"obfixngxebi\",\"structure\":\"dataxhnkingiqcd\",\"schema\":\"datarpgupsjlbsmnda\",\"linkedServiceName\":{\"referenceName\":\"bncuyjeykcnhp\",\"parameters\":{\"xjziuucrlnew\":\"datahcf\",\"zlvwyw\":\"datauwkkfzzetlvhdy\",\"lgqpwwlzpd\":\"datav\",\"knmstbdoprwkamp\":\"dataarcbcdwhslxebaja\"}},\"parameters\":{\"huzymhlhihqk\":{\"type\":\"Float\",\"defaultValue\":\"dataldzniudrcycm\"},\"aiildcpud\":{\"type\":\"Array\",\"defaultValue\":\"datakmnbzko\"}},\"annotations\":[\"datauxsyjofpgved\",\"dataobujnjgyb\",\"dataxmqxigidullnjgc\"],\"folder\":{\"name\":\"grh\"},\"\":{\"n\":\"dataslgsazuq\",\"zj\":\"datahxhky\",\"ffxsfybntmveh\":\"datadkpvnrvzw\",\"zqweuyd\":\"datahflyuvbgtz\"}}") .toObject(Office365Dataset.class); - Assertions.assertEquals("oygsabdgdheronsd", model.description()); - Assertions.assertEquals("yxtrvfdbqsk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("gliupqscoob").type()); - Assertions.assertEquals("ufylamxowbg", model.folder().name()); + Assertions.assertEquals("obfixngxebi", model.description()); + Assertions.assertEquals("bncuyjeykcnhp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("huzymhlhihqk").type()); + Assertions.assertEquals("grh", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Office365Dataset model = new Office365Dataset().withDescription("oygsabdgdheronsd").withStructure("datarkzvz") - .withSchema("datatqhgz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yxtrvfdbqsk").withParameters( - mapOf("ptpvsffavdhpiw", "databvi", "bwxyldqtmggcpd", "datamuwkgjwbyfdw", "zctwymzsk", "datamegaj"))) - .withParameters(mapOf("gliupqscoob", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataeseip"), "incev", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datacaxsqcomjiq"), - "duvtvod", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datadevpximziizmeq"), "hm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datap"))) - .withAnnotations(Arrays.asList("datab", "datablmcvrjaznotdof", "datavpbqsdqkpsbqs", "databmitaftazgcxsvq")) - .withFolder(new DatasetFolder().withName("ufylamxowbg")).withTableName("datavovoa") - .withPredicate("datagjsmbc"); + Office365Dataset model = new Office365Dataset().withDescription("obfixngxebi") + 
.withStructure("dataxhnkingiqcd") + .withSchema("datarpgupsjlbsmnda") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bncuyjeykcnhp") + .withParameters(mapOf("xjziuucrlnew", "datahcf", "zlvwyw", "datauwkkfzzetlvhdy", "lgqpwwlzpd", "datav", + "knmstbdoprwkamp", "dataarcbcdwhslxebaja"))) + .withParameters(mapOf("huzymhlhihqk", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataldzniudrcycm"), + "aiildcpud", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakmnbzko"))) + .withAnnotations(Arrays.asList("datauxsyjofpgved", "dataobujnjgyb", "dataxmqxigidullnjgc")) + .withFolder(new DatasetFolder().withName("grh")) + .withTableName("datarnicupdyttqmi") + .withPredicate("datajplosebmh"); model = BinaryData.fromObject(model).toObject(Office365Dataset.class); - Assertions.assertEquals("oygsabdgdheronsd", model.description()); - Assertions.assertEquals("yxtrvfdbqsk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("gliupqscoob").type()); - Assertions.assertEquals("ufylamxowbg", model.folder().name()); + Assertions.assertEquals("obfixngxebi", model.description()); + Assertions.assertEquals("bncuyjeykcnhp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("huzymhlhihqk").type()); + Assertions.assertEquals("grh", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java index 6697a497a1c88..cb54d014daeb8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class Office365DatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Office365DatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datadfkkedeetxtpwcv\",\"predicate\":\"datafwsunjzijaciwmm\"}") + = BinaryData.fromString("{\"tableName\":\"databnairvhpqsv\",\"predicate\":\"dataeoge\"}") .toObject(Office365DatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Office365DatasetTypeProperties model = new Office365DatasetTypeProperties().withTableName("datadfkkedeetxtpwcv") - .withPredicate("datafwsunjzijaciwmm"); + Office365DatasetTypeProperties model + = new Office365DatasetTypeProperties().withTableName("databnairvhpqsv").withPredicate("dataeoge"); model = BinaryData.fromObject(model).toObject(Office365DatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java index 72e4f58493542..a99bdd2771681 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java 
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java @@ -11,17 +11,22 @@ public final class Office365SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Office365Source model = BinaryData.fromString( - "{\"type\":\"Office365Source\",\"allowedGroups\":\"datacugchtwxifudlrxb\",\"userScopeFilterUri\":\"dataftpvgmqzitc\",\"dateFilterColumn\":\"datamlltas\",\"startTime\":\"dataqsf\",\"endTime\":\"datae\",\"outputColumns\":\"dataveg\",\"sourceRetryCount\":\"databmyvgmbirvv\",\"sourceRetryWait\":\"databqxisavktuxw\",\"maxConcurrentConnections\":\"datal\",\"disableMetricsCollection\":\"databridagwuvcdymoqv\",\"\":{\"agvuioxjwzt\":\"datarynziudmhedhryg\",\"cv\":\"datafotllf\",\"zz\":\"datafnbccffs\"}}") + "{\"type\":\"o\",\"allowedGroups\":\"datafcduqsdurfqaawr\",\"userScopeFilterUri\":\"datatzslfrztpnry\",\"dateFilterColumn\":\"dataxajr\",\"startTime\":\"dataighlhddvno\",\"endTime\":\"datasowbtnfqlwc\",\"outputColumns\":\"datajphtehoucmeuuua\",\"sourceRetryCount\":\"datacxofqjninrsk\",\"sourceRetryWait\":\"datakqtiuve\",\"maxConcurrentConnections\":\"datauciwbiwygwpwqu\",\"disableMetricsCollection\":\"dataqgslspihuxgvvio\",\"\":{\"quxvufrf\":\"dataolkmfizefbbrnd\",\"ysieyeblkgu\":\"dataaehssosowav\"}}") .toObject(Office365Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Office365Source model = new Office365Source().withSourceRetryCount("databmyvgmbirvv") - .withSourceRetryWait("databqxisavktuxw").withMaxConcurrentConnections("datal") - .withDisableMetricsCollection("databridagwuvcdymoqv").withAllowedGroups("datacugchtwxifudlrxb") - .withUserScopeFilterUri("dataftpvgmqzitc").withDateFilterColumn("datamlltas").withStartTime("dataqsf") - .withEndTime("datae").withOutputColumns("dataveg"); + Office365Source model = new Office365Source().withSourceRetryCount("datacxofqjninrsk") + .withSourceRetryWait("datakqtiuve") + .withMaxConcurrentConnections("datauciwbiwygwpwqu") + .withDisableMetricsCollection("dataqgslspihuxgvvio") + .withAllowedGroups("datafcduqsdurfqaawr") + .withUserScopeFilterUri("datatzslfrztpnry") + .withDateFilterColumn("dataxajr") + .withStartTime("dataighlhddvno") + .withEndTime("datasowbtnfqlwc") + .withOutputColumns("datajphtehoucmeuuua"); model = BinaryData.fromObject(model).toObject(Office365Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java index 1503e0a26f767..6bee94c69be22 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java @@ -23,8 +23,10 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OperationDisplay model = new OperationDisplay().withDescription("xinpmqnjaq").withProvider("xj") - .withResource("r").withOperation("vcputegj"); + OperationDisplay model = new OperationDisplay().withDescription("xinpmqnjaq") + .withProvider("xj") + .withResource("r") + .withOperation("vcputegj"); model = 
BinaryData.fromObject(model).toObject(OperationDisplay.class); Assertions.assertEquals("xinpmqnjaq", model.description()); Assertions.assertEquals("xj", model.provider()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java index ddf080ea69a74..374710606f40a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java @@ -47,22 +47,32 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OperationInner model = new OperationInner().withName("itjz").withOrigin("lusarh") - .withDisplay(new OperationDisplay().withDescription("cqhsm").withProvider("rkdtmlxh") - .withResource("uksjtxukcdmp").withOperation("cryuan")) + OperationInner model = new OperationInner().withName("itjz") + .withOrigin("lusarh") + .withDisplay(new OperationDisplay().withDescription("cqhsm") + .withProvider("rkdtmlxh") + .withResource("uksjtxukcdmp") + .withOperation("cryuan")) .withServiceSpecification(new OperationServiceSpecification() .withLogSpecifications(Arrays.asList( new OperationLogSpecification().withName("xtayr").withDisplayName("m").withBlobDuration("fp"), - new OperationLogSpecification().withName("qobmtukk").withDisplayName("yrtih") + new OperationLogSpecification().withName("qobmtukk") + .withDisplayName("yrtih") .withBlobDuration("tijbpzvgnwzsymgl"), - new OperationLogSpecification().withName("fcyzkohdbihanufh").withDisplayName("bj") + new OperationLogSpecification().withName("fcyzkohdbihanufh") + .withDisplayName("bj") .withBlobDuration("a"), - new OperationLogSpecification().withName("th").withDisplayName("hab") + new OperationLogSpecification().withName("th") + .withDisplayName("hab") .withBlobDuration("pikxwczbyscnpqxu"))) .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification().withName("qniwbybrkxvdumj") - .withDisplayName("tfwvukxgaudc").withDisplayDescription("nhsjcnyej").withUnit("ryhtnapczwlokjy") - .withAggregationType("kkvnipjox").withEnableRegionalMdmAccount("nchgej") - .withSourceMdmAccount("odmailzyd").withSourceMdmNamespace("o") + .withDisplayName("tfwvukxgaudc") + .withDisplayDescription("nhsjcnyej") + .withUnit("ryhtnapczwlokjy") + .withAggregationType("kkvnipjox") + .withEnableRegionalMdmAccount("nchgej") + .withSourceMdmAccount("odmailzyd") + .withSourceMdmNamespace("o") .withAvailabilities( Arrays.asList(new OperationMetricAvailability(), new OperationMetricAvailability())) .withDimensions(Arrays.asList(new OperationMetricDimension(), new OperationMetricDimension(), diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java index fc4f19992f677..12a2758f83c19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java @@ -33,24 +33,33 @@ public void testDeserialize() throws Exception { public void testSerialize() throws Exception { OperationListResponse model = new OperationListResponse() .withValue(Arrays.asList( - new OperationInner().withName("quvgjxpybczme").withOrigin("tzopbsphrupidgsy") - .withDisplay(new OperationDisplay().withDescription("jhphoyc").withProvider("xaobhdxbmtqioqjz") - .withResource("tbmufpo").withOperation("oizh")) + new OperationInner().withName("quvgjxpybczme") + .withOrigin("tzopbsphrupidgsy") + .withDisplay(new OperationDisplay().withDescription("jhphoyc") + .withProvider("xaobhdxbmtqioqjz") + .withResource("tbmufpo") + .withOperation("oizh")) .withServiceSpecification(new OperationServiceSpecification() .withLogSpecifications(Arrays.asList(new OperationLogSpecification(), new OperationLogSpecification(), new OperationLogSpecification())) .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification(), new OperationMetricSpecification(), new OperationMetricSpecification()))), - new OperationInner().withName("oqijgkdmbpaz").withOrigin("bc") - .withDisplay(new OperationDisplay().withDescription("dznrbtcqq").withProvider("qglhq") - .withResource("ufo").withOperation("jywif")) + new OperationInner().withName("oqijgkdmbpaz") + .withOrigin("bc") + .withDisplay(new OperationDisplay().withDescription("dznrbtcqq") + .withProvider("qglhq") + .withResource("ufo") + .withOperation("jywif")) .withServiceSpecification(new OperationServiceSpecification() .withLogSpecifications( Arrays.asList(new OperationLogSpecification(), new OperationLogSpecification())) .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification()))), - new OperationInner().withName("f").withOrigin("lzl") - .withDisplay(new OperationDisplay().withDescription("rifkwm").withProvider("ktsizntocipaou") - .withResource("psqucmpoyf").withOperation("fogknygjofjdde")) + new OperationInner().withName("f") + .withOrigin("lzl") + .withDisplay(new OperationDisplay().withDescription("rifkwm") + .withProvider("ktsizntocipaou") + .withResource("psqucmpoyf") + .withOperation("fogknygjofjdde")) .withServiceSpecification(new OperationServiceSpecification() .withLogSpecifications( Arrays.asList(new OperationLogSpecification(), new OperationLogSpecification(), diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java index 308493d0ac600..c14a0858e383a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java @@ -22,7 +22,8 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { OperationLogSpecification model = new OperationLogSpecification().withName("ifiyipjxsqwpgrj") - .withDisplayName("norcjxvsnbyxqab").withBlobDuration("ocpcy"); + .withDisplayName("norcjxvsnbyxqab") + .withBlobDuration("ocpcy"); model = BinaryData.fromObject(model).toObject(OperationLogSpecification.class); 
Assertions.assertEquals("ifiyipjxsqwpgrj", model.name()); Assertions.assertEquals("norcjxvsnbyxqab", model.displayName()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java index 1f42d6f941c63..ff1b280faca16 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java @@ -21,7 +21,8 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OperationMetricDimension model = new OperationMetricDimension().withName("yd").withDisplayName("lmjthjq") + OperationMetricDimension model = new OperationMetricDimension().withName("yd") + .withDisplayName("lmjthjq") .withToBeExportedForShoebox(false); model = BinaryData.fromObject(model).toObject(OperationMetricDimension.class); Assertions.assertEquals("yd", model.name()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java index 98e8b4aaa5664..24b8fd36a9135 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java @@ -34,20 +34,27 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OperationMetricSpecification model = new OperationMetricSpecification().withName("urzafb").withDisplayName("j") - .withDisplayDescription("btoqcjmkljavbqid").withUnit("ajzyul").withAggregationType("u") - .withEnableRegionalMdmAccount("krlkhbzhfepg").withSourceMdmAccount("qex") + OperationMetricSpecification model = new OperationMetricSpecification().withName("urzafb") + .withDisplayName("j") + .withDisplayDescription("btoqcjmkljavbqid") + .withUnit("ajzyul") + .withAggregationType("u") + .withEnableRegionalMdmAccount("krlkhbzhfepg") + .withSourceMdmAccount("qex") .withSourceMdmNamespace("ocxscpaierhhbcs") .withAvailabilities(Arrays.asList( new OperationMetricAvailability().withTimeGrain("majtjaod").withBlobDuration("bnbdxkqpxokajion"), new OperationMetricAvailability().withTimeGrain("mexgstxgcp").withBlobDuration("gmaajrm"), new OperationMetricAvailability().withTimeGrain("jwzrl").withBlobDuration("mcl"))) .withDimensions(Arrays.asList( - new OperationMetricDimension().withName("coejctbzaqs").withDisplayName("y") + new OperationMetricDimension().withName("coejctbzaqs") + .withDisplayName("y") .withToBeExportedForShoebox(false), - new OperationMetricDimension().withName("fkgukdkexxppof").withDisplayName("axcfjpgddtocjjx") + new OperationMetricDimension().withName("fkgukdkexxppof") + .withDisplayName("axcfjpgddtocjjx") .withToBeExportedForShoebox(true), - new 
OperationMetricDimension().withName("o").withDisplayName("xhdzxibqeojnx") + new OperationMetricDimension().withName("o") + .withDisplayName("xhdzxibqeojnx") .withToBeExportedForShoebox(true))); model = BinaryData.fromObject(model).toObject(OperationMetricSpecification.class); Assertions.assertEquals("urzafb", model.name()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java index 0351e3a7115c4..ec62c748a605c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java @@ -53,23 +53,36 @@ public void testSerialize() throws Exception { OperationProperties model = new OperationProperties().withServiceSpecification(new OperationServiceSpecification() .withLogSpecifications(Arrays.asList( - new OperationLogSpecification().withName("atscmd").withDisplayName("jhulsuuvmkjo") + new OperationLogSpecification().withName("atscmd") + .withDisplayName("jhulsuuvmkjo") .withBlobDuration("rwfndiod"), - new OperationLogSpecification().withName("slwejdpvw").withDisplayName("oqpsoa") + new OperationLogSpecification().withName("slwejdpvw") + .withDisplayName("oqpsoa") .withBlobDuration("tazak"))) - .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification().withName("hbcryffdfdosyge") - .withDisplayName("aojakhmsbzjhcrz").withDisplayDescription("dphlxaolt").withUnit("trg") - .withAggregationType("bpf").withEnableRegionalMdmAccount("s").withSourceMdmAccount("zgvfcjrwz") - .withSourceMdmNamespace("xjtfelluwfzit") - .withAvailabilities(Arrays.asList( - new OperationMetricAvailability().withTimeGrain("qfpjk").withBlobDuration("xofpdvhpfxxypi"), - new OperationMetricAvailability().withTimeGrain("nmayhuybb").withBlobDuration("odepoogin"), - new OperationMetricAvailability().withTimeGrain("amiheognarxz").withBlobDuration("heotusiv"))) - .withDimensions(Arrays.asList(new OperationMetricDimension().withName("ciqihnhung") - .withDisplayName("jzrnf").withToBeExportedForShoebox(true))), - new OperationMetricSpecification().withName("ispe").withDisplayName("tzfkufubl") - .withDisplayDescription("fxqeof").withUnit("e").withAggregationType("hqjbasvmsmj") - .withEnableRegionalMdmAccount("lngsntnbybkzgcwr").withSourceMdmAccount("lxxwrljdouskc") + .withMetricSpecifications(Arrays.asList( + new OperationMetricSpecification().withName("hbcryffdfdosyge") + .withDisplayName("aojakhmsbzjhcrz") + .withDisplayDescription("dphlxaolt") + .withUnit("trg") + .withAggregationType("bpf") + .withEnableRegionalMdmAccount("s") + .withSourceMdmAccount("zgvfcjrwz") + .withSourceMdmNamespace("xjtfelluwfzit") + .withAvailabilities(Arrays.asList( + new OperationMetricAvailability().withTimeGrain("qfpjk").withBlobDuration("xofpdvhpfxxypi"), + new OperationMetricAvailability().withTimeGrain("nmayhuybb").withBlobDuration("odepoogin"), + new OperationMetricAvailability().withTimeGrain("amiheognarxz") + .withBlobDuration("heotusiv"))) + .withDimensions(Arrays.asList(new OperationMetricDimension().withName("ciqihnhung") + .withDisplayName("jzrnf") + .withToBeExportedForShoebox(true))), + new OperationMetricSpecification().withName("ispe") + 
.withDisplayName("tzfkufubl") + .withDisplayDescription("fxqeof") + .withUnit("e") + .withAggregationType("hqjbasvmsmj") + .withEnableRegionalMdmAccount("lngsntnbybkzgcwr") + .withSourceMdmAccount("lxxwrljdouskc") .withSourceMdmNamespace("kocrcjdkwtnhx") .withAvailabilities(Arrays.asList( new OperationMetricAvailability().withTimeGrain("iksqr").withBlobDuration("ssainqpjwnzll"), @@ -78,15 +91,22 @@ public void testSerialize() throws Exception { new OperationMetricAvailability().withTimeGrain("czdzev").withBlobDuration("hkr"), new OperationMetricAvailability().withTimeGrain("d").withBlobDuration("p"))) .withDimensions(Arrays.asList( - new OperationMetricDimension().withName("kvwrwjfeu").withDisplayName("hutje") + new OperationMetricDimension().withName("kvwrwjfeu") + .withDisplayName("hutje") .withToBeExportedForShoebox(false), - new OperationMetricDimension().withName("ldhugjzzdatqxh").withDisplayName("dgeablgphu") + new OperationMetricDimension().withName("ldhugjzzdatqxh") + .withDisplayName("dgeablgphu") .withToBeExportedForShoebox(true), - new OperationMetricDimension().withName("dvkaozw").withDisplayName("ftyhxhurokf") + new OperationMetricDimension().withName("dvkaozw") + .withDisplayName("ftyhxhurokf") .withToBeExportedForShoebox(true))), - new OperationMetricSpecification().withName("lniwpwcukjfkgiaw").withDisplayName("lryplwckbasyy") - .withDisplayDescription("ddhsgcbacphe").withUnit("ot").withAggregationType("qgoulznd") - .withEnableRegionalMdmAccount("kwy").withSourceMdmAccount("gfgibm") + new OperationMetricSpecification().withName("lniwpwcukjfkgiaw") + .withDisplayName("lryplwckbasyy") + .withDisplayDescription("ddhsgcbacphe") + .withUnit("ot") + .withAggregationType("qgoulznd") + .withEnableRegionalMdmAccount("kwy") + .withSourceMdmAccount("gfgibm") .withSourceMdmNamespace("gakeqsr") .withAvailabilities(Arrays.asList( new OperationMetricAvailability().withTimeGrain("qqedqytbciqfou") @@ -96,7 +116,8 @@ public void testSerialize() throws Exception { new OperationMetricAvailability().withTimeGrain("kdosvqw").withBlobDuration("mdgbbjfdd"), new OperationMetricAvailability().withTimeGrain("bmbexppbhtqqro").withBlobDuration("p"))) .withDimensions(Arrays.asList(new OperationMetricDimension().withName("lgbquxig") - .withDisplayName("jgzjaoyfhrtx").withToBeExportedForShoebox(false)))))); + .withDisplayName("jgzjaoyfhrtx") + .withToBeExportedForShoebox(false)))))); model = BinaryData.fromObject(model).toObject(OperationProperties.class); Assertions.assertEquals("atscmd", model.serviceSpecification().logSpecifications().get(0).name()); Assertions.assertEquals("jhulsuuvmkjo", model.serviceSpecification().logSpecifications().get(0).displayName()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java index f930c33f4b7ce..426ce6ebce177 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java @@ -43,19 +43,26 @@ public void testDeserialize() throws Exception { public void testSerialize() throws Exception { OperationServiceSpecification model = new 
OperationServiceSpecification() .withLogSpecifications(Arrays.asList( - new OperationLogSpecification().withName("ujysvle").withDisplayName("vfqawrlyxwjkcpr").withBlobDuration( - "wbxgjvt"), + new OperationLogSpecification() + .withName("ujysvle") + .withDisplayName("vfqawrlyxwjkcpr") + .withBlobDuration("wbxgjvt"), new OperationLogSpecification().withName("p").withDisplayName("szdnr").withBlobDuration("qguhmuo"))) .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification().withName("rwzwbng") - .withDisplayName("tnwu").withDisplayDescription("gazxuf").withUnit("uckyf") - .withAggregationType("rfidfvzwdz").withEnableRegionalMdmAccount("tymw") - .withSourceMdmAccount("dkfthwxmnt").withSourceMdmNamespace("waopvkmijcmmxd") + .withDisplayName("tnwu") + .withDisplayDescription("gazxuf") + .withUnit("uckyf") + .withAggregationType("rfidfvzwdz") + .withEnableRegionalMdmAccount("tymw") + .withSourceMdmAccount("dkfthwxmnt") + .withSourceMdmNamespace("waopvkmijcmmxd") .withAvailabilities(Arrays.asList( new OperationMetricAvailability().withTimeGrain("fsrpymzidnse").withBlobDuration("xtbzsgfyccsne"), new OperationMetricAvailability().withTimeGrain("dwzjeiach").withBlobDuration("osfln"), new OperationMetricAvailability().withTimeGrain("sfqpteehz").withBlobDuration("ypyqrimzinp"))) .withDimensions(Arrays.asList(new OperationMetricDimension().withName("dkirsoodqxhcr") - .withDisplayName("ohjtckw").withToBeExportedForShoebox(true))))); + .withDisplayName("ohjtckw") + .withToBeExportedForShoebox(true))))); model = BinaryData.fromObject(model).toObject(OperationServiceSpecification.class); Assertions.assertEquals("ujysvle", model.logSpecifications().get(0).name()); Assertions.assertEquals("vfqawrlyxwjkcpr", model.logSpecifications().get(0).displayName()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java index a75c747fedaba..86d18fa5d8aec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java @@ -6,78 +6,60 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.Operation; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class OperationsListMockTests { @Test public void testList() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = 
ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"name\":\"sdwcx\",\"origin\":\"gidyansnunvgqtvg\",\"display\":{\"description\":\"rbn\",\"provider\":\"dyvrdsvsjgt\",\"resource\":\"rmibm\",\"operation\":\"xspztirvzbmhmk\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{\"name\":\"ngpawy\",\"displayName\":\"bw\",\"blobDuration\":\"fz\"},{\"name\":\"qtpwhicnnanqzrml\",\"displayName\":\"ptmzsdwx\",\"blobDuration\":\"tpuzxinwj\"}],\"metricSpecifications\":[{\"name\":\"rhzd\",\"displayName\":\"eauhldq\",\"displayDescription\":\"kx\",\"unit\":\"nroewwrhvdwrowe\",\"aggregationType\":\"goijhc\",\"enableRegionalMdmAccount\":\"nprviivc\",\"sourceMdmAccount\":\"pc\",\"sourceMdmNamespace\":\"itvym\",\"availabilities\":[{}],\"dimensions\":[{}]}]}}}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"name\":\"wegijdejs\",\"origin\":\"j\",\"display\":{\"description\":\"o\",\"provider\":\"blhc\",\"resource\":\"nz\",\"operation\":\"aathdr\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{\"name\":\"cwzmdencqyms\",\"displayName\":\"lio\",\"blobDuration\":\"av\"},{\"name\":\"mxctkwx\",\"displayName\":\"kcvbfeucdqejazht\",\"blobDuration\":\"imcjkhexxn\"},{\"name\":\"lcekonmcxriqfrrx\",\"displayName\":\"svrhcjhszmy\",\"blobDuration\":\"riosmicjkiz\"},{\"name\":\"dawmrkryixbb\",\"displayName\":\"gnjlzdjz\",\"blobDuration\":\"wobxsokemawr\"}],\"metricSpecifications\":[{\"name\":\"drtkwxmglln\",\"displayName\":\"h\",\"displayDescription\":\"fup\",\"unit\":\"fosrwzhmlklocyjp\",\"aggregationType\":\"tnvxomhk\",\"enableRegionalMdmAccount\":\"vdmjjiqjv\",\"sourceMdmAccount\":\"iomfkiopkkhb\",\"sourceMdmNamespace\":\"hsp\",\"availabilities\":[{}],\"dimensions\":[{}]},{\"name\":\"t\",\"displayName\":\"crwnayfkvwl\",\"displayDescription\":\"feyhny\",\"unit\":\"mpuqnvn\",\"aggregationType\":\"awicou\",\"enableRegionalMdmAccount\":\"bjccjorovrfdfgdv\",\"sourceMdmAccount\":\"oxozqcy\",\"sourceMdmNamespace\":\"jpkojykyt\",\"availabilities\":[{},{},{},{}],\"dimensions\":[{},{},{},{}]}]}}}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response = manager.operations().list(com.azure.core.util.Context.NONE); - Assertions.assertEquals("sdwcx", response.iterator().next().name()); - Assertions.assertEquals("gidyansnunvgqtvg", response.iterator().next().origin()); - Assertions.assertEquals("rbn", 
response.iterator().next().display().description()); - Assertions.assertEquals("dyvrdsvsjgt", response.iterator().next().display().provider()); - Assertions.assertEquals("rmibm", response.iterator().next().display().resource()); - Assertions.assertEquals("xspztirvzbmhmk", response.iterator().next().display().operation()); - Assertions.assertEquals("ngpawy", + Assertions.assertEquals("wegijdejs", response.iterator().next().name()); + Assertions.assertEquals("j", response.iterator().next().origin()); + Assertions.assertEquals("o", response.iterator().next().display().description()); + Assertions.assertEquals("blhc", response.iterator().next().display().provider()); + Assertions.assertEquals("nz", response.iterator().next().display().resource()); + Assertions.assertEquals("aathdr", response.iterator().next().display().operation()); + Assertions.assertEquals("cwzmdencqyms", response.iterator().next().serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("bw", + Assertions.assertEquals("lio", response.iterator().next().serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("fz", + Assertions.assertEquals("av", response.iterator().next().serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("rhzd", + Assertions.assertEquals("drtkwxmglln", response.iterator().next().serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("eauhldq", + Assertions.assertEquals("h", response.iterator().next().serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("kx", + Assertions.assertEquals("fup", response.iterator().next().serviceSpecification().metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("nroewwrhvdwrowe", + Assertions.assertEquals("fosrwzhmlklocyjp", response.iterator().next().serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("goijhc", + Assertions.assertEquals("tnvxomhk", response.iterator().next().serviceSpecification().metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("nprviivc", + Assertions.assertEquals("vdmjjiqjv", response.iterator().next().serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("pc", + Assertions.assertEquals("iomfkiopkkhb", response.iterator().next().serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("itvym", + Assertions.assertEquals("hsp", response.iterator().next().serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java index e3bc72d992090..dbf9482cd52d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java @@ -11,14 +11,16 @@ public final class OracleCloudStorageLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleCloudStorageLocation model = BinaryData.fromString( - 
"{\"type\":\"OracleCloudStorageLocation\",\"bucketName\":\"datamdofgeoagfuoft\",\"version\":\"dataodwxmdajwiygmgs\",\"folderPath\":\"datamdmze\",\"fileName\":\"datarstgfczljdnc\",\"\":{\"cvucgytoxu\":\"datajvamyyznmrgcdo\"}}") + "{\"type\":\"naihtgsiqikvllr\",\"bucketName\":\"dataiwifhbk\",\"version\":\"datadttohq\",\"folderPath\":\"datap\",\"fileName\":\"datalxkoi\",\"\":{\"klwzlw\":\"datarawrqkzawbunmp\",\"ejzltka\":\"dataepr\",\"xapgrcqebmvrdjom\":\"datazfjsxscbd\"}}") .toObject(OracleCloudStorageLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleCloudStorageLocation model = new OracleCloudStorageLocation().withFolderPath("datamdmze") - .withFileName("datarstgfczljdnc").withBucketName("datamdofgeoagfuoft").withVersion("dataodwxmdajwiygmgs"); + OracleCloudStorageLocation model = new OracleCloudStorageLocation().withFolderPath("datap") + .withFileName("datalxkoi") + .withBucketName("dataiwifhbk") + .withVersion("datadttohq"); model = BinaryData.fromObject(model).toObject(OracleCloudStorageLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java index 0b48c6ecd9a6b..4854d05f0b71e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java @@ -11,19 +11,25 @@ public final class OracleCloudStorageReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleCloudStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"OracleCloudStorageReadSettings\",\"recursive\":\"datarbjxewcscuveljf\",\"wildcardFolderPath\":\"datain\",\"wildcardFileName\":\"dataziztgddahymv\",\"prefix\":\"datajtdhmig\",\"fileListPath\":\"dataaoexgienylsijqyg\",\"enablePartitionDiscovery\":\"datashd\",\"partitionRootPath\":\"datahxv\",\"deleteFilesAfterCompletion\":\"datafdsafgkysymhuxs\",\"modifiedDatetimeStart\":\"datallbpegcetezaa\",\"modifiedDatetimeEnd\":\"dataszrbttz\",\"maxConcurrentConnections\":\"dataeyrw\",\"disableMetricsCollection\":\"datafgoyxxszpaiecurf\",\"\":{\"xurveekhsmulv\":\"datashfmgi\",\"iuwhcyckekm\":\"dataywoefkpuuu\",\"oycpotmaosongtbh\":\"datafipygt\",\"nrvwjxmwalh\":\"datahsqvubwwqgiyu\"}}") + "{\"type\":\"mqntutetdtgci\",\"recursive\":\"dataslfxejpocsgig\",\"wildcardFolderPath\":\"datab\",\"wildcardFileName\":\"datandyjwmglgstrzfhe\",\"prefix\":\"datazovkbcbef\",\"fileListPath\":\"datanymfhmljimkg\",\"enablePartitionDiscovery\":\"datazvmtjcxigiszxd\",\"partitionRootPath\":\"datalceetuivmbugizwy\",\"deleteFilesAfterCompletion\":\"datahfptbdxtv\",\"modifiedDatetimeStart\":\"datajuls\",\"modifiedDatetimeEnd\":\"dataiuzytxeaq\",\"maxConcurrentConnections\":\"datarjwiwou\",\"disableMetricsCollection\":\"dataaqnfyhgrcm\",\"\":{\"twelutrvdzhwp\":\"datapledxyecttu\",\"ltslfccyavy\":\"datapscw\",\"mldkci\":\"datafmndrdqqjkeg\"}}") .toObject(OracleCloudStorageReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { OracleCloudStorageReadSettings model - = new 
OracleCloudStorageReadSettings().withMaxConcurrentConnections("dataeyrw") - .withDisableMetricsCollection("datafgoyxxszpaiecurf").withRecursive("datarbjxewcscuveljf") - .withWildcardFolderPath("datain").withWildcardFileName("dataziztgddahymv").withPrefix("datajtdhmig") - .withFileListPath("dataaoexgienylsijqyg").withEnablePartitionDiscovery("datashd") - .withPartitionRootPath("datahxv").withDeleteFilesAfterCompletion("datafdsafgkysymhuxs") - .withModifiedDatetimeStart("datallbpegcetezaa").withModifiedDatetimeEnd("dataszrbttz"); + = new OracleCloudStorageReadSettings().withMaxConcurrentConnections("datarjwiwou") + .withDisableMetricsCollection("dataaqnfyhgrcm") + .withRecursive("dataslfxejpocsgig") + .withWildcardFolderPath("datab") + .withWildcardFileName("datandyjwmglgstrzfhe") + .withPrefix("datazovkbcbef") + .withFileListPath("datanymfhmljimkg") + .withEnablePartitionDiscovery("datazvmtjcxigiszxd") + .withPartitionRootPath("datalceetuivmbugizwy") + .withDeleteFilesAfterCompletion("datahfptbdxtv") + .withModifiedDatetimeStart("datajuls") + .withModifiedDatetimeEnd("dataiuzytxeaq"); model = BinaryData.fromObject(model).toObject(OracleCloudStorageReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java index b53e304cd14c6..6a52d120b8cc8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java @@ -11,15 +11,16 @@ public final class OraclePartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OraclePartitionSettings model = BinaryData.fromString( - "{\"partitionNames\":\"datapomihgksqwzuosy\",\"partitionColumnName\":\"datalr\",\"partitionUpperBound\":\"datazudfar\",\"partitionLowerBound\":\"datayrdy\"}") + "{\"partitionNames\":\"dataflizmul\",\"partitionColumnName\":\"dataaulwlzekygnepj\",\"partitionUpperBound\":\"dataxqdrphiyxjq\",\"partitionLowerBound\":\"datanpztlac\"}") .toObject(OraclePartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OraclePartitionSettings model - = new OraclePartitionSettings().withPartitionNames("datapomihgksqwzuosy").withPartitionColumnName("datalr") - .withPartitionUpperBound("datazudfar").withPartitionLowerBound("datayrdy"); + OraclePartitionSettings model = new OraclePartitionSettings().withPartitionNames("dataflizmul") + .withPartitionColumnName("dataaulwlzekygnepj") + .withPartitionUpperBound("dataxqdrphiyxjq") + .withPartitionLowerBound("datanpztlac"); model = BinaryData.fromObject(model).toObject(OraclePartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java index 7b181bf27d752..c4819701f42c1 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java @@ -19,32 +19,35 @@ public final class OracleServiceCloudObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleServiceCloudObjectDataset model = BinaryData.fromString( - "{\"type\":\"OracleServiceCloudObject\",\"typeProperties\":{\"tableName\":\"datakkv\"},\"description\":\"aehjjirvjq\",\"structure\":\"datavqmdmrac\",\"schema\":\"dataffdralihhs\",\"linkedServiceName\":{\"referenceName\":\"cygyzhcv\",\"parameters\":{\"dxrmyzvti\":\"datayrjl\"}},\"parameters\":{\"xoyjyhutwedigiv\":{\"type\":\"Float\",\"defaultValue\":\"datarubx\"},\"mcaxbqpmfhji\":{\"type\":\"Array\",\"defaultValue\":\"dataccxfnatn\"},\"lzvrchmy\":{\"type\":\"Array\",\"defaultValue\":\"datanbdqitghnm\"},\"h\":{\"type\":\"String\",\"defaultValue\":\"datarmwy\"}},\"annotations\":[\"dataplgqqqgrbr\",\"datahvipgt\"],\"folder\":{\"name\":\"aoylwhfm\"},\"\":{\"gypjixdmobadydw\":\"dataea\",\"wdvclsx\":\"datae\",\"xr\":\"dataqdchnzib\"}}") + "{\"type\":\"shm\",\"typeProperties\":{\"tableName\":\"datacthype\"},\"description\":\"iscsdvkymktcw\",\"structure\":\"datavoxgzegnglafnfga\",\"schema\":\"dataghddc\",\"linkedServiceName\":{\"referenceName\":\"zwxuxorrutuhvem\",\"parameters\":{\"vwxlxo\":\"datassolqy\",\"vgdojcvzfcmxmjp\":\"datavrkqv\",\"xp\":\"dataakjdtuodocgquqxn\"}},\"parameters\":{\"kuzabrsoih\":{\"type\":\"Bool\",\"defaultValue\":\"dataniiontqikdipkxs\"},\"oadoh\":{\"type\":\"Array\",\"defaultValue\":\"datajdtacvsynssxylsu\"},\"qqnzk\":{\"type\":\"Int\",\"defaultValue\":\"dataiehkxgfu\"}},\"annotations\":[\"dataq\"],\"folder\":{\"name\":\"ipdnl\"},\"\":{\"pwwgzeylzp\":\"datatf\",\"o\":\"dataimxacrkt\",\"quarb\":\"datagynsixgzbbnug\",\"zbhg\":\"datauvqbeyxwrmupzpe\"}}") .toObject(OracleServiceCloudObjectDataset.class); - Assertions.assertEquals("aehjjirvjq", model.description()); - Assertions.assertEquals("cygyzhcv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("xoyjyhutwedigiv").type()); - Assertions.assertEquals("aoylwhfm", model.folder().name()); + Assertions.assertEquals("iscsdvkymktcw", model.description()); + Assertions.assertEquals("zwxuxorrutuhvem", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kuzabrsoih").type()); + Assertions.assertEquals("ipdnl", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleServiceCloudObjectDataset model = new OracleServiceCloudObjectDataset().withDescription("aehjjirvjq") - .withStructure("datavqmdmrac").withSchema("dataffdralihhs") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cygyzhcv") - .withParameters(mapOf("dxrmyzvti", "datayrjl"))) - .withParameters(mapOf("xoyjyhutwedigiv", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarubx"), "mcaxbqpmfhji", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataccxfnatn"), "lzvrchmy", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datanbdqitghnm"), "h", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datarmwy"))) - 
.withAnnotations(Arrays.asList("dataplgqqqgrbr", "datahvipgt")) - .withFolder(new DatasetFolder().withName("aoylwhfm")).withTableName("datakkv"); + OracleServiceCloudObjectDataset model = new OracleServiceCloudObjectDataset().withDescription("iscsdvkymktcw") + .withStructure("datavoxgzegnglafnfga") + .withSchema("dataghddc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zwxuxorrutuhvem") + .withParameters( + mapOf("vwxlxo", "datassolqy", "vgdojcvzfcmxmjp", "datavrkqv", "xp", "dataakjdtuodocgquqxn"))) + .withParameters(mapOf("kuzabrsoih", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataniiontqikdipkxs"), + "oadoh", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datajdtacvsynssxylsu"), + "qqnzk", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataiehkxgfu"))) + .withAnnotations(Arrays.asList("dataq")) + .withFolder(new DatasetFolder().withName("ipdnl")) + .withTableName("datacthype"); model = BinaryData.fromObject(model).toObject(OracleServiceCloudObjectDataset.class); - Assertions.assertEquals("aehjjirvjq", model.description()); - Assertions.assertEquals("cygyzhcv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("xoyjyhutwedigiv").type()); - Assertions.assertEquals("aoylwhfm", model.folder().name()); + Assertions.assertEquals("iscsdvkymktcw", model.description()); + Assertions.assertEquals("zwxuxorrutuhvem", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kuzabrsoih").type()); + Assertions.assertEquals("ipdnl", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java index bfaf1e946eae2..7c8f99f719942 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java @@ -11,16 +11,19 @@ public final class OracleServiceCloudSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleServiceCloudSource model = BinaryData.fromString( - "{\"type\":\"OracleServiceCloudSource\",\"query\":\"datagdwpgmhqhvnexnw\",\"queryTimeout\":\"datawcxaqlym\",\"additionalColumns\":\"datazv\",\"sourceRetryCount\":\"datatecfyusfkcwfpo\",\"sourceRetryWait\":\"datalgkzgzxqwv\",\"maxConcurrentConnections\":\"datakqbgkssygdvll\",\"disableMetricsCollection\":\"datadfulvhpwpsxygrn\",\"\":{\"qgn\":\"dataxpsebazbtyrjr\",\"imtu\":\"datafzrra\",\"kve\":\"datajqzgyymyywhfdkj\",\"mccqljrnveq\":\"datasoztzdhzkbmzldpl\"}}") + "{\"type\":\"bnbluutmfimlozlf\",\"query\":\"datak\",\"queryTimeout\":\"dataxzukhvioccszd\",\"additionalColumns\":\"dataafuhccnqf\",\"sourceRetryCount\":\"datajirfyetljrdcg\",\"sourceRetryWait\":\"datarmx\",\"maxConcurrentConnections\":\"datawcqhaonmfnf\",\"disableMetricsCollection\":\"datawiocuhas\",\"\":{\"dpqkfxdqm\":\"datalhtukheimwhotjc\"}}") .toObject(OracleServiceCloudSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - 
OracleServiceCloudSource model = new OracleServiceCloudSource().withSourceRetryCount("datatecfyusfkcwfpo") - .withSourceRetryWait("datalgkzgzxqwv").withMaxConcurrentConnections("datakqbgkssygdvll") - .withDisableMetricsCollection("datadfulvhpwpsxygrn").withQueryTimeout("datawcxaqlym") - .withAdditionalColumns("datazv").withQuery("datagdwpgmhqhvnexnw"); + OracleServiceCloudSource model = new OracleServiceCloudSource().withSourceRetryCount("datajirfyetljrdcg") + .withSourceRetryWait("datarmx") + .withMaxConcurrentConnections("datawcqhaonmfnf") + .withDisableMetricsCollection("datawiocuhas") + .withQueryTimeout("dataxzukhvioccszd") + .withAdditionalColumns("dataafuhccnqf") + .withQuery("datak"); model = BinaryData.fromObject(model).toObject(OracleServiceCloudSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java index f11f63fa17466..4edd846736413 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java @@ -11,16 +11,19 @@ public final class OracleSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleSink model = BinaryData.fromString( - "{\"type\":\"OracleSink\",\"preCopyScript\":\"datatmqquyco\",\"writeBatchSize\":\"datafyojfwvmsfns\",\"writeBatchTimeout\":\"datathdfqqzhqgmoe\",\"sinkRetryCount\":\"datanyugsasg\",\"sinkRetryWait\":\"datai\",\"maxConcurrentConnections\":\"dataxdxhxpqkcstyn\",\"disableMetricsCollection\":\"datafndxrofwctjhd\",\"\":{\"kpx\":\"dataldktiojitfa\",\"sorwtakny\":\"dataetdrcm\"}}") + "{\"type\":\"vav\",\"preCopyScript\":\"datawynslcfx\",\"writeBatchSize\":\"datariedf\",\"writeBatchTimeout\":\"datatfugppiudhylx\",\"sinkRetryCount\":\"datasumqdri\",\"sinkRetryWait\":\"dataxzcrf\",\"maxConcurrentConnections\":\"databchnh\",\"disableMetricsCollection\":\"datamgcqlufoj\",\"\":{\"zvnsnaklob\":\"datapkqhgfw\",\"jfxueqyjee\":\"datauyhmltdgxiqrgrr\",\"buglalaazncnhzqn\":\"dataytgnycnklqipnzgn\",\"galodfsbhphwt\":\"dataxkscyykrzrjjernj\"}}") .toObject(OracleSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleSink model = new OracleSink().withWriteBatchSize("datafyojfwvmsfns") - .withWriteBatchTimeout("datathdfqqzhqgmoe").withSinkRetryCount("datanyugsasg").withSinkRetryWait("datai") - .withMaxConcurrentConnections("dataxdxhxpqkcstyn").withDisableMetricsCollection("datafndxrofwctjhd") - .withPreCopyScript("datatmqquyco"); + OracleSink model = new OracleSink().withWriteBatchSize("datariedf") + .withWriteBatchTimeout("datatfugppiudhylx") + .withSinkRetryCount("datasumqdri") + .withSinkRetryWait("dataxzcrf") + .withMaxConcurrentConnections("databchnh") + .withDisableMetricsCollection("datamgcqlufoj") + .withPreCopyScript("datawynslcfx"); model = BinaryData.fromObject(model).toObject(OracleSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java index f9c6a9fc11bfb..711ad77f77d2b 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java @@ -12,20 +12,24 @@ public final class OracleSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleSource model = BinaryData.fromString( - "{\"type\":\"OracleSource\",\"oracleReaderQuery\":\"datanknnqlty\",\"queryTimeout\":\"dataeyxmuwgnwxt\",\"partitionOption\":\"datawgen\",\"partitionSettings\":{\"partitionNames\":\"dataswcxlgzquqx\",\"partitionColumnName\":\"datasvqpifzavctywa\",\"partitionUpperBound\":\"dataaczprzrs\",\"partitionLowerBound\":\"datau\"},\"additionalColumns\":\"datanp\",\"sourceRetryCount\":\"dataqlanuhmsrnp\",\"sourceRetryWait\":\"dataaghoeqiwpdxpd\",\"maxConcurrentConnections\":\"dataoajqxyplhsto\",\"disableMetricsCollection\":\"datayb\",\"\":{\"gqjdoglec\":\"datasvpi\",\"iniidaxbesbwci\":\"datafgyivsiirxcxppqp\",\"uasjrs\":\"datayjch\"}}") + "{\"type\":\"fhar\",\"oracleReaderQuery\":\"dataob\",\"queryTimeout\":\"dataeb\",\"partitionOption\":\"datatputmt\",\"partitionSettings\":{\"partitionNames\":\"datalkwtnq\",\"partitionColumnName\":\"dataqckmfxldqtm\",\"partitionUpperBound\":\"datadejnemrfq\",\"partitionLowerBound\":\"datacdpwlezbfgullq\"},\"additionalColumns\":\"dataijyxcmqcggksrorx\",\"sourceRetryCount\":\"dataltlftraylxz\",\"sourceRetryWait\":\"datajpu\",\"maxConcurrentConnections\":\"dataaogfwbkxdhavegys\",\"disableMetricsCollection\":\"datamlbzixpdat\",\"\":{\"mhcpujy\":\"datavwwejvqpwyrioq\",\"qxprsocfxlrzj\":\"datantxregbssqthcywy\"}}") .toObject(OracleSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleSource model = new OracleSource().withSourceRetryCount("dataqlanuhmsrnp") - .withSourceRetryWait("dataaghoeqiwpdxpd").withMaxConcurrentConnections("dataoajqxyplhsto") - .withDisableMetricsCollection("datayb").withOracleReaderQuery("datanknnqlty") - .withQueryTimeout("dataeyxmuwgnwxt").withPartitionOption("datawgen") - .withPartitionSettings(new OraclePartitionSettings().withPartitionNames("dataswcxlgzquqx") - .withPartitionColumnName("datasvqpifzavctywa").withPartitionUpperBound("dataaczprzrs") - .withPartitionLowerBound("datau")) - .withAdditionalColumns("datanp"); + OracleSource model = new OracleSource().withSourceRetryCount("dataltlftraylxz") + .withSourceRetryWait("datajpu") + .withMaxConcurrentConnections("dataaogfwbkxdhavegys") + .withDisableMetricsCollection("datamlbzixpdat") + .withOracleReaderQuery("dataob") + .withQueryTimeout("dataeb") + .withPartitionOption("datatputmt") + .withPartitionSettings(new OraclePartitionSettings().withPartitionNames("datalkwtnq") + .withPartitionColumnName("dataqckmfxldqtm") + .withPartitionUpperBound("datadejnemrfq") + .withPartitionLowerBound("datacdpwlezbfgullq")) + .withAdditionalColumns("dataijyxcmqcggksrorx"); model = BinaryData.fromObject(model).toObject(OracleSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java index 78132a8f959b3..1f1bb00c2da12 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java @@ -19,31 +19,37 @@ public final class OracleTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OracleTableDataset model = BinaryData.fromString( - "{\"type\":\"OracleTable\",\"typeProperties\":{\"tableName\":\"datanaphifkfrpmpl\",\"schema\":\"datap\",\"table\":\"datarmj\"},\"description\":\"fpghtbttpkim\",\"structure\":\"datahnkkhbykrs\",\"schema\":\"datarcmelycpgokut\",\"linkedServiceName\":{\"referenceName\":\"rvybnz\",\"parameters\":{\"ixlvzcgul\":\"datamshfuzzlap\",\"wjt\":\"dataebxiauqsuptessj\",\"skxgxqaygas\":\"datatpvb\",\"wpvlcjbvyezjwjkq\":\"datakvc\"}},\"parameters\":{\"fpucwn\":{\"type\":\"Bool\",\"defaultValue\":\"dataiieyozvrc\"}},\"annotations\":[\"dataqefgzjvbx\",\"datacbgoarxtuuciagv\",\"datadlhuduklbjo\",\"datafmjfexulv\"],\"folder\":{\"name\":\"kna\"},\"\":{\"leqfgkxenvszg\":\"dataiancsqoacbuqdgsa\",\"eszsuuv\":\"datavya\",\"brveci\":\"datalaqcwggchxvlqg\"}}") + "{\"type\":\"jwqdmr\",\"typeProperties\":{\"tableName\":\"dataaqgblkkncyp\",\"schema\":\"dataevspsaney\",\"table\":\"dataerpiobnhrfbrj\"},\"description\":\"nilppqcai\",\"structure\":\"datazwfwlrfdjwlzseod\",\"schema\":\"datafdrslzymqx\",\"linkedServiceName\":{\"referenceName\":\"erwycuhytjwgetfi\",\"parameters\":{\"stubw\":\"dataeqjxzizebjrahg\"}},\"parameters\":{\"xliqmsck\":{\"type\":\"Bool\",\"defaultValue\":\"datas\"},\"rsofxcacr\":{\"type\":\"Array\",\"defaultValue\":\"datamdoiiyobqzwjal\"},\"lsxfk\":{\"type\":\"Bool\",\"defaultValue\":\"datafmrsuydldpr\"},\"rjwbuocqflm\":{\"type\":\"Array\",\"defaultValue\":\"datatirjvqxvwkiocxo\"}},\"annotations\":[\"datalqxbctatezyo\",\"datadbcqq\",\"datalsjxcscnito\"],\"folder\":{\"name\":\"ahjjido\"},\"\":{\"i\":\"dataltcvmahpuwkupbbn\",\"zhrcqdfwbif\":\"datalh\",\"ynb\":\"datanhlsforsimtfcqm\",\"zamadlerzi\":\"datapelpfijtezgxmpe\"}}") .toObject(OracleTableDataset.class); - Assertions.assertEquals("fpghtbttpkim", model.description()); - Assertions.assertEquals("rvybnz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("fpucwn").type()); - Assertions.assertEquals("kna", model.folder().name()); + Assertions.assertEquals("nilppqcai", model.description()); + Assertions.assertEquals("erwycuhytjwgetfi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("xliqmsck").type()); + Assertions.assertEquals("ahjjido", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleTableDataset model = new OracleTableDataset().withDescription("fpghtbttpkim") - .withStructure("datahnkkhbykrs").withSchema("datarcmelycpgokut") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rvybnz") - .withParameters(mapOf("ixlvzcgul", "datamshfuzzlap", "wjt", "dataebxiauqsuptessj", "skxgxqaygas", - "datatpvb", "wpvlcjbvyezjwjkq", "datakvc"))) - .withParameters(mapOf("fpucwn", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataiieyozvrc"))) - .withAnnotations(Arrays.asList("dataqefgzjvbx", "datacbgoarxtuuciagv", "datadlhuduklbjo", "datafmjfexulv")) - .withFolder(new DatasetFolder().withName("kna")).withTableName("datanaphifkfrpmpl") - 
.withSchemaTypePropertiesSchema("datap").withTable("datarmj"); + OracleTableDataset model = new OracleTableDataset().withDescription("nilppqcai") + .withStructure("datazwfwlrfdjwlzseod") + .withSchema("datafdrslzymqx") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("erwycuhytjwgetfi") + .withParameters(mapOf("stubw", "dataeqjxzizebjrahg"))) + .withParameters(mapOf("xliqmsck", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datas"), "rsofxcacr", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datamdoiiyobqzwjal"), + "lsxfk", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafmrsuydldpr"), + "rjwbuocqflm", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datatirjvqxvwkiocxo"))) + .withAnnotations(Arrays.asList("datalqxbctatezyo", "datadbcqq", "datalsjxcscnito")) + .withFolder(new DatasetFolder().withName("ahjjido")) + .withTableName("dataaqgblkkncyp") + .withSchemaTypePropertiesSchema("dataevspsaney") + .withTable("dataerpiobnhrfbrj"); model = BinaryData.fromObject(model).toObject(OracleTableDataset.class); - Assertions.assertEquals("fpghtbttpkim", model.description()); - Assertions.assertEquals("rvybnz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("fpucwn").type()); - Assertions.assertEquals("kna", model.folder().name()); + Assertions.assertEquals("nilppqcai", model.description()); + Assertions.assertEquals("erwycuhytjwgetfi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("xliqmsck").type()); + Assertions.assertEquals("ahjjido", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java index af450a8677425..a7da641fbd66e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java @@ -10,15 +10,15 @@ public final class OracleTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - OracleTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"dataov\",\"schema\":\"datairlzbipiunn\",\"table\":\"datakwzzzkueruwc\"}") - .toObject(OracleTableDatasetTypeProperties.class); + OracleTableDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"dataui\",\"schema\":\"dataz\",\"table\":\"datallxsw\"}") + .toObject(OracleTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OracleTableDatasetTypeProperties model = new OracleTableDatasetTypeProperties().withTableName("dataov") - .withSchema("datairlzbipiunn").withTable("datakwzzzkueruwc"); + OracleTableDatasetTypeProperties model + = new OracleTableDatasetTypeProperties().withTableName("dataui").withSchema("dataz").withTable("datallxsw"); model = BinaryData.fromObject(model).toObject(OracleTableDatasetTypeProperties.class); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java index 8d8101dcff9a5..53925d38cda13 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java @@ -11,13 +11,13 @@ public final class OrcFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OrcFormat model = BinaryData.fromString( - "{\"type\":\"OrcFormat\",\"serializer\":\"dataxp\",\"deserializer\":\"dataiwifhbk\",\"\":{\"h\":\"datattohqclna\",\"tpmglxkoikmtr\":\"datagsiqikvllr\"}}") + "{\"type\":\"pt\",\"serializer\":\"dataipuugkwdrqmr\",\"deserializer\":\"datahuuonjkkxukg\",\"\":{\"jojlle\":\"datavvpxjoeg\",\"hgnhtmeplhbpjba\":\"dataidpmton\"}}") .toObject(OrcFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OrcFormat model = new OrcFormat().withSerializer("dataxp").withDeserializer("dataiwifhbk"); + OrcFormat model = new OrcFormat().withSerializer("dataipuugkwdrqmr").withDeserializer("datahuuonjkkxukg"); model = BinaryData.fromObject(model).toObject(OrcFormat.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java index b5c361f8efa99..8e3b52312b7ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java @@ -17,21 +17,26 @@ public final class OrcSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OrcSink model = BinaryData.fromString( - "{\"type\":\"OrcSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataivrhjxdn\",\"disableMetricsCollection\":\"dataztf\",\"copyBehavior\":\"datauzvbrehdtqggza\",\"metadata\":[{\"name\":\"datarseiidfpw\",\"value\":\"databmxf\"},{\"name\":\"datazgolfensibqi\",\"value\":\"datapyjzv\"}],\"\":{\"vz\":\"datalfs\"}},\"formatSettings\":{\"type\":\"OrcWriteSettings\",\"maxRowsPerFile\":\"datavwr\",\"fileNamePrefix\":\"datagalywgq\",\"\":{\"csktvkwb\":\"databobheyxe\"}},\"writeBatchSize\":\"datakfvvxiikrja\",\"writeBatchTimeout\":\"datatvnmr\",\"sinkRetryCount\":\"datatypuotmkbofuh\",\"sinkRetryWait\":\"dataksgou\",\"maxConcurrentConnections\":\"dataegtn\",\"disableMetricsCollection\":\"datanotrgyyje\",\"\":{\"ufegbvvkuz\":\"datavjdunbaets\",\"mhzpurnp\":\"datagzrzubdtzsac\",\"albx\":\"datakbxkzcfios\",\"nluvcwuafbhxoa\":\"datad\"}}") + 
"{\"type\":\"xljsbtosiwc\",\"storeSettings\":{\"type\":\"lyhgiisn\",\"maxConcurrentConnections\":\"datax\",\"disableMetricsCollection\":\"databo\",\"copyBehavior\":\"datapyilojwcza\",\"metadata\":[{\"name\":\"dataauskishhm\",\"value\":\"datapgrkd\"},{\"name\":\"datazaunbwcqti\",\"value\":\"datavzds\"}],\"\":{\"pplrdkcazujvh\":\"datahjlploa\",\"elaaexcnxrtlnzd\":\"datauzbsxhivnc\"}},\"formatSettings\":{\"type\":\"aboozxkdzmtkmn\",\"maxRowsPerFile\":\"datammzisljxphwy\",\"fileNamePrefix\":\"datamcpfrakucgjreoac\",\"\":{\"kyet\":\"datademrclsxg\",\"yflke\":\"datatkhihixisd\"}},\"writeBatchSize\":\"dataqgehbw\",\"writeBatchTimeout\":\"dataoc\",\"sinkRetryCount\":\"datazlfhhwdajfth\",\"sinkRetryWait\":\"datawuomjd\",\"maxConcurrentConnections\":\"dataufqhq\",\"disableMetricsCollection\":\"dataio\",\"\":{\"pyemppwkryzrdqpk\":\"dataxxbneiobub\",\"myhjgmfyernckg\":\"datafuvhoiqqzb\",\"kafiokeolzizfbun\":\"datawiq\",\"muwl\":\"datamxhfttmj\"}}") .toObject(OrcSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OrcSink model = new OrcSink().withWriteBatchSize("datakfvvxiikrja").withWriteBatchTimeout("datatvnmr") - .withSinkRetryCount("datatypuotmkbofuh").withSinkRetryWait("dataksgou") - .withMaxConcurrentConnections("dataegtn").withDisableMetricsCollection("datanotrgyyje") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataivrhjxdn") - .withDisableMetricsCollection("dataztf").withCopyBehavior("datauzvbrehdtqggza") - .withMetadata(Arrays.asList(new MetadataItem().withName("datarseiidfpw").withValue("databmxf"), - new MetadataItem().withName("datazgolfensibqi").withValue("datapyjzv"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new OrcWriteSettings().withMaxRowsPerFile("datavwr").withFileNamePrefix("datagalywgq")); + OrcSink model = new OrcSink().withWriteBatchSize("dataqgehbw") + .withWriteBatchTimeout("dataoc") + .withSinkRetryCount("datazlfhhwdajfth") + .withSinkRetryWait("datawuomjd") + .withMaxConcurrentConnections("dataufqhq") + .withDisableMetricsCollection("dataio") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datax") + .withDisableMetricsCollection("databo") + .withCopyBehavior("datapyilojwcza") + .withMetadata(Arrays.asList(new MetadataItem().withName("dataauskishhm").withValue("datapgrkd"), + new MetadataItem().withName("datazaunbwcqti").withValue("datavzds"))) + .withAdditionalProperties(mapOf("type", "lyhgiisn"))) + .withFormatSettings(new OrcWriteSettings().withMaxRowsPerFile("datammzisljxphwy") + .withFileNamePrefix("datamcpfrakucgjreoac")); model = BinaryData.fromObject(model).toObject(OrcSink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java index 4b6a24e43a960..a48781b884d3e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java @@ -14,18 +14,20 @@ public final class OrcSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OrcSource model = BinaryData.fromString( - 
"{\"type\":\"OrcSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataaaepxlxbofdchbo\",\"disableMetricsCollection\":\"datafsk\",\"\":{\"tutqjs\":\"datas\",\"oixtrnakytzcma\":\"dataj\",\"l\":\"datav\"}},\"additionalColumns\":\"dataaarqhpxwqhzsxgmg\",\"sourceRetryCount\":\"datagmtywivbu\",\"sourceRetryWait\":\"dataeedjnklvbrsxykwb\",\"maxConcurrentConnections\":\"datasd\",\"disableMetricsCollection\":\"datajitlqxpsnnnxhgd\",\"\":{\"dbg\":\"dataxyl\",\"zfzkhdnp\":\"dataicjkq\",\"vquasvywkbiek\":\"datamrxjdfkqlkaipf\"}}") + "{\"type\":\"yrdlvbomhfqsjz\",\"storeSettings\":{\"type\":\"ygkuidgwdhawjco\",\"maxConcurrentConnections\":\"datanmthxcmx\",\"disableMetricsCollection\":\"dataexn\",\"\":{\"pnjgiumuztbcjt\":\"dataoxcmsmzy\",\"ehmvrveurpzrysef\":\"datamcnrgwgcstozrv\"}},\"additionalColumns\":\"datachkkwah\",\"sourceRetryCount\":\"dataktk\",\"sourceRetryWait\":\"dataxtee\",\"maxConcurrentConnections\":\"datahxgnlpjytle\",\"disableMetricsCollection\":\"datamijhnjk\",\"\":{\"nkzbdeyhw\":\"datahhuwz\",\"jmmdmbylyndtq\":\"databhobdocfv\"}}") .toObject(OrcSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - OrcSource model = new OrcSource().withSourceRetryCount("datagmtywivbu") - .withSourceRetryWait("dataeedjnklvbrsxykwb").withMaxConcurrentConnections("datasd") - .withDisableMetricsCollection("datajitlqxpsnnnxhgd") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataaaepxlxbofdchbo") - .withDisableMetricsCollection("datafsk").withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("dataaarqhpxwqhzsxgmg"); + OrcSource model = new OrcSource().withSourceRetryCount("dataktk") + .withSourceRetryWait("dataxtee") + .withMaxConcurrentConnections("datahxgnlpjytle") + .withDisableMetricsCollection("datamijhnjk") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datanmthxcmx") + .withDisableMetricsCollection("dataexn") + .withAdditionalProperties(mapOf("type", "ygkuidgwdhawjco"))) + .withAdditionalColumns("datachkkwah"); model = BinaryData.fromObject(model).toObject(OrcSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java index 992ce1c024b26..33ee234ef8531 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java @@ -11,14 +11,14 @@ public final class OrcWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { OrcWriteSettings model = BinaryData.fromString( - "{\"type\":\"OrcWriteSettings\",\"maxRowsPerFile\":\"datakinmxanjg\",\"fileNamePrefix\":\"datadhipgfx\",\"\":{\"cshmqxgjzslhopyq\":\"dataaazeqjnouuujlit\"}}") + "{\"type\":\"cslzca\",\"maxRowsPerFile\":\"databbfweozkbok\",\"fileNamePrefix\":\"datasu\",\"\":{\"bsg\":\"datacvfwkpu\"}}") .toObject(OrcWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { OrcWriteSettings model - = new OrcWriteSettings().withMaxRowsPerFile("datakinmxanjg").withFileNamePrefix("datadhipgfx"); + = new 
OrcWriteSettings().withMaxRowsPerFile("databbfweozkbok").withFileNamePrefix("datasu"); model = BinaryData.fromObject(model).toObject(OrcWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java index 8e1dd61289254..294937cb41464 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java @@ -14,23 +14,24 @@ public final class PackageStoreTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PackageStore model = BinaryData.fromString( - "{\"name\":\"zkifqbxmnnid\",\"packageStoreLinkedService\":{\"type\":\"IntegrationRuntimeReference\",\"referenceName\":\"um\"}}") + "{\"name\":\"zorprraptvbj\",\"packageStoreLinkedService\":{\"type\":\"IntegrationRuntimeReference\",\"referenceName\":\"gebrsnvofu\"}}") .toObject(PackageStore.class); - Assertions.assertEquals("zkifqbxmnnid", model.name()); + Assertions.assertEquals("zorprraptvbj", model.name()); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.packageStoreLinkedService().type()); - Assertions.assertEquals("um", model.packageStoreLinkedService().referenceName()); + Assertions.assertEquals("gebrsnvofu", model.packageStoreLinkedService().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PackageStore model - = new PackageStore().withName("zkifqbxmnnid").withPackageStoreLinkedService(new EntityReference() - .withType(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE).withReferenceName("um")); + PackageStore model = new PackageStore().withName("zorprraptvbj") + .withPackageStoreLinkedService( + new EntityReference().withType(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE) + .withReferenceName("gebrsnvofu")); model = BinaryData.fromObject(model).toObject(PackageStore.class); - Assertions.assertEquals("zkifqbxmnnid", model.name()); + Assertions.assertEquals("zorprraptvbj", model.name()); Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.INTEGRATION_RUNTIME_REFERENCE, model.packageStoreLinkedService().type()); - Assertions.assertEquals("um", model.packageStoreLinkedService().referenceName()); + Assertions.assertEquals("gebrsnvofu", model.packageStoreLinkedService().referenceName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java index 50dfe668900f8..716827cf5a9d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java @@ -12,16 +12,17 @@ public final class ParameterSpecificationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - ParameterSpecification model = 
BinaryData.fromString("{\"type\":\"Int\",\"defaultValue\":\"datasmond\"}") - .toObject(ParameterSpecification.class); - Assertions.assertEquals(ParameterType.INT, model.type()); + ParameterSpecification model + = BinaryData.fromString("{\"type\":\"Float\",\"defaultValue\":\"dataoievseotgqrlltm\"}") + .toObject(ParameterSpecification.class); + Assertions.assertEquals(ParameterType.FLOAT, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ParameterSpecification model - = new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datasmond"); + = new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataoievseotgqrlltm"); model = BinaryData.fromObject(model).toObject(ParameterSpecification.class); - Assertions.assertEquals(ParameterType.INT, model.type()); + Assertions.assertEquals(ParameterType.FLOAT, model.type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java index fb4fdfdf444cf..530967ad7f79e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java @@ -11,13 +11,13 @@ public final class ParquetFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ParquetFormat model = BinaryData.fromString( - "{\"type\":\"ParquetFormat\",\"serializer\":\"datarqkzawbunmpak\",\"deserializer\":\"dataz\",\"\":{\"ejzltka\":\"dataepr\",\"xapgrcqebmvrdjom\":\"datazfjsxscbd\",\"vdlsflxkqesdfeds\":\"datanwsb\"}}") + "{\"type\":\"iqpmnufz\",\"serializer\":\"datachxwwuzdmh\",\"deserializer\":\"datalvivj\",\"\":{\"cotyx\":\"datalitqdsjipdvi\",\"s\":\"datariifefn\"}}") .toObject(ParquetFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ParquetFormat model = new ParquetFormat().withSerializer("datarqkzawbunmpak").withDeserializer("dataz"); + ParquetFormat model = new ParquetFormat().withSerializer("datachxwwuzdmh").withDeserializer("datalvivj"); model = BinaryData.fromObject(model).toObject(ParquetFormat.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java index 5dbd8133dab64..194683755a579 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java @@ -14,14 +14,14 @@ public final class ParquetReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ParquetReadSettings model = BinaryData.fromString( - "{\"type\":\"ParquetReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"owuicvj\":\"datamveyrcikedmoufju\",\"rilwkcgusvp\":\"datajszmleuqxh\"}},\"\":{\"vgwqiwodhasl\":\"dataulxxznfxdqqzi\"}}") + 
"{\"type\":\"ift\",\"compressionProperties\":{\"type\":\"ofts\",\"\":{\"vjemp\":\"datawusfbrnjvzly\",\"wzntbi\":\"dataubs\",\"fg\":\"datau\"}},\"\":{\"hcfndijz\":\"datapuukpswwutdu\",\"mxtijssytdp\":\"datavckhbu\"}}") .toObject(ParquetReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ParquetReadSettings model = new ParquetReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); + ParquetReadSettings model = new ParquetReadSettings() + .withCompressionProperties(new CompressionReadSettings().withAdditionalProperties(mapOf("type", "ofts"))); model = BinaryData.fromObject(model).toObject(ParquetReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java index a3cfd641b64ab..c46d5a08ca7d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java @@ -17,22 +17,26 @@ public final class ParquetSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ParquetSink model = BinaryData.fromString( - "{\"type\":\"ParquetSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataajqnsrcqd\",\"disableMetricsCollection\":\"datamlqamd\",\"copyBehavior\":\"dataukdmrv\",\"metadata\":[{\"name\":\"datacclsnprdartq\",\"value\":\"databbxexacgmtpkx\"}],\"\":{\"bmujlsztpygq\":\"databovexsnmww\",\"sn\":\"datakdl\",\"mksfejzmyvlbz\":\"datamkc\"}},\"formatSettings\":{\"type\":\"ParquetWriteSettings\",\"maxRowsPerFile\":\"dataxzpdnb\",\"fileNamePrefix\":\"datavhddvtnbtv\",\"\":{\"myizdglzzaufi\":\"datajfkaoew\",\"ykng\":\"datawvyxy\"}},\"writeBatchSize\":\"datajgpyvjg\",\"writeBatchTimeout\":\"datayjnh\",\"sinkRetryCount\":\"databhwrncxwzuer\",\"sinkRetryWait\":\"datapa\",\"maxConcurrentConnections\":\"datapiniopbfkmfbru\",\"disableMetricsCollection\":\"datayl\",\"\":{\"fsol\":\"dataovnlb\",\"lnhxr\":\"datarqu\",\"l\":\"datajshicvrmwbgpc\",\"pboaevtxi\":\"databxppvpgsrfshkjg\"}}") + "{\"type\":\"jzf\",\"storeSettings\":{\"type\":\"vshg\",\"maxConcurrentConnections\":\"dataukaswgvoa\",\"disableMetricsCollection\":\"datatdt\",\"copyBehavior\":\"dataoafhhiyk\",\"metadata\":[{\"name\":\"dataebcuynq\",\"value\":\"dataiptefdvjgbemr\"},{\"name\":\"dataovquxpdprjethyh\",\"value\":\"dataoyexuivdrzxob\"}],\"\":{\"q\":\"datalg\",\"xwmwrjm\":\"dataiblaumogu\"}},\"formatSettings\":{\"type\":\"pxaps\",\"maxRowsPerFile\":\"datatyvra\",\"fileNamePrefix\":\"dataqbeqz\",\"\":{\"gouwefzdnyg\":\"dataygajqmpf\",\"qxjpzykkwk\":\"datathvlwhrjakd\",\"sdg\":\"datakkpbybhqwil\",\"kir\":\"datahe\"}},\"writeBatchSize\":\"datan\",\"writeBatchTimeout\":\"datahbcurthmbgav\",\"sinkRetryCount\":\"dataqjetoaijayvu\",\"sinkRetryWait\":\"dataibfnqjcx\",\"maxConcurrentConnections\":\"datadeqwxivjhmldvnox\",\"disableMetricsCollection\":\"datahltxtpgqqinkktay\",\"\":{\"wawblkkc\":\"datab\",\"ncqqhbjmvbe\":\"dataixsgklxgsqhczok\",\"ejnwwqyyfctfs\":\"datanlukeqzcbqv\",\"tnuzorx\":\"datahmrughmrybbh\"}}") .toObject(ParquetSink.class); } @org.junit.jupiter.api.Test public void testSerialize() 
throws Exception { - ParquetSink model = new ParquetSink().withWriteBatchSize("datajgpyvjg").withWriteBatchTimeout("datayjnh") - .withSinkRetryCount("databhwrncxwzuer").withSinkRetryWait("datapa") - .withMaxConcurrentConnections("datapiniopbfkmfbru").withDisableMetricsCollection("datayl") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataajqnsrcqd") - .withDisableMetricsCollection("datamlqamd").withCopyBehavior("dataukdmrv") - .withMetadata( - Arrays.asList(new MetadataItem().withName("datacclsnprdartq").withValue("databbxexacgmtpkx"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) + ParquetSink model = new ParquetSink().withWriteBatchSize("datan") + .withWriteBatchTimeout("datahbcurthmbgav") + .withSinkRetryCount("dataqjetoaijayvu") + .withSinkRetryWait("dataibfnqjcx") + .withMaxConcurrentConnections("datadeqwxivjhmldvnox") + .withDisableMetricsCollection("datahltxtpgqqinkktay") + .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataukaswgvoa") + .withDisableMetricsCollection("datatdt") + .withCopyBehavior("dataoafhhiyk") + .withMetadata(Arrays.asList(new MetadataItem().withName("dataebcuynq").withValue("dataiptefdvjgbemr"), + new MetadataItem().withName("dataovquxpdprjethyh").withValue("dataoyexuivdrzxob"))) + .withAdditionalProperties(mapOf("type", "vshg"))) .withFormatSettings( - new ParquetWriteSettings().withMaxRowsPerFile("dataxzpdnb").withFileNamePrefix("datavhddvtnbtv")); + new ParquetWriteSettings().withMaxRowsPerFile("datatyvra").withFileNamePrefix("dataqbeqz")); model = BinaryData.fromObject(model).toObject(ParquetSink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java index b817a9686662e..9a929a247934b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java @@ -16,20 +16,22 @@ public final class ParquetSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ParquetSource model = BinaryData.fromString( - "{\"type\":\"ParquetSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataqnkoskflnjaysr\",\"disableMetricsCollection\":\"dataevmbhemrhb\",\"\":{\"botbrepefrli\":\"datasspwwe\",\"zgcscootfsgilwis\":\"dataeocyarvs\",\"tust\":\"dataxzpz\"}},\"formatSettings\":{\"type\":\"ParquetReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"zbirb\":\"dataogknocshmpcjqt\"}},\"\":{\"rhccdgunsjssreo\":\"datahfjqpxydpamctzm\",\"ppbghyekggoaw\":\"datasgkouenpgkxyr\"}},\"additionalColumns\":\"datavu\",\"sourceRetryCount\":\"dataqmwqsgy\",\"sourceRetryWait\":\"datanostn\",\"maxConcurrentConnections\":\"datavrpkizj\",\"disableMetricsCollection\":\"datagdsur\",\"\":{\"voxshxumuuyblolr\":\"databcirkbkqp\"}}") + 
"{\"type\":\"mnkvpafoe\",\"storeSettings\":{\"type\":\"lrjdkyp\",\"maxConcurrentConnections\":\"datavilgn\",\"disableMetricsCollection\":\"datatjbldgikokjwgej\",\"\":{\"gsyszdtgw\":\"datazezwnqhcpk\",\"prourtmccd\":\"dataqcutk\",\"dcvlbxr\":\"datajtoypluxvjutckfh\",\"mzcxlgmuhxw\":\"datazvujp\"}},\"formatSettings\":{\"type\":\"q\",\"compressionProperties\":{\"type\":\"jczpfoispc\",\"\":{\"hazmcuggtsovozy\":\"datavmvsbgyq\",\"ugubob\":\"datapkrncjrq\",\"qcyhfubz\":\"dataqnwhcmvdow\"}},\"\":{\"fhjefiu\":\"datarawbftznxf\",\"bkdieismdkvfim\":\"databrnrilljucod\"}},\"additionalColumns\":\"datacij\",\"sourceRetryCount\":\"datacsk\",\"sourceRetryWait\":\"datawzmji\",\"maxConcurrentConnections\":\"dataqyllcckgfo\",\"disableMetricsCollection\":\"datarbfyjmenq\",\"\":{\"qtvsfsvq\":\"dataf\",\"hdaczixf\":\"datazgaweixnoblaz\",\"s\":\"datauw\"}}") .toObject(ParquetSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ParquetSource model = new ParquetSource().withSourceRetryCount("dataqmwqsgy").withSourceRetryWait("datanostn") - .withMaxConcurrentConnections("datavrpkizj").withDisableMetricsCollection("datagdsur") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataqnkoskflnjaysr") - .withDisableMetricsCollection("dataevmbhemrhb") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) + ParquetSource model = new ParquetSource().withSourceRetryCount("datacsk") + .withSourceRetryWait("datawzmji") + .withMaxConcurrentConnections("dataqyllcckgfo") + .withDisableMetricsCollection("datarbfyjmenq") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datavilgn") + .withDisableMetricsCollection("datatjbldgikokjwgej") + .withAdditionalProperties(mapOf("type", "lrjdkyp"))) .withFormatSettings(new ParquetReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datavu"); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "jczpfoispc")))) + .withAdditionalColumns("datacij"); model = BinaryData.fromObject(model).toObject(ParquetSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java index a08157217ab5c..1b9781f84d024 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java @@ -11,14 +11,14 @@ public final class ParquetWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ParquetWriteSettings model = BinaryData.fromString( - "{\"type\":\"ParquetWriteSettings\",\"maxRowsPerFile\":\"datapbxspvkcng\",\"fileNamePrefix\":\"datacnwn\",\"\":{\"bklkhhjx\":\"dataitlam\",\"hqzvwznwcqoapdtj\":\"datawqzsyetbff\"}}") + "{\"type\":\"tcistd\",\"maxRowsPerFile\":\"dataiklx\",\"fileNamePrefix\":\"datastun\",\"\":{\"azwxdfaask\":\"datakbuajkodpzqtg\",\"ncjwzeatezlt\":\"dataasjbuhzucdljqjs\",\"hxivshjuxmcj\":\"dataundkjphv\"}}") .toObject(ParquetWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ParquetWriteSettings 
model - = new ParquetWriteSettings().withMaxRowsPerFile("datapbxspvkcng").withFileNamePrefix("datacnwn"); + = new ParquetWriteSettings().withMaxRowsPerFile("dataiklx").withFileNamePrefix("datastun"); model = BinaryData.fromObject(model).toObject(ParquetWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java index e998eef43e2e0..8ce864643627c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java @@ -19,34 +19,35 @@ public final class PaypalObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PaypalObjectDataset model = BinaryData.fromString( - "{\"type\":\"PaypalObject\",\"typeProperties\":{\"tableName\":\"databb\"},\"description\":\"yp\",\"structure\":\"dataodaq\",\"schema\":\"datakp\",\"linkedServiceName\":{\"referenceName\":\"zf\",\"parameters\":{\"valcrqaxlmbrtvtg\":\"databg\"}},\"parameters\":{\"voysxa\":{\"type\":\"Object\",\"defaultValue\":\"datalgtlayyxhx\"},\"njc\":{\"type\":\"SecureString\",\"defaultValue\":\"datahdfhfaob\"},\"ydvwr\":{\"type\":\"SecureString\",\"defaultValue\":\"datazvcdqws\"},\"dlxbaeyocpkv\":{\"type\":\"Int\",\"defaultValue\":\"datavywotjnjuvtzij\"}},\"annotations\":[\"datafdz\",\"datamnpbdrcibj\",\"datannno\",\"datatnhvdtu\"],\"folder\":{\"name\":\"qobqehspshtisy\"},\"\":{\"zeb\":\"dataoctrzjwnzwc\",\"lmlnxrcatkuh\":\"databvwdxgyypmxq\",\"gdkvviilyeshoxf\":\"datak\"}}") + "{\"type\":\"ryhhe\",\"typeProperties\":{\"tableName\":\"datadc\"},\"description\":\"zhhllx\",\"structure\":\"datan\",\"schema\":\"dataxdjklfsd\",\"linkedServiceName\":{\"referenceName\":\"tkqiymmddslwnlg\",\"parameters\":{\"pybnnnlpqdn\":\"datahmk\",\"atupmrslwknrdvvm\":\"datas\",\"kdeetnne\":\"datajerndzzywxqraqx\"}},\"parameters\":{\"uxjmrzsxwasfwqj\":{\"type\":\"Float\",\"defaultValue\":\"dataks\"},\"r\":{\"type\":\"Int\",\"defaultValue\":\"datamfqdnppfcfgua\"},\"kgdwpj\":{\"type\":\"Int\",\"defaultValue\":\"datau\"}},\"annotations\":[\"dataukgctvnspjvsyydj\",\"datahdukcsqvyeegxhu\",\"dataojwumfjdymeqv\"],\"folder\":{\"name\":\"fyxdjspnonxem\"},\"\":{\"twvgc\":\"datazhn\",\"ypmjc\":\"datacvdjlwwefevtwll\",\"fwgkzuhk\":\"datay\",\"kckwbqwjyfmmkwa\":\"datahx\"}}") .toObject(PaypalObjectDataset.class); - Assertions.assertEquals("yp", model.description()); - Assertions.assertEquals("zf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("voysxa").type()); - Assertions.assertEquals("qobqehspshtisy", model.folder().name()); + Assertions.assertEquals("zhhllx", model.description()); + Assertions.assertEquals("tkqiymmddslwnlg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("uxjmrzsxwasfwqj").type()); + Assertions.assertEquals("fyxdjspnonxem", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PaypalObjectDataset model = new PaypalObjectDataset().withDescription("yp").withStructure("dataodaq") - .withSchema("datakp") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("zf") - .withParameters(mapOf("valcrqaxlmbrtvtg", "databg"))) - .withParameters(mapOf("voysxa", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datalgtlayyxhx"), "njc", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahdfhfaob"), - "ydvwr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datazvcdqws"), - "dlxbaeyocpkv", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datavywotjnjuvtzij"))) - .withAnnotations(Arrays.asList("datafdz", "datamnpbdrcibj", "datannno", "datatnhvdtu")) - .withFolder(new DatasetFolder().withName("qobqehspshtisy")).withTableName("databb"); + PaypalObjectDataset model + = new PaypalObjectDataset().withDescription("zhhllx") + .withStructure("datan") + .withSchema("dataxdjklfsd") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tkqiymmddslwnlg") + .withParameters(mapOf("pybnnnlpqdn", "datahmk", "atupmrslwknrdvvm", "datas", "kdeetnne", + "datajerndzzywxqraqx"))) + .withParameters(mapOf("uxjmrzsxwasfwqj", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataks"), "r", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamfqdnppfcfgua"), + "kgdwpj", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datau"))) + .withAnnotations(Arrays.asList("dataukgctvnspjvsyydj", "datahdukcsqvyeegxhu", "dataojwumfjdymeqv")) + .withFolder(new DatasetFolder().withName("fyxdjspnonxem")) + .withTableName("datadc"); model = BinaryData.fromObject(model).toObject(PaypalObjectDataset.class); - Assertions.assertEquals("yp", model.description()); - Assertions.assertEquals("zf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("voysxa").type()); - Assertions.assertEquals("qobqehspshtisy", model.folder().name()); + Assertions.assertEquals("zhhllx", model.description()); + Assertions.assertEquals("tkqiymmddslwnlg", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("uxjmrzsxwasfwqj").type()); + Assertions.assertEquals("fyxdjspnonxem", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java index 5aa4c901a073c..cf6eb307817e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java @@ -11,16 +11,19 @@ public final class PaypalSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PaypalSource model = BinaryData.fromString( - 
"{\"type\":\"PaypalSource\",\"query\":\"datajjprd\",\"queryTimeout\":\"datablonlhtgexwjhicu\",\"additionalColumns\":\"dataavimxnhylwogtvl\",\"sourceRetryCount\":\"datagd\",\"sourceRetryWait\":\"datat\",\"maxConcurrentConnections\":\"datadxlfn\",\"disableMetricsCollection\":\"dataclkmggnzlfyxaiaf\",\"\":{\"uoayapzzcxkuusba\":\"dataxekfvycvhw\",\"yak\":\"datacassqeybdnz\",\"zkicxtumqinawct\":\"datarkohfqm\",\"kjnpe\":\"dataarboxaluoadmcv\"}}") + "{\"type\":\"wld\",\"query\":\"datadeqqbdcbnr\",\"queryTimeout\":\"datatgtn\",\"additionalColumns\":\"datasopuwesmxodyto\",\"sourceRetryCount\":\"datan\",\"sourceRetryWait\":\"datasdgmuaqtqn\",\"maxConcurrentConnections\":\"datasiptzgomujuken\",\"disableMetricsCollection\":\"datayombkgky\",\"\":{\"kzxrmmoy\":\"dataihprvokodrpy\",\"xoubekafdxgtgcfk\":\"dataufkxy\",\"m\":\"dataaeu\"}}") .toObject(PaypalSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PaypalSource model = new PaypalSource().withSourceRetryCount("datagd").withSourceRetryWait("datat") - .withMaxConcurrentConnections("datadxlfn").withDisableMetricsCollection("dataclkmggnzlfyxaiaf") - .withQueryTimeout("datablonlhtgexwjhicu").withAdditionalColumns("dataavimxnhylwogtvl") - .withQuery("datajjprd"); + PaypalSource model = new PaypalSource().withSourceRetryCount("datan") + .withSourceRetryWait("datasdgmuaqtqn") + .withMaxConcurrentConnections("datasiptzgomujuken") + .withDisableMetricsCollection("datayombkgky") + .withQueryTimeout("datatgtn") + .withAdditionalColumns("datasopuwesmxodyto") + .withQuery("datadeqqbdcbnr"); model = BinaryData.fromObject(model).toObject(PaypalSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java index 19fd2c28f66f8..ce1620c83f35b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class PhoenixDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PhoenixDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datambdqra\",\"table\":\"datad\",\"schema\":\"dataluobbva\"}") + = BinaryData.fromString("{\"tableName\":\"datah\",\"table\":\"dataeinvzsodm\",\"schema\":\"datarq\"}") .toObject(PhoenixDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PhoenixDatasetTypeProperties model = new PhoenixDatasetTypeProperties().withTableName("datambdqra") - .withTable("datad").withSchema("dataluobbva"); + PhoenixDatasetTypeProperties model + = new PhoenixDatasetTypeProperties().withTableName("datah").withTable("dataeinvzsodm").withSchema("datarq"); model = BinaryData.fromObject(model).toObject(PhoenixDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java index 
be3b7200c4872..fae167bfc0af1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java @@ -19,35 +19,37 @@ public final class PhoenixObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PhoenixObjectDataset model = BinaryData.fromString( - "{\"type\":\"PhoenixObject\",\"typeProperties\":{\"tableName\":\"datadmupbusxyug\",\"table\":\"datawplx\",\"schema\":\"datazu\"},\"description\":\"otiixkkbygbgiq\",\"structure\":\"datayshybb\",\"schema\":\"datattyizonzsurqcoja\",\"linkedServiceName\":{\"referenceName\":\"fzhzzcarciuo\",\"parameters\":{\"hwsgnsputfe\":\"datapdthjfv\",\"hnu\":\"dataf\",\"chxxc\":\"datassjgbfbb\",\"bphceeivkbuxlep\":\"datazunf\"}},\"parameters\":{\"fscl\":{\"type\":\"Object\",\"defaultValue\":\"dataqhqpvtwt\"},\"bfytnhdnihuzzjuz\":{\"type\":\"Bool\",\"defaultValue\":\"datagygn\"},\"tsucrxdtejobjz\":{\"type\":\"Bool\",\"defaultValue\":\"databzdtorbiwnyfzdpx\"}},\"annotations\":[\"datat\",\"datad\",\"datanzalgm\",\"dataupjhltyl\"],\"folder\":{\"name\":\"dvbgvzlzjs\"},\"\":{\"ydpoknse\":\"datacutzaz\"}}") + "{\"type\":\"eenlqtqyvlfb\",\"typeProperties\":{\"tableName\":\"datauepaco\",\"table\":\"dataohex\",\"schema\":\"datagukfkbsycbdymb\"},\"description\":\"vqdvfjdsqephtosh\",\"structure\":\"dataua\",\"schema\":\"datawgujrcn\",\"linkedServiceName\":{\"referenceName\":\"a\",\"parameters\":{\"jwyyeomiflrvfeow\":\"datayqoizfyasyddqbws\",\"znwwu\":\"datatshwfrhhasabva\",\"bwtpwbjlpfwuq\":\"datakbpgci\",\"mpnxg\":\"datarpdgitenyuksli\"}},\"parameters\":{\"xej\":{\"type\":\"Bool\",\"defaultValue\":\"datalanchqotmmxl\"},\"psbeqieiux\":{\"type\":\"SecureString\",\"defaultValue\":\"datajzbjwvtuekbbypqs\"}},\"annotations\":[\"datawndalnjjh\",\"datagkjjpcpi\"],\"folder\":{\"name\":\"yxvtajfjatoid\"},\"\":{\"zanaqvengnpuelr\":\"dataurdgc\",\"jt\":\"dataanbrpkocuxfbage\",\"xmdboefnhx\":\"dataltckiwxggfagi\"}}") .toObject(PhoenixObjectDataset.class); - Assertions.assertEquals("otiixkkbygbgiq", model.description()); - Assertions.assertEquals("fzhzzcarciuo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fscl").type()); - Assertions.assertEquals("dvbgvzlzjs", model.folder().name()); + Assertions.assertEquals("vqdvfjdsqephtosh", model.description()); + Assertions.assertEquals("a", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("xej").type()); + Assertions.assertEquals("yxvtajfjatoid", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PhoenixObjectDataset model = new PhoenixObjectDataset().withDescription("otiixkkbygbgiq") - .withStructure("datayshybb").withSchema("datattyizonzsurqcoja") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fzhzzcarciuo") - .withParameters(mapOf("hwsgnsputfe", "datapdthjfv", "hnu", "dataf", "chxxc", "datassjgbfbb", - "bphceeivkbuxlep", "datazunf"))) - .withParameters(mapOf("fscl", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataqhqpvtwt"), - "bfytnhdnihuzzjuz", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datagygn"), - "tsucrxdtejobjz", - new 
ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("databzdtorbiwnyfzdpx"))) - .withAnnotations(Arrays.asList("datat", "datad", "datanzalgm", "dataupjhltyl")) - .withFolder(new DatasetFolder().withName("dvbgvzlzjs")).withTableName("datadmupbusxyug") - .withTable("datawplx").withSchemaTypePropertiesSchema("datazu"); + PhoenixObjectDataset model = new PhoenixObjectDataset().withDescription("vqdvfjdsqephtosh") + .withStructure("dataua") + .withSchema("datawgujrcn") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("a") + .withParameters(mapOf("jwyyeomiflrvfeow", "datayqoizfyasyddqbws", "znwwu", "datatshwfrhhasabva", + "bwtpwbjlpfwuq", "datakbpgci", "mpnxg", "datarpdgitenyuksli"))) + .withParameters(mapOf("xej", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalanchqotmmxl"), + "psbeqieiux", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datajzbjwvtuekbbypqs"))) + .withAnnotations(Arrays.asList("datawndalnjjh", "datagkjjpcpi")) + .withFolder(new DatasetFolder().withName("yxvtajfjatoid")) + .withTableName("datauepaco") + .withTable("dataohex") + .withSchemaTypePropertiesSchema("datagukfkbsycbdymb"); model = BinaryData.fromObject(model).toObject(PhoenixObjectDataset.class); - Assertions.assertEquals("otiixkkbygbgiq", model.description()); - Assertions.assertEquals("fzhzzcarciuo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fscl").type()); - Assertions.assertEquals("dvbgvzlzjs", model.folder().name()); + Assertions.assertEquals("vqdvfjdsqephtosh", model.description()); + Assertions.assertEquals("a", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("xej").type()); + Assertions.assertEquals("yxvtajfjatoid", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java index 63ba1bbc95a01..e35ef7bb9ab4d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java @@ -11,16 +11,19 @@ public final class PhoenixSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PhoenixSource model = BinaryData.fromString( - "{\"type\":\"PhoenixSource\",\"query\":\"datawevlohuahl\",\"queryTimeout\":\"datacboxgpmmz\",\"additionalColumns\":\"dataoyllxc\",\"sourceRetryCount\":\"datahzylspz\",\"sourceRetryWait\":\"datarhynlbtr\",\"maxConcurrentConnections\":\"dataecvag\",\"disableMetricsCollection\":\"datarhadg\",\"\":{\"hiafbhzdjv\":\"datarasxeomjqqhbkxi\",\"ggbpdpzgvq\":\"datayrzi\",\"lvxilaytj\":\"dataznxzaliicrutyhm\",\"ghqdlj\":\"datawfqzwn\"}}") + "{\"type\":\"ancjlkrskzw\",\"query\":\"datajwlwysrswzhci\",\"queryTimeout\":\"dataskmqkanuxju\",\"additionalColumns\":\"datavzodnxlcdgkc\",\"sourceRetryCount\":\"databafqzihmvw\",\"sourceRetryWait\":\"datajwvqiahoqjz\",\"maxConcurrentConnections\":\"datavwdlrt\",\"disableMetricsCollection\":\"dataulmzxhgwz\",\"\":{\"p\":\"datatwuuwehntjssj\"}}") .toObject(PhoenixSource.class); } 
@org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PhoenixSource model - = new PhoenixSource().withSourceRetryCount("datahzylspz").withSourceRetryWait("datarhynlbtr") - .withMaxConcurrentConnections("dataecvag").withDisableMetricsCollection("datarhadg") - .withQueryTimeout("datacboxgpmmz").withAdditionalColumns("dataoyllxc").withQuery("datawevlohuahl"); + PhoenixSource model = new PhoenixSource().withSourceRetryCount("databafqzihmvw") + .withSourceRetryWait("datajwvqiahoqjz") + .withMaxConcurrentConnections("datavwdlrt") + .withDisableMetricsCollection("dataulmzxhgwz") + .withQueryTimeout("dataskmqkanuxju") + .withAdditionalColumns("datavzodnxlcdgkc") + .withQuery("datajwlwysrswzhci"); model = BinaryData.fromObject(model).toObject(PhoenixSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java index f1f7a8b1ec268..459e48802b070 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java @@ -10,14 +10,13 @@ public final class PipelineElapsedTimeMetricPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PipelineElapsedTimeMetricPolicy model = BinaryData.fromString("{\"duration\":\"datahwagohbuffkmrqe\"}") + PipelineElapsedTimeMetricPolicy model = BinaryData.fromString("{\"duration\":\"datamblrrilbywd\"}") .toObject(PipelineElapsedTimeMetricPolicy.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineElapsedTimeMetricPolicy model - = new PipelineElapsedTimeMetricPolicy().withDuration("datahwagohbuffkmrqe"); + PipelineElapsedTimeMetricPolicy model = new PipelineElapsedTimeMetricPolicy().withDuration("datamblrrilbywd"); model = BinaryData.fromObject(model).toObject(PipelineElapsedTimeMetricPolicy.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java index c8a03545d0593..7bd29d9824100 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java @@ -14,22 +14,24 @@ public final class PipelineExternalComputeScalePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PipelineExternalComputeScaleProperties model = BinaryData.fromString( - "{\"timeToLive\":1340072376,\"numberOfPipelineNodes\":536369061,\"numberOfExternalNodes\":1689303593,\"\":{\"toiwfsz\":\"dataipdjxyotgvraxh\",\"etsluqfgk\":\"datarlkosjwr\",\"imioixviobuwbnge\":\"datad\",\"gqamhbmggnqxnex\":\"datawhdq\"}}") + 
"{\"timeToLive\":707426378,\"numberOfPipelineNodes\":2050100407,\"numberOfExternalNodes\":1341467619,\"\":{\"obmgwavz\":\"datareisojhuswmgrb\",\"widteb\":\"dataqpaa\",\"r\":\"dataed\"}}") .toObject(PipelineExternalComputeScaleProperties.class); - Assertions.assertEquals(1340072376, model.timeToLive()); - Assertions.assertEquals(536369061, model.numberOfPipelineNodes()); - Assertions.assertEquals(1689303593, model.numberOfExternalNodes()); + Assertions.assertEquals(707426378, model.timeToLive()); + Assertions.assertEquals(2050100407, model.numberOfPipelineNodes()); + Assertions.assertEquals(1341467619, model.numberOfExternalNodes()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineExternalComputeScaleProperties model = new PipelineExternalComputeScaleProperties() - .withTimeToLive(1340072376).withNumberOfPipelineNodes(536369061).withNumberOfExternalNodes(1689303593) - .withAdditionalProperties(mapOf()); + PipelineExternalComputeScaleProperties model + = new PipelineExternalComputeScaleProperties().withTimeToLive(707426378) + .withNumberOfPipelineNodes(2050100407) + .withNumberOfExternalNodes(1341467619) + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(PipelineExternalComputeScaleProperties.class); - Assertions.assertEquals(1340072376, model.timeToLive()); - Assertions.assertEquals(536369061, model.numberOfPipelineNodes()); - Assertions.assertEquals(1689303593, model.numberOfExternalNodes()); + Assertions.assertEquals(707426378, model.timeToLive()); + Assertions.assertEquals(2050100407, model.numberOfPipelineNodes()); + Assertions.assertEquals(1341467619, model.numberOfExternalNodes()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java index 9e98450155456..d758c66b7171d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java @@ -11,14 +11,14 @@ public final class PipelineFolderTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PipelineFolder model = BinaryData.fromString("{\"name\":\"rnntiewdjcv\"}").toObject(PipelineFolder.class); - Assertions.assertEquals("rnntiewdjcv", model.name()); + PipelineFolder model = BinaryData.fromString("{\"name\":\"onmacjekniz\"}").toObject(PipelineFolder.class); + Assertions.assertEquals("onmacjekniz", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineFolder model = new PipelineFolder().withName("rnntiewdjcv"); + PipelineFolder model = new PipelineFolder().withName("onmacjekniz"); model = BinaryData.fromObject(model).toObject(PipelineFolder.class); - Assertions.assertEquals("rnntiewdjcv", model.name()); + Assertions.assertEquals("onmacjekniz", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java index 29397c87bcce4..aea32d4b3048b 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java @@ -28,93 +28,218 @@ public final class PipelineListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PipelineListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"description\":\"xrbuukzclew\",\"activities\":[{\"type\":\"Activity\",\"name\":\"lw\",\"description\":\"ztzp\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"yfzqwhxxbu\",\"dependencyConditions\":[]},{\"activity\":\"qa\",\"dependencyConditions\":[]},{\"activity\":\"zfeqztppri\",\"dependencyConditions\":[]},{\"activity\":\"lxorjaltolmncws\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"wcsdbnwdcfhucq\",\"value\":\"datapfuvglsbjjca\"},{\"name\":\"vxb\",\"value\":\"datat\"},{\"name\":\"udutnco\",\"value\":\"datamr\"},{\"name\":\"xqtvcofu\",\"value\":\"dataf\"}],\"\":{\"bgdknnqv\":\"datagj\",\"sgsahmkycgr\":\"dataaznqntoru\",\"s\":\"datauwjuetaeburuvdmo\",\"tpuqujmq\":\"datazlxwabmqoefkifr\"}},{\"type\":\"Activity\",\"name\":\"gkfbtndoaong\",\"description\":\"cn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ed\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"waezkojvd\",\"value\":\"datapzfoqoui\"}],\"\":{\"p\":\"dataxarzgszufoxciq\",\"xkhnzbonlwnto\":\"datadoamciodhkha\",\"zcmrvexztvb\":\"datagokdwbwhks\",\"lmnguxaw\":\"dataqgsfraoyzkoow\"}}],\"parameters\":{\"bykutw\":{\"type\":\"Int\",\"defaultValue\":\"datayuuximerqfobwyzn\"},\"sd\":{\"type\":\"Float\",\"defaultValue\":\"datapagmhrskdsnf\"},\"zev\":{\"type\":\"String\",\"defaultValue\":\"datagtdlmk\"},\"ejdcngqqmoakuf\":{\"type\":\"String\",\"defaultValue\":\"dataewpusdsttwvogvb\"}},\"variables\":{\"grtwae\":{\"type\":\"Array\",\"defaultValue\":\"datawr\"},\"inrfdwoyu\":{\"type\":\"String\",\"defaultValue\":\"datazkopb\"},\"mzqhoftrmaequi\":{\"type\":\"Bool\",\"defaultValue\":\"dataiuiefozbhdmsm\"}},\"concurrency\":1964875083,\"annotations\":[\"dataslfaoqzpiyyl\",\"dataalnswhccsphk\",\"dataivwitqscywugg\",\"dataoluhczbwemh\"],\"runDimensions\":{\"wmsweypqwd\":\"datasbrgz\",\"mkttlstvlzywem\":\"dataggicccnxqhue\",\"lusiy\":\"datazrncsdt\"},\"folder\":{\"name\":\"fgytguslfeadcyg\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datahejhzisx\"}}},\"name\":\"pelol\",\"type\":\"vk\",\"etag\":\"pqvujzraehtwdwrf\",\"\":{\"cdl\":\"dataiby\"},\"id\":\"shfwpracstwity\"}],\"nextLink\":\"evxccedcp\"}") + 
"{\"value\":[{\"properties\":{\"description\":\"lxorjaltolmncws\",\"activities\":[{\"type\":\"csdbnwdcfhuc\",\"name\":\"dpfuvg\",\"description\":\"bjj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"t\",\"dependencyConditions\":[]},{\"activity\":\"udutnco\",\"dependencyConditions\":[]},{\"activity\":\"mr\",\"dependencyConditions\":[]},{\"activity\":\"xqtvcofu\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"vkg\",\"value\":\"datau\"},{\"name\":\"gdknnqv\",\"value\":\"dataaznqntoru\"}],\"\":{\"grauwjuetaebur\":\"datasahmky\",\"oefki\":\"datavdmovsmzlxwabm\",\"qujmqlgkf\":\"datarvtp\",\"tujitcjedft\":\"datatndoaongbjc\"}},{\"type\":\"ae\",\"name\":\"kojvd\",\"description\":\"zfoqouicybxar\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"x\",\"dependencyConditions\":[]},{\"activity\":\"iqopidoamciod\",\"dependencyConditions\":[]},{\"activity\":\"khazxkhnzbonlwn\",\"dependencyConditions\":[]},{\"activity\":\"oegokdwbwh\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"zcmrvexztvb\",\"value\":\"dataqgsfraoyzkoow\"}],\"\":{\"uximerqfobw\":\"datanguxawqaldsy\",\"r\":\"dataznkbykutwpfhpagm\",\"kkze\":\"datakdsnfdsdoakgtdl\",\"sdsttwvog\":\"datadlhewp\"}},{\"type\":\"bejdcn\",\"name\":\"qqmoaku\",\"description\":\"m\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"rtwaenuuzko\",\"dependencyConditions\":[]},{\"activity\":\"bminrfdwoyuhhzi\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fozbhdmsmlmzqhof\",\"value\":\"datarmaequ\"},{\"name\":\"ah\",\"value\":\"dataicslfaoq\"},{\"name\":\"piyylhalnswhccsp\",\"value\":\"datakaivwit\"}],\"\":{\"wem\":\"dataywuggwoluhcz\",\"sbrgz\":\"dataai\",\"ggicccnxqhue\":\"datawmsweypqwd\"}},{\"type\":\"ktt\",\"name\":\"stvlzywemhzrnc\",\"description\":\"tclusiypbsfgy\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"adcy\",\"dependencyConditions\":[]},{\"activity\":\"qukyhejhzi\",\"dependencyConditions\":[]},{\"activity\":\"xgfpelolppv\",\"dependencyConditions\":[]},{\"activity\":\"srp\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"jzraehtwdwrf\",\"value\":\"dataswibyr\"},{\"name\":\"dl\",\"value\":\"datah\"}],\"\":{\"c\":\"datawpracstwitykhev\",\"jc\":\"dataedcpnmdyodnwzxl\",\"vvwxqi\":\"datanhltiugcxn\"}}],\"parameters\":{\"wxwlmdjrkvfgb\":{\"type\":\"SecureString\",\"defaultValue\":\"datay\"},\"bodacizsjq\":{\"type\":\"SecureString\",\"defaultValue\":\"datap\"},\"ibqipqkg\":{\"type\":\"Float\",\"defaultValue\":\"datarribd\"},\"pjorwkqnyhg\":{\"type\":\"Array\",\"defaultValue\":\"datandzwmkrefa\"}},\"variables\":{\"wfsdjpvkvpbj\":{\"type\":\"Bool\",\"defaultValue\":\"dataivfxzsjabibsyst\"}},\"concurrency\":1795251983,\"annotations\":[\"datazkdvncja\",\"dataudurgkakmokz\"],\"runDimensions\":{\"mouwqlgzrfzeey\":\"dataklff\"},\"folder\":{\"name\":\"zi\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datahqlbjbsybbq\"}}},\"name\":\"vtldgmfpgvmpip\",\"type\":\"ltha\",\"etag\":\"x\",\"\":{\"pdrhne\":\"datawutwbdsre\",\"gpikpzimejza\":\"datayowqkdwytisibir\",\"rjqc\":\"datalfzxiavrmbzonoki\"},\"id\":\"gzpfrla\"},{\"properties\":{\"description\":\"zrnw\",\"activities\":[{\"type\":\"dfpwpjylwbtlhfls\",\"name\":\"cdhszf\",\"description\":\"fbgofeljagrqmqh\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ojnal\",\"dependencyConditions\":[]},{\"activity\":\"hfkvtvsexsowuel\",\"dependencyConditions\":[]},{\"activity\":\"q
hhahhxvrhmzkwpj\",\"dependencyConditions\":[]},{\"activity\":\"wws\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"hftqsxhqxujxukn\",\"value\":\"dataxdigrjg\"},{\"name\":\"ufzdmsyq\",\"value\":\"datafi\"},{\"name\":\"whbotzingamv\",\"value\":\"dataphoszqz\"},{\"name\":\"dphqamv\",\"value\":\"datakfwynw\"}],\"\":{\"tnvyqiatkzwp\":\"databvkayh\",\"vvsccyajguq\":\"datanpwzcjaes\",\"lvdnkfx\":\"datahwyg\",\"apfcqdpsq\":\"datasemdwzrmu\"}},{\"type\":\"vpsvuoymgcce\",\"name\":\"vezrypqlmfeo\",\"description\":\"rqwky\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xedk\",\"dependencyConditions\":[]},{\"activity\":\"wep\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"cr\",\"value\":\"datakbwcc\"}],\"\":{\"htjsying\":\"datavcdwxlpqekftn\",\"tmtdhtmdvypgik\":\"datafq\",\"irryuzhlh\":\"datagszywk\"}},{\"type\":\"oqrvqqaatjin\",\"name\":\"vgoup\",\"description\":\"iibfggj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"x\",\"dependencyConditions\":[]},{\"activity\":\"v\",\"dependencyConditions\":[]},{\"activity\":\"kkgll\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"ygvjayvblmh\",\"value\":\"datak\"},{\"name\":\"uhbxvvy\",\"value\":\"datagsopbyrqufegxu\"},{\"name\":\"wz\",\"value\":\"databnhlmc\"}],\"\":{\"itvgbmhrixkwm\":\"datadn\"}}],\"parameters\":{\"hbpnaixexccbd\":{\"type\":\"Array\",\"defaultValue\":\"dataveg\"},\"ghtpw\":{\"type\":\"Float\",\"defaultValue\":\"dataxhcexdrrvqahq\"}},\"variables\":{\"vmtgjqppy\":{\"type\":\"String\",\"defaultValue\":\"datajsvfycxzbfvoowv\"},\"sxkm\":{\"type\":\"String\",\"defaultValue\":\"dataronzmyhgfip\"}},\"concurrency\":115360224,\"annotations\":[\"datarrjreafxtsgu\",\"datahjglikk\"],\"runDimensions\":{\"bq\":\"datalo\"},\"folder\":{\"name\":\"zlmvfelfktgp\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datawjxeznoigbr\"}}},\"name\":\"w\",\"type\":\"kpnb\",\"etag\":\"zejjoqk\",\"\":{\"azpxdtnkdmkqjjl\":\"datahsxttaugzxnf\",\"ou\":\"datauenvrkp\",\"aays\":\"dataibreb\"},\"id\":\"ixqtn\"}],\"nextLink\":\"tezlwff\"}") .toObject(PipelineListResponse.class); - Assertions.assertEquals("shfwpracstwity", model.value().get(0).id()); - Assertions.assertEquals("xrbuukzclew", model.value().get(0).description()); - Assertions.assertEquals("lw", model.value().get(0).activities().get(0).name()); - Assertions.assertEquals("ztzp", model.value().get(0).activities().get(0).description()); + Assertions.assertEquals("gzpfrla", model.value().get(0).id()); + Assertions.assertEquals("lxorjaltolmncws", model.value().get(0).description()); + Assertions.assertEquals("dpfuvg", model.value().get(0).activities().get(0).name()); + Assertions.assertEquals("bjj", model.value().get(0).activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.value().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.value().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("yfzqwhxxbu", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("wcsdbnwdcfhucq", - model.value().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.INT, model.value().get(0).parameters().get("bykutw").type()); - Assertions.assertEquals(VariableType.ARRAY, model.value().get(0).variables().get("grtwae").type()); - Assertions.assertEquals(1964875083, 
model.value().get(0).concurrency()); - Assertions.assertEquals("fgytguslfeadcyg", model.value().get(0).folder().name()); - Assertions.assertEquals("evxccedcp", model.nextLink()); + Assertions.assertEquals("t", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("vkg", model.value().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.value().get(0).parameters().get("wxwlmdjrkvfgb").type()); + Assertions.assertEquals(VariableType.BOOL, model.value().get(0).variables().get("wfsdjpvkvpbj").type()); + Assertions.assertEquals(1795251983, model.value().get(0).concurrency()); + Assertions.assertEquals("zi", model.value().get(0).folder().name()); + Assertions.assertEquals("tezlwff", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PipelineListResponse model = new PipelineListResponse() - .withValue(Arrays.asList(new PipelineResourceInner().withId("shfwpracstwity").withDescription("xrbuukzclew") - .withActivities(Arrays.asList( - new Activity().withName("lw").withDescription("ztzp").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("yfzqwhxxbu") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qa").withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zfeqztppri") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lxorjaltolmncws") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("wcsdbnwdcfhucq").withValue("datapfuvglsbjjca"), - new UserProperty().withName("vxb").withValue("datat"), - new UserProperty().withName("udutnco").withValue("datamr"), - new UserProperty().withName("xqtvcofu").withValue("dataf"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("gkfbtndoaong").withDescription("cn").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ed") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("waezkojvd").withValue("datapzfoqoui"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("bykutw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayuuximerqfobwyzn"), - "sd", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapagmhrskdsnf"), - "zev", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datagtdlmk"), - "ejdcngqqmoakuf", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("dataewpusdsttwvogvb"))) - .withVariables(mapOf("grtwae", - new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datawr"), "inrfdwoyu", - new VariableSpecification().withType(VariableType.STRING).withDefaultValue("datazkopb"), - "mzqhoftrmaequi", - new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("dataiuiefozbhdmsm"))) - .withConcurrency(1964875083) - .withAnnotations( - Arrays.asList("dataslfaoqzpiyyl", 
"dataalnswhccsphk", "dataivwitqscywugg", "dataoluhczbwemh")) - .withRunDimensions( - mapOf("wmsweypqwd", "datasbrgz", "mkttlstvlzywem", "dataggicccnxqhue", "lusiy", "datazrncsdt")) - .withFolder(new PipelineFolder().withName("fgytguslfeadcyg")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datahejhzisx"))) - .withAdditionalProperties(mapOf("name", "pelol", "etag", "pqvujzraehtwdwrf", "type", "vk")))) - .withNextLink("evxccedcp"); + .withValue(Arrays.asList( + new PipelineResourceInner().withId("gzpfrla") + .withDescription("lxorjaltolmncws") + .withActivities(Arrays.asList( + new Activity().withName("dpfuvg") + .withDescription("bjj") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("t") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("udutnco") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("mr") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xqtvcofu") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("vkg").withValue("datau"), + new UserProperty().withName("gdknnqv").withValue("dataaznqntoru"))) + .withAdditionalProperties(mapOf("type", "csdbnwdcfhuc")), + new Activity().withName("kojvd") + .withDescription("zfoqouicybxar") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("x") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("iqopidoamciod") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("khazxkhnzbonlwn") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("oegokdwbwh") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays + .asList(new UserProperty().withName("zcmrvexztvb").withValue("dataqgsfraoyzkoow"))) + .withAdditionalProperties(mapOf("type", "ae")), + new Activity().withName("qqmoaku") + .withDescription("m") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("rtwaenuuzko") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("bminrfdwoyuhhzi") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("fozbhdmsmlmzqhof").withValue("datarmaequ"), + new UserProperty().withName("ah").withValue("dataicslfaoq"), + new UserProperty().withName("piyylhalnswhccsp").withValue("datakaivwit"))) + .withAdditionalProperties(mapOf("type", "bejdcn")), + new Activity().withName("stvlzywemhzrnc") + .withDescription("tclusiypbsfgy") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("adcy") + 
.withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("qukyhejhzi") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xgfpelolppv") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("srp") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("jzraehtwdwrf").withValue("dataswibyr"), + new UserProperty().withName("dl").withValue("datah"))) + .withAdditionalProperties(mapOf("type", "ktt")))) + .withParameters(mapOf("wxwlmdjrkvfgb", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datay"), + "bodacizsjq", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datap"), + "ibqipqkg", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarribd"), + "pjorwkqnyhg", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datandzwmkrefa"))) + .withVariables(mapOf("wfsdjpvkvpbj", + new VariableSpecification().withType(VariableType.BOOL) + .withDefaultValue("dataivfxzsjabibsyst"))) + .withConcurrency(1795251983) + .withAnnotations(Arrays.asList("datazkdvncja", "dataudurgkakmokz")) + .withRunDimensions(mapOf("mouwqlgzrfzeey", "dataklff")) + .withFolder(new PipelineFolder().withName("zi")) + .withPolicy(new PipelinePolicy() + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datahqlbjbsybbq"))) + .withAdditionalProperties(mapOf("name", "vtldgmfpgvmpip", "etag", "x", "type", "ltha")), + new PipelineResourceInner().withId("ixqtn") + .withDescription("zrnw") + .withActivities(Arrays.asList( + new Activity().withName("cdhszf") + .withDescription("fbgofeljagrqmqh") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("ojnal") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("hfkvtvsexsowuel") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("qhhahhxvrhmzkwpj") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wws") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("hftqsxhqxujxukn").withValue("dataxdigrjg"), + new UserProperty().withName("ufzdmsyq").withValue("datafi"), + new UserProperty().withName("whbotzingamv").withValue("dataphoszqz"), + new UserProperty().withName("dphqamv").withValue("datakfwynw"))) + .withAdditionalProperties(mapOf("type", "dfpwpjylwbtlhfls")), + new Activity().withName("vezrypqlmfeo") + .withDescription("rqwky") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("xedk") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wep") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("cr").withValue("datakbwcc"))) + 
.withAdditionalProperties(mapOf("type", "vpsvuoymgcce")), + new Activity().withName("vgoup") + .withDescription("iibfggj") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("x") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("v") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("kkgll") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("ygvjayvblmh").withValue("datak"), + new UserProperty().withName("uhbxvvy").withValue("datagsopbyrqufegxu"), + new UserProperty().withName("wz").withValue("databnhlmc"))) + .withAdditionalProperties(mapOf("type", "oqrvqqaatjin")))) + .withParameters(mapOf("hbpnaixexccbd", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataveg"), "ghtpw", + new ParameterSpecification().withType(ParameterType.FLOAT) + .withDefaultValue("dataxhcexdrrvqahq"))) + .withVariables(mapOf("vmtgjqppy", + new VariableSpecification().withType(VariableType.STRING) + .withDefaultValue("datajsvfycxzbfvoowv"), + "sxkm", + new VariableSpecification().withType(VariableType.STRING).withDefaultValue("dataronzmyhgfip"))) + .withConcurrency(115360224) + .withAnnotations(Arrays.asList("datarrjreafxtsgu", "datahjglikk")) + .withRunDimensions(mapOf("bq", "datalo")) + .withFolder(new PipelineFolder().withName("zlmvfelfktgp")) + .withPolicy(new PipelinePolicy() + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datawjxeznoigbr"))) + .withAdditionalProperties(mapOf("name", "w", "etag", "zejjoqk", "type", "kpnb")))) + .withNextLink("tezlwff"); model = BinaryData.fromObject(model).toObject(PipelineListResponse.class); - Assertions.assertEquals("shfwpracstwity", model.value().get(0).id()); - Assertions.assertEquals("xrbuukzclew", model.value().get(0).description()); - Assertions.assertEquals("lw", model.value().get(0).activities().get(0).name()); - Assertions.assertEquals("ztzp", model.value().get(0).activities().get(0).description()); + Assertions.assertEquals("gzpfrla", model.value().get(0).id()); + Assertions.assertEquals("lxorjaltolmncws", model.value().get(0).description()); + Assertions.assertEquals("dpfuvg", model.value().get(0).activities().get(0).name()); + Assertions.assertEquals("bjj", model.value().get(0).activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.value().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.value().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("yfzqwhxxbu", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("wcsdbnwdcfhucq", - model.value().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.INT, model.value().get(0).parameters().get("bykutw").type()); - Assertions.assertEquals(VariableType.ARRAY, model.value().get(0).variables().get("grtwae").type()); - Assertions.assertEquals(1964875083, model.value().get(0).concurrency()); - Assertions.assertEquals("fgytguslfeadcyg", model.value().get(0).folder().name()); - Assertions.assertEquals("evxccedcp", model.nextLink()); + 
Assertions.assertEquals("t", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("vkg", model.value().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.SECURE_STRING, + model.value().get(0).parameters().get("wxwlmdjrkvfgb").type()); + Assertions.assertEquals(VariableType.BOOL, model.value().get(0).variables().get("wfsdjpvkvpbj").type()); + Assertions.assertEquals(1795251983, model.value().get(0).concurrency()); + Assertions.assertEquals("zi", model.value().get(0).folder().name()); + Assertions.assertEquals("tezlwff", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java index 160be086b9c6e..b7b0409a6232c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java @@ -11,14 +11,14 @@ public final class PipelinePolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PipelinePolicy model - = BinaryData.fromString("{\"elapsedTimeMetric\":{\"duration\":\"datawr\"}}").toObject(PipelinePolicy.class); + PipelinePolicy model = BinaryData.fromString("{\"elapsedTimeMetric\":{\"duration\":\"datavcimpev\"}}") + .toObject(PipelinePolicy.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelinePolicy model - = new PipelinePolicy().withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datawr")); + PipelinePolicy model = new PipelinePolicy() + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datavcimpev")); model = BinaryData.fromObject(model).toObject(PipelinePolicy.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java index b7c0e8950aa9d..67c58afa33f0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java @@ -11,18 +11,17 @@ public final class PipelineReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PipelineReference model - = BinaryData.fromString("{\"referenceName\":\"pisqqzlgcndhzx\",\"name\":\"fcfsrhkhgsnx\"}") - .toObject(PipelineReference.class); - Assertions.assertEquals("pisqqzlgcndhzx", model.referenceName()); - Assertions.assertEquals("fcfsrhkhgsnx", model.name()); + PipelineReference model = BinaryData.fromString("{\"referenceName\":\"cpqtwloesq\",\"name\":\"gvrbny\"}") + .toObject(PipelineReference.class); + Assertions.assertEquals("cpqtwloesq", model.referenceName()); + Assertions.assertEquals("gvrbny", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineReference model = new 
PipelineReference().withReferenceName("pisqqzlgcndhzx").withName("fcfsrhkhgsnx"); + PipelineReference model = new PipelineReference().withReferenceName("cpqtwloesq").withName("gvrbny"); model = BinaryData.fromObject(model).toObject(PipelineReference.class); - Assertions.assertEquals("pisqqzlgcndhzx", model.referenceName()); - Assertions.assertEquals("fcfsrhkhgsnx", model.name()); + Assertions.assertEquals("cpqtwloesq", model.referenceName()); + Assertions.assertEquals("gvrbny", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java index c81cf5c27f1de..d619b2bc59b64 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java @@ -28,127 +28,76 @@ public final class PipelineResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PipelineResourceInner model = BinaryData.fromString( - "{\"properties\":{\"description\":\"dyodnwzxltj\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hlt\",\"description\":\"gcxn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"byqunyow\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"acizsjqlhkrr\":\"datarkvfgbvfvpdbo\",\"hvxndzwmkrefajpj\":\"databdeibqipqk\",\"yhgbijtjivfx\":\"datarwkq\",\"stawfsdjpvkv\":\"datasjabibs\"}},{\"activity\":\"bjxbkzbzk\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"jjklff\":\"dataudurgkakmokz\",\"bizikayuhq\":\"datamouwqlgzrfzeey\",\"wrv\":\"databjbsybb\"}},{\"activity\":\"ldgmfpgvmpip\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"dsrezpdrhneuyow\":\"dataqfxssmwutw\",\"t\":\"datakdw\"}},{\"activity\":\"sibircgpi\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Skipped\",\"Failed\"],\"\":{\"nokixrjqcirgz\":\"dataanlfzxiavrmbz\"}}],\"userProperties\":[{\"name\":\"lazszrn\",\"value\":\"dataoiindfpwpjy\"},{\"name\":\"wbtlhflsjcdh\",\"value\":\"datazfjvfbgofe\"},{\"name\":\"jagrqmqhldvr\",\"value\":\"dataiiojnal\"}],\"\":{\"ueluqhhahhxvrhmz\":\"datakvtvsexso\"}},{\"type\":\"Activity\",\"name\":\"wpjgwws\",\"description\":\"ghftqsxhqxujxuk\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jguufzdm\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"phoszqz\":\"datahwhbotzingamv\",\"kfwynw\":\"datadphqamv\",\"tnvyqiatkzwp\":\"datavtbvkayh\",\"vvsccyajguq\":\"datanpwzcjaes\"}},{\"activity\":\"hwyg\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"uh\":\"datafxusemdwzr\"}},{\"activity\":\"pfcqdp\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Skipped\"],\"\":{\"qlmfeoker\":\"datauoymgccelvezry\"}},{\"activity\":\"wkyhkobopgxe\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"vcdwxlpqekftn\":\"datapbqpcrfkbwccsn\",\"fq\":\"datahtjsying\",\"gszywk\":\"datatmtdhtmdvypgik\"}}],\"userProperties\":[{\"name\":\"ryuzh\",\"value\":\"datahkjoqr\"},{\"name\":\"qqaatjinrvgou\",\"value\":\"datamfiibfggj\"},{\"name\":\"ool\",\"value\":\"datarwxkvtkkgl\"}],\"\":{\"hvkzuh\":\"datajygvjayvbl\",\"gsopbyrqufegxu\":\"dataxvvy\",\"bnh
lmc\":\"datawz\",\"dn\":\"datal\"}},{\"type\":\"Activity\",\"name\":\"itvgbmhrixkwm\",\"description\":\"jejveg\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xexccbdreaxhcexd\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"jnhyjsvf\":\"dataghtpw\",\"mtg\":\"datacxzbfvoowvr\",\"y\":\"dataqp\"}},{\"activity\":\"s\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"ekrrjr\":\"datahgfipnsxkmcw\",\"jglikkxwslolb\":\"dataafxtsgum\",\"elfk\":\"datapvuzlmv\"}}],\"userProperties\":[{\"name\":\"lcrpw\",\"value\":\"dataxeznoi\"},{\"name\":\"brnjwmw\",\"value\":\"datapn\"},{\"name\":\"saz\",\"value\":\"datajjoqkagf\"},{\"name\":\"sxtta\",\"value\":\"datagzxnfaazpxdtnk\"}],\"\":{\"rkpyouaibrebqaay\":\"dataqjjlwuen\",\"ixqtn\":\"dataj\",\"ffiakp\":\"datattezlw\",\"tmmjihyeozph\":\"datapqqmted\"}}],\"parameters\":{\"mdscwxqupev\":{\"type\":\"Float\",\"defaultValue\":\"dataqncygupkvi\"},\"jujbypelmcuvhixb\":{\"type\":\"Float\",\"defaultValue\":\"datastotxh\"},\"yl\":{\"type\":\"Bool\",\"defaultValue\":\"datafw\"}},\"variables\":{\"iwkkbn\":{\"type\":\"Array\",\"defaultValue\":\"datasttp\"}},\"concurrency\":647346434,\"annotations\":[\"datavtylbfpncu\",\"datadoiwi\",\"datathtywub\",\"datacbihwqk\"],\"runDimensions\":{\"dgoihxumwctondzj\":\"datantwjch\",\"fdlwg\":\"datauu\",\"gseinq\":\"dataytsbwtovv\"},\"folder\":{\"name\":\"fxqknpirgneptt\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"dataniffcdmqnroj\"}}},\"name\":\"ijnkrxfrdd\",\"type\":\"ratiz\",\"etag\":\"onasxifto\",\"\":{\"tw\":\"datazh\",\"lgnyhmo\":\"datasgogczhonnxk\",\"h\":\"datasxkkg\",\"hqxvcxgfrpdsofbs\":\"datarghxjb\"},\"id\":\"nsvbuswdv\"}") + "{\"properties\":{\"description\":\"akpjpqqmtedlt\",\"activities\":[{\"type\":\"hyeozphvwau\",\"name\":\"qncygupkvi\",\"description\":\"dscwxqupevzhf\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"jujbypelmcuvhixb\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\"],\"\":{\"kkbnu\":\"datalrcoolsttpki\"}},{\"activity\":\"rywvtylbfpn\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"cbihwqk\":\"datawiithtywub\"}}],\"userProperties\":[{\"name\":\"ntwjch\",\"value\":\"datadgoihxumwctondzj\"},{\"name\":\"uu\",\"value\":\"datafdlwg\"},{\"name\":\"ytsbwtovv\",\"value\":\"datagseinq\"}],\"\":{\"qmsniffcdmqnro\":\"datafxqknpirgneptt\",\"hcrat\":\"datalpijnkrxfrd\",\"ft\":\"datazzronasx\",\"zh\":\"datazq\"}}],\"parameters\":{\"mossxkkgthrrghxj\":{\"type\":\"Int\",\"defaultValue\":\"datagogczhonnxkrlgny\"},\"hrnsvbu\":{\"type\":\"Float\",\"defaultValue\":\"dataqxvcxgfrpdsofb\"},\"ybycnunvj\":{\"type\":\"Int\",\"defaultValue\":\"datavz\"}},\"variables\":{\"uxzejntpsew\":{\"type\":\"Bool\",\"defaultValue\":\"dataawnopqgikyzirtxd\"}},\"concurrency\":354517921,\"annotations\":[\"dataqukrydxt\"],\"runDimensions\":{\"tbghhavgrvkf\":\"dataeoxorggufhyao\",\"mv\":\"dataovjzhpjbibgjmfx\",\"zzxscyhwzdgiruj\":\"datacluyovwxnbkf\",\"ujviylwdshfs\":\"datazbomvzzbtdcqvpni\"},\"folder\":{\"name\":\"bgye\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datasgaojfmwncot\"}}},\"name\":\"fhir\",\"type\":\"ymoxoftpipiwyczu\",\"etag\":\"a\",\"\":{\"skasdvlmfwdgzxu\":\"datajlihhyus\",\"euzvx\":\"dataucvpamrs\",\"xmrhu\":\"datarisjnhnytxifqjz\",\"cesutrgjupauut\":\"datalw\"},\"id\":\"oqh\"}") .toObject(PipelineResourceInner.class); - Assertions.assertEquals("nsvbuswdv", model.id()); - Assertions.assertEquals("dyodnwzxltj", model.description()); - 
Assertions.assertEquals("hlt", model.activities().get(0).name()); - Assertions.assertEquals("gcxn", model.activities().get(0).description()); + Assertions.assertEquals("oqh", model.id()); + Assertions.assertEquals("akpjpqqmtedlt", model.description()); + Assertions.assertEquals("qncygupkvi", model.activities().get(0).name()); + Assertions.assertEquals("dscwxqupevzhf", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("byqunyow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("jujbypelmcuvhixb", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lazszrn", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("mdscwxqupev").type()); - Assertions.assertEquals(VariableType.ARRAY, model.variables().get("iwkkbn").type()); - Assertions.assertEquals(647346434, model.concurrency()); - Assertions.assertEquals("fxqknpirgneptt", model.folder().name()); + Assertions.assertEquals("ntwjch", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("mossxkkgthrrghxj").type()); + Assertions.assertEquals(VariableType.BOOL, model.variables().get("uxzejntpsew").type()); + Assertions.assertEquals(354517921, model.concurrency()); + Assertions.assertEquals("bgye", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineResourceInner model - = new PipelineResourceInner().withId("nsvbuswdv").withDescription("dyodnwzxltj") - .withActivities(Arrays.asList( - new Activity().withName("hlt").withDescription("gcxn").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("byqunyow") - .withDependencyConditions(Arrays.asList( - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bjxbkzbzk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ldgmfpgvmpip") - .withDependencyConditions(Arrays.asList( - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("sibircgpi") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("lazszrn").withValue("dataoiindfpwpjy"), - new UserProperty().withName("wbtlhflsjcdh") - .withValue("datazfjvfbgofe"), - new UserProperty().withName("jagrqmqhldvr").withValue("dataiiojnal"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new 
Activity().withName("wpjgwws").withDescription("ghftqsxhqxujxuk") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency() - .withActivity("jguufzdm") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hwyg") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("pfcqdp") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wkyhkobopgxe") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ryuzh").withValue("datahkjoqr"), - new UserProperty().withName("qqaatjinrvgou") - .withValue("datamfiibfggj"), - new UserProperty().withName("ool").withValue("datarwxkvtkkgl"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("itvgbmhrixkwm").withDescription("jejveg").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs( - ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xexccbdreaxhcexd") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("s") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("lcrpw").withValue("dataxeznoi"), - new UserProperty().withName("brnjwmw").withValue("datapn"), - new UserProperty().withName("saz").withValue("datajjoqkagf"), - new UserProperty().withName("sxtta").withValue("datagzxnfaazpxdtnk"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("mdscwxqupev", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqncygupkvi"), - "jujbypelmcuvhixb", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datastotxh"), "yl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafw"))) - .withVariables(mapOf("iwkkbn", - new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datasttp"))) - .withConcurrency(647346434) - .withAnnotations(Arrays.asList("datavtylbfpncu", "datadoiwi", "datathtywub", "datacbihwqk")) - .withRunDimensions( - mapOf("dgoihxumwctondzj", "datantwjch", "fdlwg", "datauu", "gseinq", "dataytsbwtovv")) - .withFolder(new PipelineFolder().withName("fxqknpirgneptt")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("dataniffcdmqnroj"))) - .withAdditionalProperties(mapOf("name", "ijnkrxfrdd", "etag", "onasxifto", "type", "ratiz")); + PipelineResourceInner model = new PipelineResourceInner().withId("oqh") + .withDescription("akpjpqqmtedlt") + .withActivities(Arrays.asList(new Activity().withName("qncygupkvi") + .withDescription("dscwxqupevzhf") + 
.withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("jujbypelmcuvhixb") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("rywvtylbfpn") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("ntwjch").withValue("datadgoihxumwctondzj"), + new UserProperty().withName("uu").withValue("datafdlwg"), + new UserProperty().withName("ytsbwtovv").withValue("datagseinq"))) + .withAdditionalProperties(mapOf("type", "hyeozphvwau")))) + .withParameters(mapOf("mossxkkgthrrghxj", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datagogczhonnxkrlgny"), + "hrnsvbu", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqxvcxgfrpdsofb"), + "ybycnunvj", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datavz"))) + .withVariables(mapOf("uxzejntpsew", + new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("dataawnopqgikyzirtxd"))) + .withConcurrency(354517921) + .withAnnotations(Arrays.asList("dataqukrydxt")) + .withRunDimensions(mapOf("tbghhavgrvkf", "dataeoxorggufhyao", "mv", "dataovjzhpjbibgjmfx", + "zzxscyhwzdgiruj", "datacluyovwxnbkf", "ujviylwdshfs", "datazbomvzzbtdcqvpni")) + .withFolder(new PipelineFolder().withName("bgye")) + .withPolicy(new PipelinePolicy() + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datasgaojfmwncot"))) + .withAdditionalProperties(mapOf("name", "fhir", "etag", "a", "type", "ymoxoftpipiwyczu")); model = BinaryData.fromObject(model).toObject(PipelineResourceInner.class); - Assertions.assertEquals("nsvbuswdv", model.id()); - Assertions.assertEquals("dyodnwzxltj", model.description()); - Assertions.assertEquals("hlt", model.activities().get(0).name()); - Assertions.assertEquals("gcxn", model.activities().get(0).description()); + Assertions.assertEquals("oqh", model.id()); + Assertions.assertEquals("akpjpqqmtedlt", model.description()); + Assertions.assertEquals("qncygupkvi", model.activities().get(0).name()); + Assertions.assertEquals("dscwxqupevzhf", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("byqunyow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("jujbypelmcuvhixb", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lazszrn", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("mdscwxqupev").type()); - Assertions.assertEquals(VariableType.ARRAY, model.variables().get("iwkkbn").type()); - Assertions.assertEquals(647346434, model.concurrency()); - 
Assertions.assertEquals("fxqknpirgneptt", model.folder().name()); + Assertions.assertEquals("ntwjch", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("mossxkkgthrrghxj").type()); + Assertions.assertEquals(VariableType.BOOL, model.variables().get("uxzejntpsew").type()); + Assertions.assertEquals(354517921, model.concurrency()); + Assertions.assertEquals("bgye", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java index 4c1955cfb3326..a0e8743d89f36 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java @@ -15,27 +15,29 @@ public final class PipelineRunInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PipelineRunInner model = BinaryData.fromString( - "{\"runId\":\"lqol\",\"runGroupId\":\"kcgxxlxsffgcvi\",\"isLatest\":true,\"pipelineName\":\"wlvwlyoupf\",\"parameters\":{\"ubdyhgk\":\"k\",\"tsttktlahbq\":\"minsgowzf\",\"mmqtgqqqxhr\":\"ctxtgzukxi\",\"juisavokqdzf\":\"xrxc\"},\"runDimensions\":{\"nwxyiop\":\"ivjlfrqttbajlka\"},\"invokedBy\":{\"name\":\"qqfkuv\",\"id\":\"xkdmligo\",\"invokedByType\":\"brxk\",\"pipelineName\":\"loazuruocbgoo\",\"pipelineRunId\":\"te\"},\"lastUpdated\":\"2021-05-01T19:42:34Z\",\"runStart\":\"2021-05-18T20:05:21Z\",\"runEnd\":\"2021-08-27T22:46:55Z\",\"durationInMs\":1900106080,\"status\":\"vjgsl\",\"message\":\"dilmyww\",\"\":{\"edabgyvudtjue\":\"datakxn\",\"yxccyb\":\"databcihxuuwhc\",\"px\":\"datapayakkud\"}}") + "{\"runId\":\"lvofqzhvfcibyfmo\",\"runGroupId\":\"xrkjpvdw\",\"isLatest\":true,\"pipelineName\":\"iivwzjbhyzsxjrka\",\"parameters\":{\"uqeqv\":\"rnegvmn\",\"est\":\"dspastjbkkdmfl\",\"ilozapeewchpxlk\":\"jlxr\",\"ziycslevufuztck\":\"wk\"},\"runDimensions\":{\"qzz\":\"jtqedcgzulwmm\"},\"invokedBy\":{\"name\":\"vpglydz\",\"id\":\"rvqeevtoepryutn\",\"invokedByType\":\"tpzdmovzvfvaawzq\",\"pipelineName\":\"f\",\"pipelineRunId\":\"z\"},\"lastUpdated\":\"2021-05-05T00:43:37Z\",\"runStart\":\"2021-04-25T19:47:18Z\",\"runEnd\":\"2021-02-28T21:39:36Z\",\"durationInMs\":1970549549,\"status\":\"dticokpvzml\",\"message\":\"mldgxobfirc\",\"\":{\"khyawfvjlboxqv\":\"datakciayzri\"}}") .toObject(PipelineRunInner.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PipelineRunInner model = new PipelineRunInner().withAdditionalProperties(mapOf("durationInMs", 1900106080, + PipelineRunInner model = new PipelineRunInner().withAdditionalProperties(mapOf("durationInMs", 1970549549, "runDimensions", - JacksonAdapter.createDefaultSerializerAdapter().deserialize("{\"nwxyiop\":\"ivjlfrqttbajlka\"}", - Object.class, SerializerEncoding.JSON), + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"qzz\":\"jtqedcgzulwmm\"}", Object.class, SerializerEncoding.JSON), "invokedBy", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"name\":\"qqfkuv\",\"id\":\"xkdmligo\",\"invokedByType\":\"brxk\",\"pipelineName\":\"loazuruocbgoo\",\"pipelineRunId\":\"te\"}", - Object.class, 
SerializerEncoding.JSON), - "runStart", "2021-05-18T20:05:21Z", "message", "dilmyww", "pipelineName", "wlvwlyoupf", "lastUpdated", - "2021-05-01T19:42:34Z", "isLatest", true, "runId", "lqol", "runEnd", "2021-08-27T22:46:55Z", "runGroupId", - "kcgxxlxsffgcvi", "parameters", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"ubdyhgk\":\"k\",\"tsttktlahbq\":\"minsgowzf\",\"mmqtgqqqxhr\":\"ctxtgzukxi\",\"juisavokqdzf\":\"xrxc\"}", - Object.class, SerializerEncoding.JSON), - "status", "vjgsl")); + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"name\":\"vpglydz\",\"id\":\"rvqeevtoepryutn\",\"invokedByType\":\"tpzdmovzvfvaawzq\",\"pipelineName\":\"f\",\"pipelineRunId\":\"z\"}", + Object.class, SerializerEncoding.JSON), + "runStart", "2021-04-25T19:47:18Z", "message", "mldgxobfirc", "pipelineName", "iivwzjbhyzsxjrka", + "lastUpdated", "2021-05-05T00:43:37Z", "isLatest", true, "runId", "lvofqzhvfcibyfmo", "runEnd", + "2021-02-28T21:39:36Z", "runGroupId", "xrkjpvdw", "parameters", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"uqeqv\":\"rnegvmn\",\"est\":\"dspastjbkkdmfl\",\"ilozapeewchpxlk\":\"jlxr\",\"ziycslevufuztck\":\"wk\"}", + Object.class, SerializerEncoding.JSON), + "status", "dticokpvzml")); model = BinaryData.fromObject(model).toObject(PipelineRunInner.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java index 91b33a973c70a..aa636c8816d16 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java @@ -11,7 +11,7 @@ public final class PipelineRunInvokedByTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PipelineRunInvokedBy model = BinaryData.fromString( - "{\"name\":\"jplmagstcy\",\"id\":\"pfkyrkdbdgiogsj\",\"invokedByType\":\"nwqjnoba\",\"pipelineName\":\"hdd\",\"pipelineRunId\":\"acegfnmntf\"}") + "{\"name\":\"lmxhomdyn\",\"id\":\"wdigumbnraauz\",\"invokedByType\":\"tj\",\"pipelineName\":\"ysdzhez\",\"pipelineRunId\":\"vaiqyuvvf\"}") .toObject(PipelineRunInvokedBy.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java index 55035be1ef27f..adcd1d8e718a5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java @@ -6,47 +6,30 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import 
com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelineRunsCancelWithResponseMockTests { @Test public void testCancelWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.pipelineRuns().cancelWithResponse("vrfkxiixnxx", "vyizya", "xwegij", true, - com.azure.core.util.Context.NONE); + manager.pipelineRuns() + .cancelWithResponse("avrqvlu", "wwgfgtnoktilfdy", "urejvqcmpwwrpxth", true, + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java index d3b206f1b7cd7..a9424dfb074ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java @@ -6,49 +6,32 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.PipelineRun; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import 
org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelineRunsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"runId\":\"qoetckm\",\"runGroupId\":\"nsi\",\"isLatest\":false,\"pipelineName\":\"xhwvzdvujmukadz\",\"parameters\":{\"aecdcvhxwegdsmn\":\"xfqddadezn\"},\"runDimensions\":{\"ps\":\"vxel\",\"myftvejxmy\":\"bqpddypw\"},\"invokedBy\":{\"name\":\"ahhpnbvzdfyxjb\",\"id\":\"vnxwduu\",\"invokedByType\":\"vb\",\"pipelineName\":\"yedrkgrtda\",\"pipelineRunId\":\"itoimtar\"},\"lastUpdated\":\"2021-06-05T12:01:29Z\",\"runStart\":\"2021-11-06T10:40:45Z\",\"runEnd\":\"2021-08-24T21:24:23Z\",\"durationInMs\":1463714124,\"status\":\"pcduyhhz\",\"message\":\"z\",\"\":{\"jci\":\"datawkclloejshfcuzz\"}}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"runId\":\"myzoievdb\",\"runGroupId\":\"msndoxuzfapzqj\",\"isLatest\":true,\"pipelineName\":\"hlxq\",\"parameters\":{\"sftjrdilnxbtwm\":\"jgosvcoldyt\",\"mwwsya\":\"xledkjcysfluufvn\",\"ugrlnk\":\"hbikvcy\",\"ucflada\":\"ylgwceickwz\"},\"runDimensions\":{\"bmjzdtxolxzlx\":\"edbhnmbqro\",\"ohsitdlsem\":\"kyrrmrylt\"},\"invokedBy\":{\"name\":\"aakncigyqatvcwtp\",\"id\":\"cdda\",\"invokedByType\":\"ppcypjhnfld\",\"pipelineName\":\"jzvigszgfw\",\"pipelineRunId\":\"ztkldwaqftfbkd\"},\"lastUpdated\":\"2021-10-19T14:15:09Z\",\"runStart\":\"2021-03-29T20:28:40Z\",\"runEnd\":\"2021-05-20T13:13:37Z\",\"durationInMs\":403018461,\"status\":\"esasnpedvq\",\"message\":\"ljmvjojplxyolgsh\",\"\":{\"xfemixxqbrsmomr\":\"datayzizyl\",\"mjcqwgc\":\"datadq\"}}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PipelineRun response = manager.pipelineRuns() - .getWithResponse("hczqjoovypsgugh", "okbwzpxlxbv", "hkabeo", com.azure.core.util.Context.NONE).getValue(); + .getWithResponse("au", "bkihr", "sdsmmdpn", com.azure.core.util.Context.NONE) + .getValue(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java index f93dd15f656c3..8e769bd558e84 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java @@ -28,60 +28,67 @@ public final class PipelineTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Pipeline model = BinaryData.fromString( - "{\"description\":\"ybycnunvj\",\"activities\":[{\"type\":\"Activity\",\"name\":\"kfawnopqgikyz\",\"description\":\"txdyuxzejntpsew\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"kr\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"tbghhavgrvkf\":\"dataeoxorggufhyao\",\"mv\":\"dataovjzhpjbibgjmfx\",\"zzxscyhwzdgiruj\":\"datacluyovwxnbkf\",\"ujviylwdshfs\":\"datazbomvzzbtdcqvpni\"}}],\"userProperties\":[{\"name\":\"bgye\",\"value\":\"datarymsgaojfmw\"},{\"name\":\"cotmr\",\"value\":\"datahirctymoxoftpipi\"}],\"\":{\"cpqjlihhyu\":\"datazuhx\",\"x\":\"datapskasdvlmfwdg\",\"sreuzvxurisjnh\":\"datalucvpam\",\"blwpcesutrgj\":\"dataytxifqjzgxmrh\"}}],\"parameters\":{\"w\":{\"type\":\"Float\",\"defaultValue\":\"datatpwoqhihejq\"},\"xjvfoimwksl\":{\"type\":\"SecureString\",\"defaultValue\":\"datafqntcyp\"},\"ydfce\":{\"type\":\"String\",\"defaultValue\":\"dataizjx\"}},\"variables\":{\"mrtwna\":{\"type\":\"String\",\"defaultValue\":\"datavygdyft\"}},\"concurrency\":951831262,\"annotations\":[\"dataiw\",\"dataojgcyzt\",\"datafmznba\"],\"runDimensions\":{\"huwrykqgaifm\":\"datahchqnrnrpx\"},\"folder\":{\"name\":\"lb\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datahbejdznxcvdsrhnj\"}}}") + "{\"description\":\"ejqgw\",\"activities\":[{\"type\":\"qntcypsxjvfoimwk\",\"name\":\"lirc\",\"description\":\"jxv\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"vlhv\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Completed\"],\"\":{\"jslb\":\"datamrtwna\",\"aeqphchqnr\":\"datawkojgcyztsfmzn\",\"wrykqgai\":\"datarpxeh\"}},{\"activity\":\"mvikl\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"volvtn\":\"datahbejdznxcvdsrhnj\",\"fzg\":\"datav\",\"jltduceam\":\"datamjdftu\"}}],\"userProperties\":[{\"name\":\"zuo\",\"value\":\"dataejwcwwqiok\"}],\"\":{\"vk\":\"dataxmojmsvpkjp\"}}],\"parameters\":{\"dbsdshm\":{\"type\":\"Int\",\"defaultValue\":\"dataljyxgtczhe\"}},\"variables\":{\"baxk\":{\"type\":\"String\",\"defaultValue\":\"datahvbbxuripltfnh\"},\"yklyhpluodpvruud\":{\"type\":\"String\",\"defaultValue\":\"datawrck\"},\"eclze\":{\"type\":\"String\",\"defaultValue\":\"dataibthostgktstvd\"}},\"concurrency\":83422204,\"annotations\":[\"datahzlhplodqkdlww\"],\"runDimensions\":{\"lkxt\":\"datau\"},\"folder\":{\"name\":\"fsmlmbtxhwgfw\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datawcoezbrhub\"}}}") .toObject(Pipeline.class); - Assertions.assertEquals("ybycnunvj", model.description()); - Assertions.assertEquals("kfawnopqgikyz", model.activities().get(0).name()); - Assertions.assertEquals("txdyuxzejntpsew", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("kr", 
model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("ejqgw", model.description()); + Assertions.assertEquals("lirc", model.activities().get(0).name()); + Assertions.assertEquals("jxv", model.activities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("vlhv", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bgye", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("w").type()); - Assertions.assertEquals(VariableType.STRING, model.variables().get("mrtwna").type()); - Assertions.assertEquals(951831262, model.concurrency()); - Assertions.assertEquals("lb", model.folder().name()); + Assertions.assertEquals("zuo", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("dbsdshm").type()); + Assertions.assertEquals(VariableType.STRING, model.variables().get("baxk").type()); + Assertions.assertEquals(83422204, model.concurrency()); + Assertions.assertEquals("fsmlmbtxhwgfw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Pipeline model = new Pipeline().withDescription("ybycnunvj") - .withActivities(Arrays.asList(new Activity().withName("kfawnopqgikyz").withDescription("txdyuxzejntpsew") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("kr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("bgye").withValue("datarymsgaojfmw"), - new UserProperty().withName("cotmr").withValue("datahirctymoxoftpipi"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("w", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatpwoqhihejq"), - "xjvfoimwksl", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datafqntcyp"), - "ydfce", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataizjx"))) - .withVariables(mapOf("mrtwna", - new VariableSpecification().withType(VariableType.STRING).withDefaultValue("datavygdyft"))) - .withConcurrency(951831262).withAnnotations(Arrays.asList("dataiw", "dataojgcyzt", "datafmznba")) - .withRunDimensions(mapOf("huwrykqgaifm", "datahchqnrnrpx")).withFolder(new PipelineFolder().withName("lb")) + Pipeline model = new Pipeline().withDescription("ejqgw") + .withActivities(Arrays.asList(new Activity().withName("lirc") + .withDescription("jxv") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("vlhv") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("mvikl") + 
.withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("zuo").withValue("dataejwcwwqiok"))) + .withAdditionalProperties(mapOf("type", "qntcypsxjvfoimwk")))) + .withParameters(mapOf("dbsdshm", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataljyxgtczhe"))) + .withVariables(mapOf("baxk", + new VariableSpecification().withType(VariableType.STRING).withDefaultValue("datahvbbxuripltfnh"), + "yklyhpluodpvruud", + new VariableSpecification().withType(VariableType.STRING).withDefaultValue("datawrck"), "eclze", + new VariableSpecification().withType(VariableType.STRING).withDefaultValue("dataibthostgktstvd"))) + .withConcurrency(83422204) + .withAnnotations(Arrays.asList("datahzlhplodqkdlww")) + .withRunDimensions(mapOf("lkxt", "datau")) + .withFolder(new PipelineFolder().withName("fsmlmbtxhwgfw")) .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datahbejdznxcvdsrhnj"))); + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datawcoezbrhub"))); model = BinaryData.fromObject(model).toObject(Pipeline.class); - Assertions.assertEquals("ybycnunvj", model.description()); - Assertions.assertEquals("kfawnopqgikyz", model.activities().get(0).name()); - Assertions.assertEquals("txdyuxzejntpsew", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("kr", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("ejqgw", model.description()); + Assertions.assertEquals("lirc", model.activities().get(0).name()); + Assertions.assertEquals("jxv", model.activities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("vlhv", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bgye", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("w").type()); - Assertions.assertEquals(VariableType.STRING, model.variables().get("mrtwna").type()); - Assertions.assertEquals(951831262, model.concurrency()); - Assertions.assertEquals("lb", model.folder().name()); + Assertions.assertEquals("zuo", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("dbsdshm").type()); + Assertions.assertEquals(VariableType.STRING, model.variables().get("baxk").type()); + Assertions.assertEquals(83422204, model.concurrency()); + Assertions.assertEquals("fsmlmbtxhwgfw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java index 6e894411d2430..46ccd982bd588 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java @@ -6,11 +6,9 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.ActivityDependency; @@ -25,7 +23,6 @@ import com.azure.resourcemanager.datafactory.models.UserProperty; import com.azure.resourcemanager.datafactory.models.VariableSpecification; import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -33,105 +30,105 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelinesCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"description\":\"howvnzwhypjpy\",\"activities\":[{\"type\":\"Activity\",\"name\":\"ptjpsbdche\",\"description\":\"c\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"szdwwkgbzmcprtan\",\"dependencyConditions\":[]},{\"activity\":\"gehbrhwkaatjsseb\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"sjlilpicc\",\"value\":\"dataegtwstqgcjvklnrz\"},{\"name\":\"afxoyddushvyj\",\"value\":\"datahy\"}],\"\":{\"dwbnou\":\"dataxzdw\",\"m\":\"datayznyeghm\"}},{\"type\":\"Activity\",\"name\":\"xdnckgdcszz\",\"description\":\"dfdzleazvldeqmf\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"wseftljzqfpfkd\",\"dependencyConditions\":[]},{\"activity\":\"bezaxi\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"pjxtobeq\",\"value\":\"datazcadoqijfll\"},{\"name\":\"muzeolcgqjtvpalk\",\"value\":\"datawvg\"}],\"\":{\"jpauic\":\"datatdswjtuqw\",\"ugebqhbbqody\":\"datajae\",\"aa\":\"datavpcoi\"}},{\"type\":\"Activity\",\"name\":\"vaecwwdqgooabhfr\",\"description\":\"p\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"fevwcxzxvgf\",\"dependencyConditions\":[]},{\"activity\":\"zc\",\"dependencyConditions\":[]},{\"activity\":\"qwqujpug\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"alhwbypvpds\",\"value\":\"dataycjuxabpuphg\"}],\"\":{\"gzwtl\":\"dataggkkjcizrstannmj\",\"parpsrjsghuokjwv\":\"datapzshgsidkz\",\"klyvzske\":\"dataacwdukhzu\"}},{\"type\":\"Activity\",\"name\":\"eva\",\"description\":\"brdrtjak\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"yzqhbuuldztv\",\"dependencyConditions\":[]},{\"activity\":\"mvxkrxgaiddg\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"hiqwuwxrcydmkyo\",\"value\":\"datajc\"},{\"name\":\"sbygm\",\"value\":\"datanxeyfkrcmxtbwolz\"},{\"name\":\"kjaqlszlymyqpw\",\"value\":\"databuwqmicukwmz\"},{\"name\":\"hcvrfq\",\"value\":\"datambuvtiwrmcym\"}],\"\":{\"dgtkedvxhqhp\":\"datakkaztu\",\"spafurttshruj\":\"datan\",\"mr\":\"datayxal\",\"apzingxbkcirio\":\"dataiina\"}}],\"parameters\":{\"hwiezcfxzjdpl\":{\"type\":\"Float\",\"defaultValue\":\"datauvg\"},\"qnftdl\":{\"type\":\"Float\",\"defaultValue\":\"datao\"}},\"variables\":{\"ffymrzoupipdkgpt\":{\"type\":\"String\",\"defaultValue\":\"datavlgahpztvl\"},\"zrbqpzgsr\":{\"type\":\"Array\",\"defaultValue\":\"datamscrtpznychw\"},\"nvwaxmeyjimf\":{\"type\":\"Array\",\"defaultValue\":\"datafyffytw\"},\"wufuvtypvwfllri\":{\"type\":\"String\",\"defaultValue\":\"datacqvuqwzajdxmai\"}},\"concurrency\":2036809837,\"annotations\":[\"datacgnphengk\",\"datahkekxohqvqp\",\"datazoqtvmkj\"],\"runDimensions\":{\"yvbyagqipr\":\"datalakvhgefvpc\",\"ijkwzjlk\":\"dataqdtcibb\",\"timtf\":\"datamoexughztr\"},\"folder\":{\"name\":\"lq\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datangfcbldpeforx\"}}},\"name\":\"pmzkdisrgykrcj\",\"type\":\"ivnfdovwkjeguvi\",\"etag\":\"xii\",\"\":{\"oylpmeccbblg\":\"datahfrb\",\"xcrxuyorhrtihzw\":\"datalej\"},\"id\":\"flwlmh\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), 
Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"description\":\"tsgvtqvagklmgmld\",\"activities\":[{\"type\":\"rpxwpsmniixacyv\",\"name\":\"zlzdzgvfciptypm\",\"description\":\"ldghxwo\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"hem\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"ecwfnscehqkksjb\",\"value\":\"datatbvgfmrtxeij\"},{\"name\":\"qzcf\",\"value\":\"datakmwcwh\"},{\"name\":\"bacwbdxmb\",\"value\":\"dataecfjanfvrrd\"}],\"\":{\"ziadtzezravt\":\"datazyrwztra\",\"qewsyypy\":\"dataarqgakqbswyofx\",\"zxflrcbexxm\":\"datagzqlwbinzcr\"}},{\"type\":\"clapomzqbwh\",\"name\":\"qoh\",\"description\":\"izexzatrzpgld\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"woawbbrvogpr\",\"dependencyConditions\":[]},{\"activity\":\"rwzlbcel\",\"dependencyConditions\":[]},{\"activity\":\"s\",\"dependencyConditions\":[]},{\"activity\":\"qvfcgsw\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"msneg\",\"value\":\"datawomj\"},{\"name\":\"ig\",\"value\":\"datagsozmjshnelmq\"}],\"\":{\"w\":\"datagtlpddfuaw\"}}],\"parameters\":{\"wltyirppsykbxiv\":{\"type\":\"Bool\",\"defaultValue\":\"datajzfwmxdyb\"},\"uaqxrgxmfowex\":{\"type\":\"Bool\",\"defaultValue\":\"databqgoeiaoepczxcc\"}},\"variables\":{\"iw\":{\"type\":\"Array\",\"defaultValue\":\"datadiohshofodorvqvd\"},\"zcsw\":{\"type\":\"Array\",\"defaultValue\":\"dataldnn\"}},\"concurrency\":294111660,\"annotations\":[\"dataecz\"],\"runDimensions\":{\"gntvtraaijyqt\":\"dataoepjqainmr\",\"xba\":\"dataksqbkjuanrwhq\",\"cymeq\":\"datadfzpawvbt\",\"bcrcuqvmmaghaq\":\"datamigo\"},\"folder\":{\"name\":\"iuzzxzgtoycht\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"dataxwdeurn\"}}},\"name\":\"txw\",\"type\":\"xoynkgeiisjzjj\",\"etag\":\"chkjxm\",\"\":{\"dqbb\":\"datav\",\"qsrwhievbxozmd\":\"datarndwa\",\"g\":\"datafcecikqrkbl\",\"axwuyeudzyxfttf\":\"databunwosyvexcqoufs\"},\"id\":\"xluqrzi\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - PipelineResource response = manager.pipelines().define("ger").withExistingFactory("mkoxpay", "zqgsaegaah") - .withDescription("hgrgiu") + PipelineResource response = manager.pipelines() + .define("tikkotjhf") + .withExistingFactory("h", "aakdhzgzwb") + .withDescription("dvxb") .withActivities(Arrays.asList( - new Activity().withName("ykvo").withDescription("jixbrdamdnebko").withState(ActivityState.INACTIVE) + new Activity().withName("shugv") + .withDescription("ulydeigungk") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("aihepjscseu") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new 
UserProperty().withName("ohspvbwjmt").withValue("dataavue"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("yotejljdrerzjwex").withDescription("r").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ddcxfuizo").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("gwccqgdejupuo") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zme").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("pme") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("if").withValue("datat"), + new UserProperty().withName("yzfcybauwfr").withValue("dataznuljodv"), + new UserProperty().withName("pttts").withValue("datacpdtgz"))) + .withAdditionalProperties(mapOf("type", "vmqyplpyyzaoqfs")), + new Activity().withName("gjsfxiiiqivslpoe") + .withDescription("zahfswcbkjqlwfzl") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("qihhos") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cjsneybpqot").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("pebhv") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("db").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("rsihdtphtfo") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("s").withValue("datadvuptret"), - new UserProperty().withName("auupwt").withValue("datatpbi"), - new UserProperty().withName("ab").withValue("dataegcogyctekaaju"), - new UserProperty().withName("kxbgfed").withValue("datacbjsyorsojv"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("s").withDescription("ikbtz").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("xarqtkzeopoxd") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("pn").withValue("datatzbswf"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("predcl").withDescription("lvuzqsv").withState(ActivityState.INACTIVE) + .withUserProperties(Arrays.asList(new UserProperty().withName("r").withValue("datapn"), + new UserProperty().withName("mg").withValue("datajuvlffgurztd"), + new UserProperty().withName("oykqorwyykmlpi").withValue("datas"))) + .withAdditionalProperties(mapOf("type", "svwrcbmxmbutxtt")), + new Activity().withName("qmvezcdepikul") + .withDescription("mygpcobhvbxai") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("a") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("i") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) .withUserProperties( 
- Arrays.asList(new UserProperty().withName("ymdrbmffcryy").withValue("datakwwhscubgwz"), - new UserProperty().withName("anplzbzcgzhd").withValue("datavk"), - new UserProperty().withName("zvhfog").withValue("dataoocnseoq"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("zguu", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataedifwdrr"), - "dlxqjshyyrcr", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataoohzifbbsncorini"), - "lt", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataqsfaurmqpkgwf"))) - .withVariables(mapOf("pxsy", - new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datahhxlibdnpeamsl"), - "aulsmqohvcvv", new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("datafvwrdy"))) - .withConcurrency(1111207450) - .withAnnotations(Arrays.asList("datagrphoabhkyas", "dataccwievjndvaf", "datacvn", "datayxlcgycvcspcfx")) - .withRunDimensions(mapOf("afmbxtncxbyurut", "dataqioq", "uv", "dataa", "vgjvumdznblkofd", - "datauweqbeygnetuvs", "fiwaklflwqdjzb", "datalrtlhpfu")) - .withFolder(new PipelineFolder().withName("byks")) + Arrays.asList(new UserProperty().withName("dpsfihggkixhz").withValue("datagwxvobhjgexenji"), + new UserProperty().withName("kruem").withValue("datahegshyzifn"), + new UserProperty().withName("qgwfulqfvzegxd").withValue("datakxxdymc"), + new UserProperty().withName("cuze").withValue("dataaq"))) + .withAdditionalProperties(mapOf("type", "opksetkfc")))) + .withParameters(mapOf("trinajirigjfaqz", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxv"), + "mkdqmfwzkullgkxw", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datalkpp"), + "zbgwhwxlrlsdq", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataqhzqzr"), "trp", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxsbyoogn"))) + .withVariables(mapOf("yboldmpzwcf", + new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("datai"), "mfggiryrzjatfvq", + new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("datartbuzlgkevp"), "utqy", + new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("databpcsgfswgfdq"), "zsvpfj", + new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("dataepogosktphc"))) + .withConcurrency(1513478816) + .withAnnotations(Arrays.asList("dataifrpj", "datakzvwdtylu", "datatvwwhufdoomhxl")) + .withRunDimensions(mapOf("xzw", "datajtpwdec")) + .withFolder(new PipelineFolder().withName("caq")) .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datayprrix"))) - .withIfMatch("ccotgqgevie").create(); + .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datazdzackskuec"))) + .withIfMatch("jzs") + .create(); - Assertions.assertEquals("flwlmh", response.id()); - Assertions.assertEquals("howvnzwhypjpy", response.description()); - Assertions.assertEquals("ptjpsbdche", response.activities().get(0).name()); - Assertions.assertEquals("c", response.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, response.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, response.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("szdwwkgbzmcprtan", 
response.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("sjlilpicc", response.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, response.parameters().get("hwiezcfxzjdpl").type()); - Assertions.assertEquals(VariableType.STRING, response.variables().get("ffymrzoupipdkgpt").type()); - Assertions.assertEquals(2036809837, response.concurrency()); - Assertions.assertEquals("lq", response.folder().name()); + Assertions.assertEquals("xluqrzi", response.id()); + Assertions.assertEquals("tsgvtqvagklmgmld", response.description()); + Assertions.assertEquals("zlzdzgvfciptypm", response.activities().get(0).name()); + Assertions.assertEquals("ldghxwo", response.activities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, response.activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, response.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("hem", response.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("ecwfnscehqkksjb", response.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.BOOL, response.parameters().get("wltyirppsykbxiv").type()); + Assertions.assertEquals(VariableType.ARRAY, response.variables().get("iw").type()); + Assertions.assertEquals(294111660, response.concurrency()); + Assertions.assertEquals("iuzzxzgtoycht", response.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java index 0fee93afab148..bc031735d8512 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java @@ -6,53 +6,38 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.CreateRunResponse; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelinesCreateRunWithResponseMockTests { @Test public void testCreateRunWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - - String responseStr = "{\"runId\":\"hxjwiggca\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - 
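// The "// Use \"Map.of\" if available" note marks the generated mapOf(...) helper used
// throughout these tests. Its body is not shown in these hunks; a plausible sketch,
// assuming it is a varargs key/value builder over HashMap (Map.of rejects null values,
// which the fuzzed samples may contain), is:
@SuppressWarnings("unchecked")
private static <T> Map<String, T> mapOf(Object... inputs) {
    Map<String, T> map = new HashMap<>();
    for (int i = 0; i < inputs.length; i += 2) {
        String key = (String) inputs[i];
        T value = (T) inputs[i + 1];
        map.put(key, value);
    }
    return map;
}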
Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - CreateRunResponse response = manager.pipelines().createRunWithResponse("ksqiqzmgxunld", "k", "qcnjiwzqn", - "jkst", true, "mfnjuzvww", false, mapOf("jagehq", "datay"), com.azure.core.util.Context.NONE).getValue(); - - Assertions.assertEquals("hxjwiggca", response.runId()); + String responseStr = "{\"runId\":\"xibwhdhgenmefb\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + CreateRunResponse response = manager.pipelines() + .createRunWithResponse("ywnl", "mfizfovlufkyg", "yklw", "kdblpeutahm", false, "ersvplnab", true, + mapOf("zlbdbhfaso", "dataiktglaux", "n", "datavhqgkfcspodm", "woxtevonmq", "datakixithykgeqnfe"), + com.azure.core.util.Context.NONE) + .getValue(); + + Assertions.assertEquals("xibwhdhgenmefb", response.runId()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java index e1f570af51061..791f9fabdb16a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java @@ -6,46 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelinesDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = 
Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.pipelines().deleteWithResponse("xvjvwk", "aqqkq", "ijyzhmf", com.azure.core.util.Context.NONE); + manager.pipelines().deleteWithResponse("txjrhatpebewli", "egdizjdagn", "hd", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java index db59ad6346e2c..a150a5b14175f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java @@ -6,67 +6,49 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; import com.azure.resourcemanager.datafactory.models.ActivityState; import com.azure.resourcemanager.datafactory.models.ParameterType; import com.azure.resourcemanager.datafactory.models.PipelineResource; import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelinesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = 
Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"description\":\"bu\",\"activities\":[{\"type\":\"Activity\",\"name\":\"btwgdlfgmuojn\",\"description\":\"wzlowusaddmj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"zlgjcepxay\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"pavutiszwycle\",\"value\":\"dataagbjmwe\"}],\"\":{\"dwhmwxhvsp\":\"datapzamqbcitlyyph\",\"ucrynsqxyowwr\":\"datamokmymspatpvebxe\"}}],\"parameters\":{\"ugidk\":{\"type\":\"String\",\"defaultValue\":\"datawhrshlkswfpqrusx\"}},\"variables\":{\"kqqdqxslbrtt\":{\"type\":\"Array\",\"defaultValue\":\"datadtr\"},\"kjnbc\":{\"type\":\"Bool\",\"defaultValue\":\"datasktzrdxxsbbdo\"},\"cyhsbvair\":{\"type\":\"String\",\"defaultValue\":\"datae\"},\"wfajajpojzazzaks\":{\"type\":\"Array\",\"defaultValue\":\"dataxfbmqgimwivqphdd\"}},\"concurrency\":1823251390,\"annotations\":[\"datapcfsqdzi\",\"datavykysavevnerpyzu\",\"dataqvinvryxwzxjxotp\",\"dataiwm\"],\"runDimensions\":{\"kinsry\":\"dataxwokmdaihgig\"},\"folder\":{\"name\":\"bhpfpvadyxjcckhg\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datazvrqbzlmv\"}}},\"name\":\"fhduniqumpspo\",\"type\":\"tpnyzytgkd\",\"etag\":\"tmmv\",\"\":{\"xidytj\":\"dataqzfjlprljilpua\",\"yvrlycikwqtlg\":\"datakfxzgopckmmagfbr\",\"w\":\"dataryourl\",\"tsqmikljczxotb\":\"datajvsqzchysqy\"},\"id\":\"lpqfxyywsx\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = 
"{\"properties\":{\"description\":\"hzmckdtwmsmhnzm\",\"activities\":[{\"type\":\"xkslofpguck\",\"name\":\"w\",\"description\":\"epqdcox\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"pgmugged\",\"dependencyConditions\":[]},{\"activity\":\"zuxm\",\"dependencyConditions\":[]},{\"activity\":\"eowaeagqmzxyi\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fpidtf\",\"value\":\"datahctqxzktauqavo\"},{\"name\":\"lyntvou\",\"value\":\"datasqcmgusuaiquoda\"},{\"name\":\"liefhfjywms\",\"value\":\"datambdkv\"}],\"\":{\"bqceltf\":\"databmlpuzv\",\"hndqzuqdhuqlev\":\"datapzwyrtnkkjzj\",\"flk\":\"datayamdehdv\",\"eqfyuorqzlnek\":\"dataqxsadwkonnoul\"}},{\"type\":\"gsjyzdmsy\",\"name\":\"zabj\",\"description\":\"eftldjesxbov\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"jgwrn\",\"dependencyConditions\":[]},{\"activity\":\"dxab\",\"dependencyConditions\":[]},{\"activity\":\"xqpsjtipvszn\",\"dependencyConditions\":[]},{\"activity\":\"sbpueymimicjjy\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"pqtlgzu\",\"value\":\"dataktbbbv\"}],\"\":{\"kocdqo\":\"datamhc\",\"kbthmrfytxvxurmo\":\"datalbqkjqkgh\",\"qof\":\"dataj\"}}],\"parameters\":{\"nkugzchsxsnipnl\":{\"type\":\"Object\",\"defaultValue\":\"datakykb\"},\"oezathxzfqjwv\":{\"type\":\"SecureString\",\"defaultValue\":\"dataheukbjpldn\"},\"lpzwog\":{\"type\":\"Bool\",\"defaultValue\":\"dataxyrngirdvuvesep\"}},\"variables\":{\"ebcokrugr\":{\"type\":\"Bool\",\"defaultValue\":\"dataoqvuc\"},\"phme\":{\"type\":\"Array\",\"defaultValue\":\"datapuxvcypqt\"},\"fkg\":{\"type\":\"Bool\",\"defaultValue\":\"datac\"}},\"concurrency\":713633064,\"annotations\":[\"dataojvscobnre\",\"datarownnxq\"],\"runDimensions\":{\"qimaklsv\":\"dataalqlkimni\",\"dioadbcwvut\":\"dataggxlddkiwkmbvu\"},\"folder\":{\"name\":\"kkdydsj\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datanbxptew\"}}},\"name\":\"hiowdk\",\"type\":\"opgdwhhsd\",\"etag\":\"mufeq\",\"\":{\"rjeopih\":\"datazbglcfego\",\"ohhksremmboup\":\"datahoxusvwranek\"},\"id\":\"ywfzf\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PipelineResource response = manager.pipelines() - .getWithResponse("omqwfwtwjzzyiib", "klya", "lvhxutctakkd", "usasfjwtyvv", com.azure.core.util.Context.NONE) + .getWithResponse("aqambi", "kuh", "r", "xgsjbhlqpn", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("lpqfxyywsx", response.id()); - Assertions.assertEquals("bu", response.description()); - Assertions.assertEquals("btwgdlfgmuojn", response.activities().get(0).name()); - Assertions.assertEquals("wzlowusaddmj", response.activities().get(0).description()); + Assertions.assertEquals("ywfzf", response.id()); + Assertions.assertEquals("hzmckdtwmsmhnzm", response.description()); + Assertions.assertEquals("w", response.activities().get(0).name()); + Assertions.assertEquals("epqdcox", response.activities().get(0).description()); 
Assertions.assertEquals(ActivityState.INACTIVE, response.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, response.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("zlgjcepxay", response.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("pavutiszwycle", response.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.STRING, response.parameters().get("ugidk").type()); - Assertions.assertEquals(VariableType.ARRAY, response.variables().get("kqqdqxslbrtt").type()); - Assertions.assertEquals(1823251390, response.concurrency()); - Assertions.assertEquals("bhpfpvadyxjcckhg", response.folder().name()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, response.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("pgmugged", response.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("fpidtf", response.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals(ParameterType.OBJECT, response.parameters().get("nkugzchsxsnipnl").type()); + Assertions.assertEquals(VariableType.BOOL, response.variables().get("ebcokrugr").type()); + Assertions.assertEquals(713633064, response.concurrency()); + Assertions.assertEquals("kkdydsj", response.folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java index 8d5ed35c54aca..43fac82c4e5d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java @@ -6,70 +6,51 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; import com.azure.resourcemanager.datafactory.models.ActivityState; import com.azure.resourcemanager.datafactory.models.ParameterType; import com.azure.resourcemanager.datafactory.models.PipelineResource; import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PipelinesListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"value\":[{\"properties\":{\"description\":\"jvrxyr\",\"activities\":[{\"type\":\"Activity\",\"name\":\"rsyckq\",\"description\":\"fmqhtr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"hhl\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"wffrzgeobzmxzrai\",\"value\":\"datalz\"},{\"name\":\"r\",\"value\":\"datajpnxz\"},{\"name\":\"rc\",\"value\":\"datayysycey\"}],\"\":{\"ghppy\":\"datalxhymc\",\"ygtetmpw\":\"dataro\",\"mnoecfjw\":\"datalbqdxvxdfkdwk\",\"r\":\"datakiupgmdsz\"}},{\"type\":\"Activity\",\"name\":\"djxltjsm\",\"description\":\"ecr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"is\",\"dependencyConditions\":[]},{\"activity\":\"goapxdmxwetkj\",\"dependencyConditions\":[]},{\"activity\":\"xekql\",\"dependencyConditions\":[]},{\"activity\":\"ctdxargqff\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fbfqw\",\"value\":\"datalzbtqzjmi\"},{\"name\":\"dcsdfbkiwumrf\",\"value\":\"datajrptsjecqwdosbs\"}],\"\":{\"cpwrgry\":\"dataiacwdxvlku\"}},{\"type\":\"Activity\",\"name\":\"blrqeqcdikcqc\",\"description\":\"dtfthnjxid\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xgxtccmq\",\"dependencyConditions\":[]},{\"activity\":\"kuqrrzuegin\",\"dependencyConditions\":[]},{\"activity\":\"lnteoapszxqnjxv\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"cqm\",\"value\":\"datajihmcgzwoijtlhx\"},{\"name\":\"sxxra\",\"value\":\"datagaicgqgafkrtsa\"},{\"name\":\"agvq\",\"value\":\"datatcr\"},{\"name\":\"fxqhnw\",\"value\":\"dataujrnfdqlzggvo\"}],\"\":{\"a\":\"dataiumrdbqujyijqc\",\"lbfknxz\":\"datanpsvgupqwqshmn\"}},{\"type\":\"Activity\",\"name\":\"suvjbfryortbre\",\"description\":\"ccpbtvgiokz\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"wawomkzussgj\",\"dependencyConditions\":[]},{\"activity\":\"ubggjdluwbmwu\",\"dependencyConditions\":[]},{\"activity\":\"bekzcmfibboz\",\"dependencyConditions\":[]},{\"activity\":\"ptwvamymswfwc\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"sopffhqxh\",\"value\":\"datacohrhwxvvom\"},{\"name\":\"jpjrxvsggaubrk\",\"value\":\"datafvlqwi\"},{\"name\":\"eoenpihtgigaeeq\",\"value\":\"datapvirozlfcc\"}],\"\":{\"p\":\"dataqixxxgltqldlhhqp\"}}],\"parameters\":{\"nogyvpfyjlfnjmwb\":{\"type\":\"String\",\"defaultValue\":\"datacajhnnbp\"},\"vzqbvdlhc\":{\"type\":\"Int\",\"defaultValue\":\"datahyprpw\"},\"jqz\":{\"type\":\"String\",\"defaultValue\":\"datakmpxtfcrugitjnw\"},\"nbkcqoy\":{\"type\":\"Float\",\"defaultValue\":\"datafea\"}},\"variables\":{\"tzlflqpanceow\":{\"type\":\"Bool\",\"defaultValue\":\"databfbzhczyhtj\"},\"qu\":{\"type\":\"Array\",\"defaultValue\":\"datazxluozmedaq\"}},\"concurrency\":1169190885,\"annotations\":[\"dataux\",\"databsl\",\"datausvbvtcti\",\"dataovfe\"],\"runDimensions\":{\"jjrhvdxfs\":\"datagbacmnj\",\"lsrx\":\"datambbj\"},\"folder\":{\"name\":\"kcvksv\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datarfnlhlfv\"}}},\"name\":\"ohyecblvpwuqq\",\"type\":\"fuuhm\",\"etag\":\"s\",\"\":{\"zvvrfplkemvvlge\":\"datarcxamgvi\",\"sh\":\"datay\",\"hxkifjvilazopwud\":\"dataplhwplyvqofpemc\",\"xaufowhmd\":\"dataew\"},\"id\":\"gaktugephqdoctg\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - 
Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"description\":\"zhhmshonnmbaott\",\"activities\":[{\"type\":\"l\",\"name\":\"on\",\"description\":\"zwfukjwvmmya\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"afz\",\"dependencyConditions\":[]},{\"activity\":\"xzyrzqnxuab\",\"dependencyConditions\":[]},{\"activity\":\"uerncgvjm\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"qruolmumz\",\"value\":\"datajhspy\"}],\"\":{\"ytbjfinhvl\":\"datajdmgzmpbfho\",\"nydehojqh\":\"datatdhsyarkzogovlth\",\"vapretiydlrj\":\"datas\"}},{\"type\":\"mp\",\"name\":\"vbrt\",\"description\":\"id\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"shv\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"kje\",\"value\":\"datadfsgkrr\"},{\"name\":\"ltgbbxghxaqds\",\"value\":\"datatbgslllc\"},{\"name\":\"fhrbqq\",\"value\":\"datamttxmghsbcoguer\"}],\"\":{\"xrdszpoewfanfejk\":\"datajbtytdxhgpjewqgy\",\"jswhoh\":\"datautnaavtjhikcp\"}}],\"parameters\":{\"wx\":{\"type\":\"Object\",\"defaultValue\":\"dataqvximgjksbpudjh\"},\"edxwbhnsbejq\":{\"type\":\"String\",\"defaultValue\":\"datavceudxhnwg\"},\"exdfwr\":{\"type\":\"Object\",\"defaultValue\":\"datalchy\"},\"kbuiuspbeq\":{\"type\":\"Object\",\"defaultValue\":\"datazpokgrr\"}},\"variables\":{\"anqdbufde\":{\"type\":\"Bool\",\"defaultValue\":\"dataahrfhxrvarvtb\"},\"kuvqnop\":{\"type\":\"Bool\",\"defaultValue\":\"datahxngqpbbybhjozn\"},\"myboj\":{\"type\":\"String\",\"defaultValue\":\"databvi\"},\"bfz\":{\"type\":\"Bool\",\"defaultValue\":\"datat\"}},\"concurrency\":725820457,\"annotations\":[\"datawzsazfzyrleih\",\"datand\"],\"runDimensions\":{\"ypj\":\"datamjxpjnvkp\",\"ffiiecignemlahz\":\"datamj\"},\"folder\":{\"name\":\"izsacmtcevwzai\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"dataugilyykwwot\"}}},\"name\":\"hqahfzaihgmpyd\",\"type\":\"vkolbepqjji\",\"etag\":\"cuyiklmcrtcsc\",\"\":{\"xjhqtihuos\":\"datapcwhyk\"},\"id\":\"sgccxwhcyd\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.pipelines().listByFactory("tdvwmefjpoe", "lyvbvxlrltrztrlo", com.azure.core.util.Context.NONE); + = manager.pipelines().listByFactory("nwgchvgpunxnfo", "cc", com.azure.core.util.Context.NONE); - Assertions.assertEquals("gaktugephqdoctg", response.iterator().next().id()); - Assertions.assertEquals("jvrxyr", response.iterator().next().description()); - Assertions.assertEquals("rsyckq", response.iterator().next().activities().get(0).name()); - Assertions.assertEquals("fmqhtr", response.iterator().next().activities().get(0).description()); + 
Assertions.assertEquals("sgccxwhcyd", response.iterator().next().id()); + Assertions.assertEquals("zhhmshonnmbaott", response.iterator().next().description()); + Assertions.assertEquals("on", response.iterator().next().activities().get(0).name()); + Assertions.assertEquals("zwfukjwvmmya", response.iterator().next().activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, response.iterator().next().activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, response.iterator().next().activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("hhl", response.iterator().next().activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("wffrzgeobzmxzrai", + Assertions.assertEquals("afz", response.iterator().next().activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("qruolmumz", response.iterator().next().activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.STRING, - response.iterator().next().parameters().get("nogyvpfyjlfnjmwb").type()); - Assertions.assertEquals(VariableType.BOOL, response.iterator().next().variables().get("tzlflqpanceow").type()); - Assertions.assertEquals(1169190885, response.iterator().next().concurrency()); - Assertions.assertEquals("kcvksv", response.iterator().next().folder().name()); + Assertions.assertEquals(ParameterType.OBJECT, response.iterator().next().parameters().get("wx").type()); + Assertions.assertEquals(VariableType.BOOL, response.iterator().next().variables().get("anqdbufde").type()); + Assertions.assertEquals(725820457, response.iterator().next().concurrency()); + Assertions.assertEquals("izsacmtcevwzai", response.iterator().next().folder().name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java index 3a46a1c64b53a..e013bb8ec4da8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java @@ -15,7 +15,7 @@ public final class PolybaseSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PolybaseSettings model = BinaryData.fromString( - "{\"rejectType\":\"value\",\"rejectValue\":\"datagrqzwypwhfybflrp\",\"rejectSampleValue\":\"datagqqxe\",\"useTypeDefault\":\"dataghpsqvuised\",\"\":{\"btpvwx\":\"datavfjkxxnqrqdx\",\"zss\":\"datalsvicvpagwohkro\",\"jpiezthflgpsal\":\"datamlozjyovrllvhbgk\",\"wzpfbiqjrz\":\"datanan\"}}") + "{\"rejectType\":\"value\",\"rejectValue\":\"datakscrsbrhkd\",\"rejectSampleValue\":\"dataaxoajg\",\"useTypeDefault\":\"datacachsojgagey\",\"\":{\"gfxwynzbeemlsrtg\":\"datakkzkzprjqb\",\"dglpu\":\"datagcmutuk\"}}") .toObject(PolybaseSettings.class); Assertions.assertEquals(PolybaseSettingsRejectType.VALUE, model.rejectType()); } @@ -23,8 +23,10 @@ public void testDeserialize() throws Exception { @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PolybaseSettings model = new PolybaseSettings().withRejectType(PolybaseSettingsRejectType.VALUE) - .withRejectValue("datagrqzwypwhfybflrp").withRejectSampleValue("datagqqxe") - 
.withUseTypeDefault("dataghpsqvuised").withAdditionalProperties(mapOf()); + .withRejectValue("datakscrsbrhkd") + .withRejectSampleValue("dataaxoajg") + .withUseTypeDefault("datacachsojgagey") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(PolybaseSettings.class); Assertions.assertEquals(PolybaseSettingsRejectType.VALUE, model.rejectType()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java index 73bf29740e6a6..fa701b874f14d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java @@ -11,16 +11,19 @@ public final class PostgreSqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PostgreSqlSource model = BinaryData.fromString( - "{\"type\":\"PostgreSqlSource\",\"query\":\"datawtdazebif\",\"queryTimeout\":\"datanxugiorbwyeyrnbu\",\"additionalColumns\":\"dataabtow\",\"sourceRetryCount\":\"datauhlw\",\"sourceRetryWait\":\"datavzuxfsmfpd\",\"maxConcurrentConnections\":\"dataoqpzwfvnoy\",\"disableMetricsCollection\":\"datafjylhvp\",\"\":{\"jyavkyjvctq\":\"datarqwjtswemotjk\"}}") + "{\"type\":\"aeu\",\"query\":\"datanfbms\",\"queryTimeout\":\"dataerbdd\",\"additionalColumns\":\"datangdctmjz\",\"sourceRetryCount\":\"datagvheqzlqevas\",\"sourceRetryWait\":\"datagoodfh\",\"maxConcurrentConnections\":\"datauegdynyddp\",\"disableMetricsCollection\":\"dataizlzxht\",\"\":{\"znhol\":\"datalgm\",\"okuykrvxmobn\":\"dataflsjwawlkqmznkcw\"}}") .toObject(PostgreSqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PostgreSqlSource model - = new PostgreSqlSource().withSourceRetryCount("datauhlw").withSourceRetryWait("datavzuxfsmfpd") - .withMaxConcurrentConnections("dataoqpzwfvnoy").withDisableMetricsCollection("datafjylhvp") - .withQueryTimeout("datanxugiorbwyeyrnbu").withAdditionalColumns("dataabtow").withQuery("datawtdazebif"); + PostgreSqlSource model = new PostgreSqlSource().withSourceRetryCount("datagvheqzlqevas") + .withSourceRetryWait("datagoodfh") + .withMaxConcurrentConnections("datauegdynyddp") + .withDisableMetricsCollection("dataizlzxht") + .withQueryTimeout("dataerbdd") + .withAdditionalColumns("datangdctmjz") + .withQuery("datanfbms"); model = BinaryData.fromObject(model).toObject(PostgreSqlSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java index 6bb974ef9af93..8aa8b7a5a03d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java @@ -19,31 +19,36 @@ public final class PostgreSqlTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { 
PostgreSqlTableDataset model = BinaryData.fromString( - "{\"type\":\"PostgreSqlTable\",\"typeProperties\":{\"tableName\":\"databgwzhbhflj\",\"table\":\"dataod\",\"schema\":\"dataovnlhrwya\"},\"description\":\"uafapwxsvdeatjio\",\"structure\":\"datairgoextqdn\",\"schema\":\"datagntimz\",\"linkedServiceName\":{\"referenceName\":\"upbmtbsetkods\",\"parameters\":{\"jyvdhdgdiwmlg\":\"dataedaakghcrzmm\",\"fkakhgkrvtyycvy\":\"datatmfetqjisjmolzca\",\"ejqaw\":\"datav\",\"pbbimh\":\"datausqpfzxkczbd\"}},\"parameters\":{\"zl\":{\"type\":\"Float\",\"defaultValue\":\"dataoortclnhbjcy\"},\"lkv\":{\"type\":\"SecureString\",\"defaultValue\":\"datascibv\"}},\"annotations\":[\"dataafnwqh\"],\"folder\":{\"name\":\"cnviulby\"},\"\":{\"umwhmxpuck\":\"datajzrycwpb\"}}") + "{\"type\":\"fdyfao\",\"typeProperties\":{\"tableName\":\"datahfxa\",\"table\":\"datayihjcwwvaosckfa\",\"schema\":\"datakhpspww\"},\"description\":\"ng\",\"structure\":\"datanrdpuzlaihsujt\",\"schema\":\"dataybsz\",\"linkedServiceName\":{\"referenceName\":\"mijxejpd\",\"parameters\":{\"rnhhjtvh\":\"dataqwzutiye\",\"toqwrnfc\":\"datasznsyovqmqcud\",\"awfubkngejjxu\":\"datajthlokmx\"}},\"parameters\":{\"drsjtmn\":{\"type\":\"String\",\"defaultValue\":\"datajmoozmxuk\"},\"w\":{\"type\":\"Object\",\"defaultValue\":\"datao\"},\"tia\":{\"type\":\"String\",\"defaultValue\":\"datafdt\"}},\"annotations\":[\"datarnuhcfhepisqbc\",\"datalr\",\"datai\",\"datammemsoqgblcye\"],\"folder\":{\"name\":\"bobaqca\"},\"\":{\"wnlpjcxbjg\":\"datackcesrsi\"}}") .toObject(PostgreSqlTableDataset.class); - Assertions.assertEquals("uafapwxsvdeatjio", model.description()); - Assertions.assertEquals("upbmtbsetkods", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("zl").type()); - Assertions.assertEquals("cnviulby", model.folder().name()); + Assertions.assertEquals("ng", model.description()); + Assertions.assertEquals("mijxejpd", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("drsjtmn").type()); + Assertions.assertEquals("bobaqca", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PostgreSqlTableDataset model = new PostgreSqlTableDataset().withDescription("uafapwxsvdeatjio") - .withStructure("datairgoextqdn").withSchema("datagntimz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("upbmtbsetkods") - .withParameters(mapOf("jyvdhdgdiwmlg", "dataedaakghcrzmm", "fkakhgkrvtyycvy", "datatmfetqjisjmolzca", - "ejqaw", "datav", "pbbimh", "datausqpfzxkczbd"))) - .withParameters(mapOf("zl", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataoortclnhbjcy"), "lkv", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datascibv"))) - .withAnnotations(Arrays.asList("dataafnwqh")).withFolder(new DatasetFolder().withName("cnviulby")) - .withTableName("databgwzhbhflj").withTable("dataod").withSchemaTypePropertiesSchema("dataovnlhrwya"); + PostgreSqlTableDataset model = new PostgreSqlTableDataset().withDescription("ng") + .withStructure("datanrdpuzlaihsujt") + .withSchema("dataybsz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mijxejpd") + .withParameters(mapOf("rnhhjtvh", "dataqwzutiye", "toqwrnfc", "datasznsyovqmqcud", "awfubkngejjxu", + "datajthlokmx"))) + .withParameters(mapOf("drsjtmn", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datajmoozmxuk"), "w", 
+ new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datao"), "tia", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datafdt"))) + .withAnnotations(Arrays.asList("datarnuhcfhepisqbc", "datalr", "datai", "datammemsoqgblcye")) + .withFolder(new DatasetFolder().withName("bobaqca")) + .withTableName("datahfxa") + .withTable("datayihjcwwvaosckfa") + .withSchemaTypePropertiesSchema("datakhpspww"); model = BinaryData.fromObject(model).toObject(PostgreSqlTableDataset.class); - Assertions.assertEquals("uafapwxsvdeatjio", model.description()); - Assertions.assertEquals("upbmtbsetkods", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("zl").type()); - Assertions.assertEquals("cnviulby", model.folder().name()); + Assertions.assertEquals("ng", model.description()); + Assertions.assertEquals("mijxejpd", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("drsjtmn").type()); + Assertions.assertEquals("bobaqca", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java index 88934d2e0eba1..6274de935b116 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java @@ -10,15 +10,16 @@ public final class PostgreSqlTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PostgreSqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataastlpsmgo\",\"table\":\"datac\",\"schema\":\"datarvlvvjmx\"}") - .toObject(PostgreSqlTableDatasetTypeProperties.class); + PostgreSqlTableDatasetTypeProperties model = BinaryData + .fromString("{\"tableName\":\"datay\",\"table\":\"datayfr\",\"schema\":\"datadzfpsfyakidfhmlx\"}") + .toObject(PostgreSqlTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PostgreSqlTableDatasetTypeProperties model = new PostgreSqlTableDatasetTypeProperties() - .withTableName("dataastlpsmgo").withTable("datac").withSchema("datarvlvvjmx"); + PostgreSqlTableDatasetTypeProperties model = new PostgreSqlTableDatasetTypeProperties().withTableName("datay") + .withTable("datayfr") + .withSchema("datadzfpsfyakidfhmlx"); model = BinaryData.fromObject(model).toObject(PostgreSqlTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java index b8d366a120752..218c1b397e75e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java @@ -11,16 +11,19 @@ public final class PostgreSqlV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PostgreSqlV2Source model = BinaryData.fromString( - "{\"type\":\"PostgreSqlV2Source\",\"query\":\"datacz\",\"queryTimeout\":\"datapaeyklxsvcbr\",\"additionalColumns\":\"datalt\",\"sourceRetryCount\":\"datamdsngoaofmrph\",\"sourceRetryWait\":\"datas\",\"maxConcurrentConnections\":\"dataunkcgdnhacex\",\"disableMetricsCollection\":\"dataomrrjooepfb\",\"\":{\"gntjmnlpklrjd\":\"dataffxan\",\"kvi\":\"datayp\",\"k\":\"datagnjatjbldgik\",\"akuzezwnqhcpk\":\"datawge\"}}") + "{\"type\":\"svmp\",\"query\":\"datamgnjmskuv\",\"queryTimeout\":\"datazpyptgwm\",\"additionalColumns\":\"datarhc\",\"sourceRetryCount\":\"datarzilvcncdazw\",\"sourceRetryWait\":\"datagoravovqpn\",\"maxConcurrentConnections\":\"dataufvggv\",\"disableMetricsCollection\":\"datamezfyelf\",\"\":{\"jzgyzjtmakhgat\":\"datakbhjdkq\",\"zoopzaenlzjx\":\"datankihbfixybtow\",\"wva\":\"datatgduwun\"}}") .toObject(PostgreSqlV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PostgreSqlV2Source model - = new PostgreSqlV2Source().withSourceRetryCount("datamdsngoaofmrph").withSourceRetryWait("datas") - .withMaxConcurrentConnections("dataunkcgdnhacex").withDisableMetricsCollection("dataomrrjooepfb") - .withQueryTimeout("datapaeyklxsvcbr").withAdditionalColumns("datalt").withQuery("datacz"); + PostgreSqlV2Source model = new PostgreSqlV2Source().withSourceRetryCount("datarzilvcncdazw") + .withSourceRetryWait("datagoravovqpn") + .withMaxConcurrentConnections("dataufvggv") + .withDisableMetricsCollection("datamezfyelf") + .withQueryTimeout("datazpyptgwm") + .withAdditionalColumns("datarhc") + .withQuery("datamgnjmskuv"); model = BinaryData.fromObject(model).toObject(PostgreSqlV2Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java index 788f7cf69066e..7ed8c1294ce41 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java @@ -19,32 +19,36 @@ public final class PostgreSqlV2TableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PostgreSqlV2TableDataset model = BinaryData.fromString( - "{\"type\":\"PostgreSqlV2Table\",\"typeProperties\":{\"table\":\"dataxamqecjrzvlcivqx\",\"schema\":\"datamklphxwww\"},\"description\":\"jkbgnfbr\",\"structure\":\"datavfsunhaevla\",\"schema\":\"dataczywywuahwc\",\"linkedServiceName\":{\"referenceName\":\"rewcnnaaxqjfda\",\"parameters\":{\"mggewdqbxexfy\":\"datagim\"}},\"parameters\":{\"yzqbye\":{\"type\":\"Bool\",\"defaultValue\":\"datasuqkslwsfx\"},\"kffydztkqrfbgy\":{\"type\":\"Object\",\"defaultValue\":\"datamohnrtl\"}},\"annotations\":[\"datawvzdte\",\"datajmyqxuhgka\",\"datanyprijyoxxjxbs\"],\"folder\":{\"name\":\"rlcck\"},\"\":{\"mqomxoalknuy\":\"datapjmspau\",\"zi\":\"datapvi\"}}") + 
"{\"type\":\"vvoydwedggwg\",\"typeProperties\":{\"table\":\"datai\",\"schema\":\"datajrgtruwpuqpsrce\"},\"description\":\"vbwa\",\"structure\":\"dataadrjbjng\",\"schema\":\"datar\",\"linkedServiceName\":{\"referenceName\":\"rdrwiem\",\"parameters\":{\"rxirq\":\"dataniapypi\",\"nytkmlfupjzc\":\"dataipzesstu\",\"ahxuerrdaktnytk\":\"dataxvzjoyxj\"}},\"parameters\":{\"fqjckmpwyvlhnhhc\":{\"type\":\"SecureString\",\"defaultValue\":\"datacvcp\"},\"lrcygotohzwto\":{\"type\":\"Int\",\"defaultValue\":\"datalebgjgylsacagi\"}},\"annotations\":[\"databxitrapwzhlutj\",\"datajzelsriemvupmea\",\"dataosysycv\",\"datade\"],\"folder\":{\"name\":\"bsaipusuofkegbvb\"},\"\":{\"f\":\"dataedfflzvsluazz\",\"ee\":\"dataveugpx\",\"scboxra\":\"datapup\"}}") .toObject(PostgreSqlV2TableDataset.class); - Assertions.assertEquals("jkbgnfbr", model.description()); - Assertions.assertEquals("rewcnnaaxqjfda", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("yzqbye").type()); - Assertions.assertEquals("rlcck", model.folder().name()); + Assertions.assertEquals("vbwa", model.description()); + Assertions.assertEquals("rdrwiem", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("fqjckmpwyvlhnhhc").type()); + Assertions.assertEquals("bsaipusuofkegbvb", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PostgreSqlV2TableDataset model = new PostgreSqlV2TableDataset().withDescription("jkbgnfbr") - .withStructure("datavfsunhaevla").withSchema("dataczywywuahwc") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rewcnnaaxqjfda") - .withParameters(mapOf("mggewdqbxexfy", "datagim"))) - .withParameters(mapOf("yzqbye", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datasuqkslwsfx"), - "kffydztkqrfbgy", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datamohnrtl"))) - .withAnnotations(Arrays.asList("datawvzdte", "datajmyqxuhgka", "datanyprijyoxxjxbs")) - .withFolder(new DatasetFolder().withName("rlcck")).withTable("dataxamqecjrzvlcivqx") - .withSchemaTypePropertiesSchema("datamklphxwww"); + PostgreSqlV2TableDataset model + = new PostgreSqlV2TableDataset().withDescription("vbwa") + .withStructure("dataadrjbjng") + .withSchema("datar") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rdrwiem") + .withParameters(mapOf("rxirq", "dataniapypi", "nytkmlfupjzc", "dataipzesstu", "ahxuerrdaktnytk", + "dataxvzjoyxj"))) + .withParameters(mapOf("fqjckmpwyvlhnhhc", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacvcp"), + "lrcygotohzwto", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalebgjgylsacagi"))) + .withAnnotations(Arrays.asList("databxitrapwzhlutj", "datajzelsriemvupmea", "dataosysycv", "datade")) + .withFolder(new DatasetFolder().withName("bsaipusuofkegbvb")) + .withTable("datai") + .withSchemaTypePropertiesSchema("datajrgtruwpuqpsrce"); model = BinaryData.fromObject(model).toObject(PostgreSqlV2TableDataset.class); - Assertions.assertEquals("jkbgnfbr", model.description()); - Assertions.assertEquals("rewcnnaaxqjfda", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("yzqbye").type()); - Assertions.assertEquals("rlcck", model.folder().name()); + Assertions.assertEquals("vbwa", model.description()); + 
Assertions.assertEquals("rdrwiem", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("fqjckmpwyvlhnhhc").type()); + Assertions.assertEquals("bsaipusuofkegbvb", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java index 59a8a0faa268c..c4290ffd09c3e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class PostgreSqlV2TableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PostgreSqlV2TableDatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"datavic\",\"schema\":\"dataaptk\"}") + = BinaryData.fromString("{\"table\":\"dataczmrjgobe\",\"schema\":\"dataeheow\"}") .toObject(PostgreSqlV2TableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PostgreSqlV2TableDatasetTypeProperties model - = new PostgreSqlV2TableDatasetTypeProperties().withTable("datavic").withSchema("dataaptk"); + = new PostgreSqlV2TableDatasetTypeProperties().withTable("dataczmrjgobe").withSchema("dataeheow"); model = BinaryData.fromObject(model).toObject(PostgreSqlV2TableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java index 0a66b84716c60..9cbf63d5aa45b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java @@ -20,101 +20,55 @@ public final class PowerQuerySinkMappingTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PowerQuerySinkMapping model = BinaryData.fromString( - 
"{\"queryName\":\"zecdomjrrolwr\",\"dataflowSinks\":[{\"script\":\"qxstyk\",\"schemaLinkedService\":{\"referenceName\":\"fqmgjexi\",\"parameters\":{\"rcnoexwar\":\"datavp\",\"ycevhazwew\":\"dataazfsrvz\",\"dcnxjfg\":\"dataobxlkdycsp\",\"uozwowwmulqgaeqn\":\"dataynuxvyalk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"tqlbjezcwf\",\"parameters\":{\"epdvxmkzgrrg\":\"datakrzu\",\"sgebw\":\"datako\"}},\"name\":\"vuvq\",\"description\":\"plzdoamqkdwa\",\"dataset\":{\"referenceName\":\"yahurxtpuyuradf\",\"parameters\":{\"k\":\"dataounvfqykni\",\"htllkpkcqzbvyrv\":\"datadfwfzkocdjwjmrb\"}},\"linkedService\":{\"referenceName\":\"cbatmvxrji\",\"parameters\":{\"evfrbujltg\":\"dataosy\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"yl\",\"datasetParameters\":\"dataswwhbmo\",\"parameters\":{\"yqsds\":\"dataknsknnnpyobyi\",\"rijd\":\"dataewf\",\"iwebmcizmggvsxv\":\"datakmcrtmvtfeyopg\",\"erkyimcfmdhwtlli\":\"datawrqywaagzaxqh\"},\"\":{\"vjppujpldthshcj\":\"dataxcl\",\"wmzz\":\"dataoobltoargcntgqy\",\"mabuurtuqwvy\":\"datagbgvf\",\"ymdae\":\"dataxmuehfkb\"}}},{\"script\":\"jjqchhrnfavqefi\",\"schemaLinkedService\":{\"referenceName\":\"hbkxzqryovlhm\",\"parameters\":{\"fmyiwy\":\"databiagwu\",\"msyfjno\":\"datatau\",\"a\":\"dataeibcezdtf\",\"zcsff\":\"datalwllgjerql\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"u\",\"parameters\":{\"spk\":\"datateyh\",\"r\":\"datakvkmfkmchc\",\"fdbahxcwjqtfs\":\"datajvyosmxov\",\"uay\":\"datacakbezdvnez\"}},\"name\":\"ejwqeypaoa\",\"description\":\"kwhiyusjhmjlk\",\"dataset\":{\"referenceName\":\"wsjavmr\",\"parameters\":{\"ivkzgcqy\":\"datasvkjnlerm\",\"frbdlsjftqahfvpm\":\"datavfekjvclbkkj\"}},\"linkedService\":{\"referenceName\":\"mu\",\"parameters\":{\"knpbzktkw\":\"datankawkyhnnw\",\"j\":\"datatumjtgzutwpsnl\",\"fseykprgpqnesu\":\"datagrebbonjhm\",\"pwhldxb\":\"datansmtgza\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tt\",\"datasetParameters\":\"datasgmugzssgzkevv\",\"parameters\":{\"abrhiao\":\"datavcxabzwehvsmt\",\"ziquirozqusdz\":\"dataldtkqoajpxtkraf\",\"siupdmbhau\":\"datanhhjdfy\",\"jkjxb\":\"datapwgteroaenvjou\"},\"\":{\"vogfmpdlm\":\"datazr\",\"nklbwyqoypodrq\":\"dataynl\",\"jerokbdkwvjond\":\"datajxmgxsp\"}}},{\"script\":\"iydj\",\"schemaLinkedService\":{\"referenceName\":\"djmylhcj\",\"parameters\":{\"pnlwuhtfa\":\"dataloyvrwz\",\"absqpttulhanjui\":\"dataafklbkigzvugwbc\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"xp\",\"parameters\":{\"ienzskok\":\"datadujuo\"}},\"name\":\"qbjncnbnlppuzbip\",\"description\":\"z\",\"dataset\":{\"referenceName\":\"yrkddpnobc\",\"parameters\":{\"kxcnovkwvzrxaix\":\"datamdswrtifxbhuz\",\"puuvzyfjmor\":\"dataigesbei\"}},\"linkedService\":{\"referenceName\":\"pjaktszr\",\"parameters\":{\"xrwgddgpqfflsw\":\"datarphtjljfmhgd\",\"fr\":\"dataeh\",\"evtykfxos\":\"datazqeinnbu\",\"uhfwklsthjvyk\":\"dataz\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"gwpulrtjweuorojr\",\"datasetParameters\":\"dataxswohshncddz\",\"parameters\":{\"lctgiontvgrloshk\":\"datatntdidhhacpw\",\"v\":\"datathui\"},\"\":{\"osfsfuzqpig\":\"datawswpwbgoetu\"}}},{\"script\":\"nmdeimagmw\",\"schemaLinkedService\":{\"referenceName\":\"xeu\",\"parameters\":{\"qqjcyhvyr\":\"datatkllbfnnhert\",\"expcxylquowun\":\"datageuvujywldkjay\",\"wseulfzxgh\":\"dataactjp\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"lrvpaumkz\",\"parameters\":{\"pjn\":\"datangkfipxolpujlm\",\"rmevkckoce\":\"datalsbxvouxcdenthgp\",\"arohw\":\"datahlvfniryh\",\"bamnkgmosayfyvod\":\"dataxjhzwsjqrmxpyz\
"}},\"name\":\"tpczzqusf\",\"description\":\"wsrrrgijolyspky\",\"dataset\":{\"referenceName\":\"yaej\",\"parameters\":{\"ecccfycywcuhqfxf\":\"datafkkhgqs\",\"st\":\"datarfzaurermnyphcoo\"}},\"linkedService\":{\"referenceName\":\"bzrnvu\",\"parameters\":{\"cczz\":\"datajyttgkpscm\",\"mgezkb\":\"datala\",\"dngbgpxoe\":\"datairftlomec\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"efavbsbhd\",\"datasetParameters\":\"dataaqafalbkem\",\"parameters\":{\"brrkvxmeihrzi\":\"datavdhvdv\"},\"\":{\"dsu\":\"datanpojmgkeoqrx\",\"kbvaxehiegk\":\"dataonjuwgvse\",\"i\":\"dataukvalcvlbqht\"}}}]}") + "{\"queryName\":\"rx\",\"dataflowSinks\":[{\"script\":\"ptvr\",\"schemaLinkedService\":{\"referenceName\":\"cpr\",\"parameters\":{\"hdnx\":\"dataswozpm\",\"n\":\"datakfesursbyfoavozq\",\"kzchcmuvskdvqyf\":\"datamxitvmrq\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"wxcabfrvjpfojh\",\"parameters\":{\"dxfvjdfusuwght\":\"datadoqyohzhundfk\"}},\"name\":\"pgarhf\",\"description\":\"dedi\",\"dataset\":{\"referenceName\":\"dpc\",\"parameters\":{\"sdzfle\":\"datapmw\"}},\"linkedService\":{\"referenceName\":\"fsgrheak\",\"parameters\":{\"vpbjclih\":\"dataukmnu\",\"mjqjoamzdsa\":\"datazriigteqyp\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"tkqbvtdeouqixgtp\",\"datasetParameters\":\"databjevjjjuwdv\",\"parameters\":{\"ucobpkphxh\":\"datalbfrch\"},\"\":{\"umjysukezqohthsm\":\"dataekxbc\",\"yuwuypou\":\"datauaoyp\",\"dfreyrgrgft\":\"datapdclajujso\"}}}]}") .toObject(PowerQuerySinkMapping.class); - Assertions.assertEquals("zecdomjrrolwr", model.queryName()); - Assertions.assertEquals("vuvq", model.dataflowSinks().get(0).name()); - Assertions.assertEquals("plzdoamqkdwa", model.dataflowSinks().get(0).description()); - Assertions.assertEquals("yahurxtpuyuradf", model.dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("cbatmvxrji", model.dataflowSinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("rx", model.queryName()); + Assertions.assertEquals("pgarhf", model.dataflowSinks().get(0).name()); + Assertions.assertEquals("dedi", model.dataflowSinks().get(0).description()); + Assertions.assertEquals("dpc", model.dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("fsgrheak", model.dataflowSinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("yl", model.dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("fqmgjexi", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("tqlbjezcwf", model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("qxstyk", model.dataflowSinks().get(0).script()); + Assertions.assertEquals("tkqbvtdeouqixgtp", model.dataflowSinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("cpr", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("wxcabfrvjpfojh", + model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("ptvr", model.dataflowSinks().get(0).script()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PowerQuerySinkMapping model = new PowerQuerySinkMapping().withQueryName("zecdomjrrolwr") - .withDataflowSinks(Arrays.asList( - new PowerQuerySink().withName("vuvq").withDescription("plzdoamqkdwa") - 
.withDataset(new DatasetReference().withReferenceName("yahurxtpuyuradf") - .withParameters(mapOf("k", "dataounvfqykni", "htllkpkcqzbvyrv", "datadfwfzkocdjwjmrb"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("cbatmvxrji") - .withParameters(mapOf("evfrbujltg", "dataosy"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("yl").withDatasetParameters("dataswwhbmo") - .withParameters(mapOf("yqsds", "dataknsknnnpyobyi", "rijd", "dataewf", "iwebmcizmggvsxv", - "datakmcrtmvtfeyopg", "erkyimcfmdhwtlli", "datawrqywaagzaxqh")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("fqmgjexi") - .withParameters(mapOf("rcnoexwar", "datavp", "ycevhazwew", "dataazfsrvz", "dcnxjfg", - "dataobxlkdycsp", "uozwowwmulqgaeqn", "dataynuxvyalk"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("tqlbjezcwf") - .withParameters(mapOf("epdvxmkzgrrg", "datakrzu", "sgebw", "datako"))) - .withScript("qxstyk"), - new PowerQuerySink().withName("ejwqeypaoa").withDescription("kwhiyusjhmjlk") - .withDataset(new DatasetReference().withReferenceName("wsjavmr") - .withParameters(mapOf("ivkzgcqy", "datasvkjnlerm", "frbdlsjftqahfvpm", "datavfekjvclbkkj"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("mu") - .withParameters(mapOf("knpbzktkw", "datankawkyhnnw", "j", "datatumjtgzutwpsnl", - "fseykprgpqnesu", "datagrebbonjhm", "pwhldxb", "datansmtgza"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("tt").withDatasetParameters("datasgmugzssgzkevv") - .withParameters(mapOf("abrhiao", "datavcxabzwehvsmt", "ziquirozqusdz", "dataldtkqoajpxtkraf", - "siupdmbhau", "datanhhjdfy", "jkjxb", "datapwgteroaenvjou")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("hbkxzqryovlhm") - .withParameters(mapOf("fmyiwy", "databiagwu", "msyfjno", "datatau", "a", "dataeibcezdtf", - "zcsff", "datalwllgjerql"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("u") - .withParameters(mapOf("spk", "datateyh", "r", "datakvkmfkmchc", "fdbahxcwjqtfs", - "datajvyosmxov", "uay", "datacakbezdvnez"))) - .withScript("jjqchhrnfavqefi"), - new PowerQuerySink().withName("qbjncnbnlppuzbip").withDescription("z") - .withDataset(new DatasetReference().withReferenceName("yrkddpnobc") - .withParameters(mapOf("kxcnovkwvzrxaix", "datamdswrtifxbhuz", "puuvzyfjmor", "dataigesbei"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pjaktszr") - .withParameters(mapOf("xrwgddgpqfflsw", "datarphtjljfmhgd", "fr", "dataeh", "evtykfxos", - "datazqeinnbu", "uhfwklsthjvyk", "dataz"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("gwpulrtjweuorojr").withDatasetParameters("dataxswohshncddz") - .withParameters(mapOf("lctgiontvgrloshk", "datatntdidhhacpw", "v", "datathui")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("djmylhcj") - .withParameters(mapOf("pnlwuhtfa", "dataloyvrwz", "absqpttulhanjui", "dataafklbkigzvugwbc"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("xp") - .withParameters(mapOf("ienzskok", "datadujuo"))) - .withScript("iydj"), - new PowerQuerySink().withName("tpczzqusf").withDescription("wsrrrgijolyspky") - .withDataset(new 
DatasetReference().withReferenceName("yaej") - .withParameters(mapOf("ecccfycywcuhqfxf", "datafkkhgqs", "st", "datarfzaurermnyphcoo"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("bzrnvu").withParameters( - mapOf("cczz", "datajyttgkpscm", "mgezkb", "datala", "dngbgpxoe", "datairftlomec"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("efavbsbhd").withDatasetParameters("dataaqafalbkem") - .withParameters(mapOf("brrkvxmeihrzi", "datavdhvdv")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("xeu") - .withParameters(mapOf("qqjcyhvyr", "datatkllbfnnhert", "expcxylquowun", "datageuvujywldkjay", - "wseulfzxgh", "dataactjp"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("lrvpaumkz") - .withParameters(mapOf("pjn", "datangkfipxolpujlm", "rmevkckoce", "datalsbxvouxcdenthgp", - "arohw", "datahlvfniryh", "bamnkgmosayfyvod", "dataxjhzwsjqrmxpyz"))) - .withScript("nmdeimagmw"))); + PowerQuerySinkMapping model = new PowerQuerySinkMapping().withQueryName("rx") + .withDataflowSinks(Arrays.asList(new PowerQuerySink().withName("pgarhf") + .withDescription("dedi") + .withDataset(new DatasetReference().withReferenceName("dpc").withParameters(mapOf("sdzfle", "datapmw"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("fsgrheak") + .withParameters(mapOf("vpbjclih", "dataukmnu", "mjqjoamzdsa", "datazriigteqyp"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("tkqbvtdeouqixgtp") + .withDatasetParameters("databjevjjjuwdv") + .withParameters(mapOf("ucobpkphxh", "datalbfrch")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService( + new LinkedServiceReference().withReferenceName("cpr") + .withParameters(mapOf("hdnx", "dataswozpm", "n", "datakfesursbyfoavozq", "kzchcmuvskdvqyf", + "datamxitvmrq"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("wxcabfrvjpfojh") + .withParameters(mapOf("dxfvjdfusuwght", "datadoqyohzhundfk"))) + .withScript("ptvr"))); model = BinaryData.fromObject(model).toObject(PowerQuerySinkMapping.class); - Assertions.assertEquals("zecdomjrrolwr", model.queryName()); - Assertions.assertEquals("vuvq", model.dataflowSinks().get(0).name()); - Assertions.assertEquals("plzdoamqkdwa", model.dataflowSinks().get(0).description()); - Assertions.assertEquals("yahurxtpuyuradf", model.dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("cbatmvxrji", model.dataflowSinks().get(0).linkedService().referenceName()); + Assertions.assertEquals("rx", model.queryName()); + Assertions.assertEquals("pgarhf", model.dataflowSinks().get(0).name()); + Assertions.assertEquals("dedi", model.dataflowSinks().get(0).description()); + Assertions.assertEquals("dpc", model.dataflowSinks().get(0).dataset().referenceName()); + Assertions.assertEquals("fsgrheak", model.dataflowSinks().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("yl", model.dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("fqmgjexi", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("tqlbjezcwf", model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("qxstyk", 
model.dataflowSinks().get(0).script()); + Assertions.assertEquals("tkqbvtdeouqixgtp", model.dataflowSinks().get(0).flowlet().referenceName()); + Assertions.assertEquals("cpr", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("wxcabfrvjpfojh", + model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("ptvr", model.dataflowSinks().get(0).script()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java index 066d94003a754..741aa17910519 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java @@ -18,45 +18,50 @@ public final class PowerQuerySinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PowerQuerySink model = BinaryData.fromString( - "{\"script\":\"oykdno\",\"schemaLinkedService\":{\"referenceName\":\"axwmgzru\",\"parameters\":{\"cbgvsbt\":\"datacwnynlleiq\",\"xmnrqstjcmet\":\"dataertoxadhxuvj\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"l\",\"parameters\":{\"gjnaqyqipsl\":\"datavnpvvd\",\"tfo\":\"datamvcdsvmwbitek\"}},\"name\":\"vfiybxqichgyb\",\"description\":\"dqekivycpzcvd\",\"dataset\":{\"referenceName\":\"ulrqtbht\",\"parameters\":{\"ryfmxmdu\":\"datapzl\",\"giln\":\"datazf\"}},\"linkedService\":{\"referenceName\":\"dccgndjgdpriggqq\",\"parameters\":{\"buu\":\"dataf\",\"igi\":\"datapyuflqjfshtujcyo\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"csklkqnq\",\"datasetParameters\":\"dataixnmbz\",\"parameters\":{\"mbzayspzvrietv\":\"datauyrzw\",\"dvatlzmgschn\":\"dataphmdzxplgtp\"},\"\":{\"vlzdmnfm\":\"databkkz\"}}}") + "{\"script\":\"ygmgsevm\",\"schemaLinkedService\":{\"referenceName\":\"zenlrstgfcz\",\"parameters\":{\"idt\":\"datan\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"amyyznmrgcdogcv\",\"parameters\":{\"aqm\":\"dataytoxuwhttnzq\",\"dzzmssgpgv\":\"databgszplusdek\",\"squnycwztlv\":\"datakyejidbdq\"}},\"name\":\"wsnmrkkyjt\",\"description\":\"pwpwfkcauxuva\",\"dataset\":{\"referenceName\":\"pfpdof\",\"parameters\":{\"evsolzwilf\":\"dataclbtxl\",\"uuxjxhfx\":\"datagojfsqe\",\"mbmxbmbrwgzzxl\":\"datajwpdkkt\",\"dmhweqjfyxydgto\":\"databkhxsdplaum\"}},\"linkedService\":{\"referenceName\":\"qbvwglgw\",\"parameters\":{\"mqiydvxcgdh\":\"dataxakglhpsesrfg\",\"cbqp\":\"datahgoqgsoyqyxyj\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"pglqjo\",\"datasetParameters\":\"datadahneaoovty\",\"parameters\":{\"fsr\":\"dataivfwjlofze\",\"jvcfoczh\":\"dataaukl\"},\"\":{\"yrza\":\"datajovtkwxnhwhhn\"}}}") .toObject(PowerQuerySink.class); - Assertions.assertEquals("vfiybxqichgyb", model.name()); - Assertions.assertEquals("dqekivycpzcvd", model.description()); - Assertions.assertEquals("ulrqtbht", model.dataset().referenceName()); - Assertions.assertEquals("dccgndjgdpriggqq", model.linkedService().referenceName()); + Assertions.assertEquals("wsnmrkkyjt", model.name()); + Assertions.assertEquals("pwpwfkcauxuva", model.description()); + Assertions.assertEquals("pfpdof", model.dataset().referenceName()); + 
Assertions.assertEquals("qbvwglgw", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("csklkqnq", model.flowlet().referenceName()); - Assertions.assertEquals("axwmgzru", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("l", model.rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("oykdno", model.script()); + Assertions.assertEquals("pglqjo", model.flowlet().referenceName()); + Assertions.assertEquals("zenlrstgfcz", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("amyyznmrgcdogcv", model.rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("ygmgsevm", model.script()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PowerQuerySink model = new PowerQuerySink().withName("vfiybxqichgyb").withDescription("dqekivycpzcvd") - .withDataset(new DatasetReference().withReferenceName("ulrqtbht") - .withParameters(mapOf("ryfmxmdu", "datapzl", "giln", "datazf"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("dccgndjgdpriggqq") - .withParameters(mapOf("buu", "dataf", "igi", "datapyuflqjfshtujcyo"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("csklkqnq").withDatasetParameters("dataixnmbz") - .withParameters(mapOf("mbzayspzvrietv", "datauyrzw", "dvatlzmgschn", "dataphmdzxplgtp")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("axwmgzru") - .withParameters(mapOf("cbgvsbt", "datacwnynlleiq", "xmnrqstjcmet", "dataertoxadhxuvj"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("l") - .withParameters(mapOf("gjnaqyqipsl", "datavnpvvd", "tfo", "datamvcdsvmwbitek"))) - .withScript("oykdno"); + PowerQuerySink model + = new PowerQuerySink().withName("wsnmrkkyjt") + .withDescription("pwpwfkcauxuva") + .withDataset(new DatasetReference().withReferenceName("pfpdof") + .withParameters(mapOf("evsolzwilf", "dataclbtxl", "uuxjxhfx", "datagojfsqe", "mbmxbmbrwgzzxl", + "datajwpdkkt", "dmhweqjfyxydgto", "databkhxsdplaum"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("qbvwglgw") + .withParameters(mapOf("mqiydvxcgdh", "dataxakglhpsesrfg", "cbqp", "datahgoqgsoyqyxyj"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("pglqjo") + .withDatasetParameters("datadahneaoovty") + .withParameters(mapOf("fsr", "dataivfwjlofze", "jvcfoczh", "dataaukl")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService( + new LinkedServiceReference().withReferenceName("zenlrstgfcz").withParameters(mapOf("idt", "datan"))) + .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("amyyznmrgcdogcv") + .withParameters(mapOf("aqm", "dataytoxuwhttnzq", "dzzmssgpgv", "databgszplusdek", "squnycwztlv", + "datakyejidbdq"))) + .withScript("ygmgsevm"); model = BinaryData.fromObject(model).toObject(PowerQuerySink.class); - Assertions.assertEquals("vfiybxqichgyb", model.name()); - Assertions.assertEquals("dqekivycpzcvd", model.description()); - Assertions.assertEquals("ulrqtbht", model.dataset().referenceName()); - Assertions.assertEquals("dccgndjgdpriggqq", model.linkedService().referenceName()); + Assertions.assertEquals("wsnmrkkyjt", model.name()); + Assertions.assertEquals("pwpwfkcauxuva", model.description()); + 
Assertions.assertEquals("pfpdof", model.dataset().referenceName()); + Assertions.assertEquals("qbvwglgw", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("csklkqnq", model.flowlet().referenceName()); - Assertions.assertEquals("axwmgzru", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("l", model.rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("oykdno", model.script()); + Assertions.assertEquals("pglqjo", model.flowlet().referenceName()); + Assertions.assertEquals("zenlrstgfcz", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("amyyznmrgcdogcv", model.rejectedDataLinkedService().referenceName()); + Assertions.assertEquals("ygmgsevm", model.script()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java index 30d15895235b9..3150a08e705da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java @@ -18,41 +18,44 @@ public final class PowerQuerySourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PowerQuerySource model = BinaryData.fromString( - "{\"script\":\"wqikwepwogggic\",\"schemaLinkedService\":{\"referenceName\":\"xhtfmcqbsudzpgch\",\"parameters\":{\"djxh\":\"datafqum\",\"v\":\"dataghgodkynrceq\",\"mmxjdnajuopj\":\"datadbd\",\"cwlo\":\"datayqmkwlh\"}},\"name\":\"dejkluxxrwzobuz\",\"description\":\"xga\",\"dataset\":{\"referenceName\":\"dtkwppth\",\"parameters\":{\"imrljdp\":\"datapr\",\"mhk\":\"dataqfxy\",\"loamfmxtllfltym\":\"datatbaewhte\",\"rfijhggabq\":\"datacn\"}},\"linkedService\":{\"referenceName\":\"amklilirwdv\",\"parameters\":{\"jxrdfd\":\"datasdpzouhktqrxqwq\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"llygta\",\"datasetParameters\":\"datazcxdfweapyfmlxrl\",\"parameters\":{\"x\":\"dataraspifleim\"},\"\":{\"cehfgsm\":\"databg\",\"mtznpaxwfqtyyqi\":\"datarjuqbpxtokl\",\"i\":\"datarcltungbsoljckm\",\"iiqqcqikclsmalns\":\"datazbkuckgkdsksw\"}}}") + "{\"script\":\"krehyhtmj\",\"schemaLinkedService\":{\"referenceName\":\"uif\",\"parameters\":{\"gdezvjqwahoy\":\"dataxfaryrvj\",\"nvovbooqbmdqrxy\":\"datayaxqvjweiwtczkd\"}},\"name\":\"laetscflwtjdtlr\",\"description\":\"fooy\",\"dataset\":{\"referenceName\":\"uxdtzcq\",\"parameters\":{\"lantolamlb\":\"datadudgcozzomeh\",\"z\":\"datajuxkqll\"}},\"linkedService\":{\"referenceName\":\"vwdtgckzdqiqdl\",\"parameters\":{\"uwxsuykznhrfgsl\":\"datarkwxo\",\"eo\":\"datalhpryjfzihuio\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"jtfeyvkbdgddkr\",\"datasetParameters\":\"dataccxbeuuqutkz\",\"parameters\":{\"xfn\":\"datawwguzytij\",\"ckcatuqbhpo\":\"datandegjdydhqkkkbj\",\"toakatprytgrhz\":\"datacnxtpzdlysei\",\"wrp\":\"dataqfdpf\"},\"\":{\"tbqgdirdamm\":\"datashtkutzc\",\"bwjjirmuydgf\":\"datazjgcfjfx\",\"uoft\":\"datatmdofgeoag\",\"aj\":\"dataxodwxm\"}}}") .toObject(PowerQuerySource.class); - Assertions.assertEquals("dejkluxxrwzobuz", model.name()); - Assertions.assertEquals("xga", 
model.description()); - Assertions.assertEquals("dtkwppth", model.dataset().referenceName()); - Assertions.assertEquals("amklilirwdv", model.linkedService().referenceName()); + Assertions.assertEquals("laetscflwtjdtlr", model.name()); + Assertions.assertEquals("fooy", model.description()); + Assertions.assertEquals("uxdtzcq", model.dataset().referenceName()); + Assertions.assertEquals("vwdtgckzdqiqdl", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("llygta", model.flowlet().referenceName()); - Assertions.assertEquals("xhtfmcqbsudzpgch", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("wqikwepwogggic", model.script()); + Assertions.assertEquals("jtfeyvkbdgddkr", model.flowlet().referenceName()); + Assertions.assertEquals("uif", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("krehyhtmj", model.script()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PowerQuerySource model = new PowerQuerySource().withName("dejkluxxrwzobuz").withDescription("xga") - .withDataset(new DatasetReference().withReferenceName("dtkwppth") - .withParameters(mapOf("imrljdp", "datapr", "mhk", "dataqfxy", "loamfmxtllfltym", "datatbaewhte", - "rfijhggabq", "datacn"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("amklilirwdv") - .withParameters(mapOf("jxrdfd", "datasdpzouhktqrxqwq"))) + PowerQuerySource model = new PowerQuerySource().withName("laetscflwtjdtlr") + .withDescription("fooy") + .withDataset(new DatasetReference().withReferenceName("uxdtzcq") + .withParameters(mapOf("lantolamlb", "datadudgcozzomeh", "z", "datajuxkqll"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("vwdtgckzdqiqdl") + .withParameters(mapOf("uwxsuykznhrfgsl", "datarkwxo", "eo", "datalhpryjfzihuio"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("llygta").withDatasetParameters("datazcxdfweapyfmlxrl") - .withParameters(mapOf("x", "dataraspifleim")).withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("xhtfmcqbsudzpgch").withParameters( - mapOf("djxh", "datafqum", "v", "dataghgodkynrceq", "mmxjdnajuopj", "datadbd", "cwlo", "datayqmkwlh"))) - .withScript("wqikwepwogggic"); + .withReferenceName("jtfeyvkbdgddkr") + .withDatasetParameters("dataccxbeuuqutkz") + .withParameters(mapOf("xfn", "datawwguzytij", "ckcatuqbhpo", "datandegjdydhqkkkbj", "toakatprytgrhz", + "datacnxtpzdlysei", "wrp", "dataqfdpf")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("uif") + .withParameters(mapOf("gdezvjqwahoy", "dataxfaryrvj", "nvovbooqbmdqrxy", "datayaxqvjweiwtczkd"))) + .withScript("krehyhtmj"); model = BinaryData.fromObject(model).toObject(PowerQuerySource.class); - Assertions.assertEquals("dejkluxxrwzobuz", model.name()); - Assertions.assertEquals("xga", model.description()); - Assertions.assertEquals("dtkwppth", model.dataset().referenceName()); - Assertions.assertEquals("amklilirwdv", model.linkedService().referenceName()); + Assertions.assertEquals("laetscflwtjdtlr", model.name()); + Assertions.assertEquals("fooy", model.description()); + Assertions.assertEquals("uxdtzcq", model.dataset().referenceName()); + Assertions.assertEquals("vwdtgckzdqiqdl", model.linkedService().referenceName()); 
Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("llygta", model.flowlet().referenceName()); - Assertions.assertEquals("xhtfmcqbsudzpgch", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("wqikwepwogggic", model.script()); + Assertions.assertEquals("jtfeyvkbdgddkr", model.flowlet().referenceName()); + Assertions.assertEquals("uif", model.schemaLinkedService().referenceName()); + Assertions.assertEquals("krehyhtmj", model.script()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java index 7d19bb88c478c..f624038f03e3d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java @@ -20,75 +20,104 @@ public final class PowerQueryTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PowerQueryTypeProperties model = BinaryData.fromString( - "{\"sources\":[{\"script\":\"zkn\",\"schemaLinkedService\":{\"referenceName\":\"kmjqncfvdsc\",\"parameters\":{\"vndrwbgodtg\":\"datamvwfnqqwy\",\"ai\":\"datarssgwjf\"}},\"name\":\"jmu\",\"description\":\"vecvzts\",\"dataset\":{\"referenceName\":\"gmusaictdscnkzzo\",\"parameters\":{\"hlbzqixbnjrqvzy\":\"dataddclzeqozr\",\"me\":\"dataexozonynp\",\"jxvcvaso\":\"datadpabcreuwzosg\",\"xzv\":\"datamr\"}},\"linkedService\":{\"referenceName\":\"b\",\"parameters\":{\"uvecovsd\":\"datazygba\",\"akrlimzfvppkeqsi\":\"datahzrtd\",\"gygnhrkombc\":\"datajmcl\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"jdopggorwjo\",\"datasetParameters\":\"datarotpvclpof\",\"parameters\":{\"kptskwxjgvhxc\":\"datam\",\"kmkook\":\"databmk\",\"wk\":\"dataputmgvmuyakm\"},\"\":{\"qjimejtgzjxxlfej\":\"datawzkroyrdurxfl\"}}},{\"script\":\"uqloiwyayyzivrmi\",\"schemaLinkedService\":{\"referenceName\":\"dql\",\"parameters\":{\"xfns\":\"datawhrktjleifibfipl\",\"mhn\":\"dataycjowlyeyzmudsq\"}},\"name\":\"lzbuwodmachbkvn\",\"description\":\"bjrmvgo\",\"dataset\":{\"referenceName\":\"lehmum\",\"parameters\":{\"prwnhkgqggoxsst\":\"datallcz\"}},\"linkedService\":{\"referenceName\":\"vrak\",\"parameters\":{\"emjpequ\":\"dataynjcwmhlymgnukxr\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"zaudgjtfbclakkuc\",\"datasetParameters\":\"datawnhczbutou\",\"parameters\":{\"cqqwwvgwkslvlize\":\"datatirjwayh\",\"v\":\"datavbia\",\"wkhojqttbspvkhg\":\"datasrgekzyqxadyfhb\",\"xrk\":\"dataaqjsgyzstujr\"},\"\":{\"lduyehiiittugyuq\":\"datad\",\"csozjv\":\"datarldaxurfqa\"}}},{\"script\":\"zciggbnvtxofwa\",\"schemaLinkedService\":{\"referenceName\":\"yxwhoeamo\",\"parameters\":{\"fpnimtwuuhaueg\":\"datadoey\",\"zjy\":\"datakwmnfeub\"}},\"name\":\"kwfugiphrrkuu\",\"description\":\"qdurhzzfopue\",\"dataset\":{\"referenceName\":\"usvwluj\",\"parameters\":{\"fmwc\":\"datanibittoztjdqumq\",\"rbelfnzz\":\"dataddtgctxegtvgwy\",\"unomir\":\"datayizwbxgdebxla\"}},\"linkedService\":{\"referenceName\":\"fabenqla\",\"parameters\":{\"bcpjstbhem\":\"datagewayxfl\",\"eapdrbzyv\":\"datacucsqsnxfbxu\"}},\"flowlet\":{\"type\":\"DataFl
owReference\",\"referenceName\":\"adulpodkaxp\",\"datasetParameters\":\"databkzhmhjd\",\"parameters\":{\"hfzt\":\"datadeluqroja\",\"bkuwpzqxlc\":\"dataraysrkgzkyhu\",\"ecjvxf\":\"dataeak\",\"ppwooaj\":\"dataqufqizj\"},\"\":{\"xpxhnzlslekc\":\"datajmjjxi\",\"adeghztldsvc\":\"datatgzkjtyqpd\",\"qymjzucwwmejjqhd\":\"datadjiah\",\"ookyfoz\":\"datawvmqxi\"}}}],\"script\":\"nzxbyp\",\"documentLocale\":\"pgaixwrgrkkderf\"}") + "{\"sources\":[{\"script\":\"umnqdurhzzfopu\",\"schemaLinkedService\":{\"referenceName\":\"qusvwlujopwnib\",\"parameters\":{\"ad\":\"dataoztjdqumqvfmw\",\"tvgwyurbe\":\"datatgctxe\"}},\"name\":\"fnzzryizwbxg\",\"description\":\"bxlayunomirhpf\",\"dataset\":{\"referenceName\":\"enqlamwmgewayx\",\"parameters\":{\"cucsqsnxfbxu\":\"databcpjstbhem\",\"su\":\"dataeapdrbzyv\",\"hmhjd\":\"datadulpodkaxpfobk\",\"eluqr\":\"datalt\"}},\"linkedService\":{\"referenceName\":\"adhfztlray\",\"parameters\":{\"yh\":\"datagz\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"kuwpzqxlcw\",\"datasetParameters\":\"datakfecjvxf\",\"parameters\":{\"jvppwooajeyy\":\"datafqi\",\"zls\":\"datamjjxizxpxh\",\"egh\":\"dataekcttgzkjtyqpdwa\"},\"\":{\"jiahuqy\":\"datadsvcz\",\"jjqhddwvm\":\"datajzucwwm\"}}},{\"script\":\"iyoo\",\"schemaLinkedService\":{\"referenceName\":\"fozkbnz\",\"parameters\":{\"grkkderfrswq\":\"datapfqpgaixw\",\"ggicwnxhtfmcqbs\":\"datakwepwo\",\"pgchzpgfqum\":\"datad\"}},\"name\":\"djxh\",\"description\":\"hgod\",\"dataset\":{\"referenceName\":\"nrceqavfdbdfmmx\",\"parameters\":{\"y\":\"dataajuop\"}},\"linkedService\":{\"referenceName\":\"mk\",\"parameters\":{\"jk\":\"datavcwloqd\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xrwzo\",\"datasetParameters\":\"datazmsxgamtdt\",\"parameters\":{\"uzdprmim\":\"datapt\",\"qfxy\":\"dataljdp\",\"tbaewhte\":\"datamhk\",\"cn\":\"dataloamfmxtllfltym\"},\"\":{\"mkli\":\"dataijhggabqbg\",\"vffhs\":\"datairw\"}}},{\"script\":\"zouhktqrxq\",\"schemaLinkedService\":{\"referenceName\":\"njxrd\",\"parameters\":{\"llygta\":\"datat\",\"a\":\"dataczcxdfw\",\"lxrljphraspifl\":\"datayf\",\"fgsmdrjuqb\":\"dataimixlmdbgice\"}},\"name\":\"xtokljmtz\",\"description\":\"axwfqtyyqip\",\"dataset\":{\"referenceName\":\"ltungbsolj\",\"parameters\":{\"kuckgkdskswtiiq\":\"dataiigz\",\"lnssw\":\"datacqikclsm\"}},\"linkedService\":{\"referenceName\":\"kdnonaaxwmgzru\",\"parameters\":{\"cbgvsbt\":\"datacwnynlleiq\",\"xmnrqstjcmet\":\"dataertoxadhxuvj\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"gicvnpvvdh\",\"datasetParameters\":\"datanaqyqipslzmvcds\",\"parameters\":{\"kdtfo\":\"databit\",\"zdqekivycpzcvd\":\"datavfiybxqichgyb\",\"htre\":\"datazulrqt\"},\"\":{\"xmdutzfkgilnou\":\"datalpryf\"}}},{\"script\":\"cgndjgdp\",\"schemaLinkedService\":{\"referenceName\":\"ggqqyeqfcb\",\"parameters\":{\"igi\":\"datapyuflqjfshtujcyo\",\"kqnqvkixnmbzme\":\"datawdcsk\",\"mbzayspzvrietv\":\"datauyrzw\"}},\"name\":\"phmdzxplgtp\",\"description\":\"vatlzmgsc\",\"dataset\":{\"referenceName\":\"zr\",\"parameters\":{\"dmnfmfws\":\"datakzovl\",\"orimmo\":\"dataafofu\",\"ovanyrva\":\"datazwdehkkmvhz\"}},\"linkedService\":{\"referenceName\":\"tgelgwewikfy\",\"parameters\":{\"fbpbvzopaxmfmvsm\":\"datandmymnqoqjumov\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xfaxdtnq\",\"datasetParameters\":\"databsat\",\"parameters\":{\"cpcjycboelrgtt\":\"dataauesugm\",\"kcedp\":\"datafldsiuorin\",\"vvyczy\":\"datasriwmmtmqrxrz\",\"agpx\":\"datayubtgmbxiqah\"},\"\":{\"mwovyztxlnomfpb\":\"dataplnupoyryef\"}}}],\"script\":\"eeg
vyieztkutnj\",\"documentLocale\":\"l\"}") .toObject(PowerQueryTypeProperties.class); - Assertions.assertEquals("jmu", model.sources().get(0).name()); - Assertions.assertEquals("vecvzts", model.sources().get(0).description()); - Assertions.assertEquals("gmusaictdscnkzzo", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("b", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("fnzzryizwbxg", model.sources().get(0).name()); + Assertions.assertEquals("bxlayunomirhpf", model.sources().get(0).description()); + Assertions.assertEquals("enqlamwmgewayx", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("adhfztlray", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("jdopggorwjo", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("kmjqncfvdsc", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("zkn", model.sources().get(0).script()); - Assertions.assertEquals("nzxbyp", model.script()); - Assertions.assertEquals("pgaixwrgrkkderf", model.documentLocale()); + Assertions.assertEquals("kuwpzqxlcw", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("qusvwlujopwnib", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("umnqdurhzzfopu", model.sources().get(0).script()); + Assertions.assertEquals("eegvyieztkutnj", model.script()); + Assertions.assertEquals("l", model.documentLocale()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PowerQueryTypeProperties model = new PowerQueryTypeProperties().withSources(Arrays.asList( - new PowerQuerySource().withName("jmu").withDescription("vecvzts") - .withDataset(new DatasetReference().withReferenceName("gmusaictdscnkzzo") - .withParameters(mapOf("hlbzqixbnjrqvzy", "dataddclzeqozr", "me", "dataexozonynp", "jxvcvaso", - "datadpabcreuwzosg", "xzv", "datamr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("b").withParameters( - mapOf("uvecovsd", "datazygba", "akrlimzfvppkeqsi", "datahzrtd", "gygnhrkombc", "datajmcl"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("jdopggorwjo").withDatasetParameters("datarotpvclpof") - .withParameters(mapOf("kptskwxjgvhxc", "datam", "kmkook", "databmk", "wk", "dataputmgvmuyakm")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("kmjqncfvdsc") - .withParameters(mapOf("vndrwbgodtg", "datamvwfnqqwy", "ai", "datarssgwjf"))) - .withScript("zkn"), - new PowerQuerySource().withName("lzbuwodmachbkvn").withDescription("bjrmvgo") - .withDataset(new DatasetReference().withReferenceName("lehmum") - .withParameters(mapOf("prwnhkgqggoxsst", "datallcz"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("vrak") - .withParameters(mapOf("emjpequ", "dataynjcwmhlymgnukxr"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("zaudgjtfbclakkuc").withDatasetParameters("datawnhczbutou") - .withParameters(mapOf("cqqwwvgwkslvlize", "datatirjwayh", "v", "datavbia", "wkhojqttbspvkhg", - "datasrgekzyqxadyfhb", "xrk", "dataaqjsgyzstujr")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new 
LinkedServiceReference().withReferenceName("dql") - .withParameters(mapOf("xfns", "datawhrktjleifibfipl", "mhn", "dataycjowlyeyzmudsq"))) - .withScript("uqloiwyayyzivrmi"), - new PowerQuerySource().withName("kwfugiphrrkuu").withDescription("qdurhzzfopue") - .withDataset(new DatasetReference().withReferenceName("usvwluj") - .withParameters(mapOf("fmwc", "datanibittoztjdqumq", "rbelfnzz", "dataddtgctxegtvgwy", "unomir", - "datayizwbxgdebxla"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("fabenqla") - .withParameters(mapOf("bcpjstbhem", "datagewayxfl", "eapdrbzyv", "datacucsqsnxfbxu"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("adulpodkaxp").withDatasetParameters("databkzhmhjd") - .withParameters(mapOf("hfzt", "datadeluqroja", "bkuwpzqxlc", "dataraysrkgzkyhu", "ecjvxf", - "dataeak", "ppwooaj", "dataqufqizj")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("yxwhoeamo") - .withParameters(mapOf("fpnimtwuuhaueg", "datadoey", "zjy", "datakwmnfeub"))) - .withScript("zciggbnvtxofwa"))) - .withScript("nzxbyp").withDocumentLocale("pgaixwrgrkkderf"); + PowerQueryTypeProperties model + = new PowerQueryTypeProperties() + .withSources( + Arrays.asList( + new PowerQuerySource().withName("fnzzryizwbxg") + .withDescription("bxlayunomirhpf") + .withDataset(new DatasetReference().withReferenceName("enqlamwmgewayx") + .withParameters(mapOf("cucsqsnxfbxu", "databcpjstbhem", "su", "dataeapdrbzyv", "hmhjd", + "datadulpodkaxpfobk", "eluqr", "datalt"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("adhfztlray") + .withParameters(mapOf("yh", "datagz"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("kuwpzqxlcw") + .withDatasetParameters("datakfecjvxf") + .withParameters(mapOf("jvppwooajeyy", "datafqi", "zls", "datamjjxizxpxh", "egh", + "dataekcttgzkjtyqpdwa")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("qusvwlujopwnib") + .withParameters(mapOf("ad", "dataoztjdqumqvfmw", "tvgwyurbe", "datatgctxe"))) + .withScript("umnqdurhzzfopu"), + new PowerQuerySource().withName("djxh") + .withDescription("hgod") + .withDataset(new DatasetReference().withReferenceName("nrceqavfdbdfmmx") + .withParameters(mapOf("y", "dataajuop"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("mk") + .withParameters(mapOf("jk", "datavcwloqd"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("xrwzo") + .withDatasetParameters("datazmsxgamtdt") + .withParameters(mapOf("uzdprmim", "datapt", "qfxy", "dataljdp", "tbaewhte", "datamhk", + "cn", "dataloamfmxtllfltym")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("fozkbnz") + .withParameters(mapOf("grkkderfrswq", "datapfqpgaixw", "ggicwnxhtfmcqbs", "datakwepwo", + "pgchzpgfqum", "datad"))) + .withScript("iyoo"), + new PowerQuerySource().withName("xtokljmtz") + .withDescription("axwfqtyyqip") + .withDataset(new DatasetReference().withReferenceName("ltungbsolj") + .withParameters(mapOf("kuckgkdskswtiiq", "dataiigz", "lnssw", "datacqikclsm"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("kdnonaaxwmgzru") + .withParameters(mapOf("cbgvsbt", "datacwnynlleiq", "xmnrqstjcmet", "dataertoxadhxuvj"))) + .withFlowlet(new 
DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("gicvnpvvdh") + .withDatasetParameters("datanaqyqipslzmvcds") + .withParameters(mapOf("kdtfo", "databit", "zdqekivycpzcvd", "datavfiybxqichgyb", "htre", + "datazulrqt")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("njxrd") + .withParameters(mapOf("llygta", "datat", "a", "dataczcxdfw", "lxrljphraspifl", "datayf", + "fgsmdrjuqb", "dataimixlmdbgice"))) + .withScript("zouhktqrxq"), + new PowerQuerySource().withName("phmdzxplgtp") + .withDescription("vatlzmgsc") + .withDataset(new DatasetReference().withReferenceName("zr") + .withParameters(mapOf("dmnfmfws", "datakzovl", "orimmo", "dataafofu", "ovanyrva", + "datazwdehkkmvhz"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("tgelgwewikfy") + .withParameters(mapOf("fbpbvzopaxmfmvsm", "datandmymnqoqjumov"))) + .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) + .withReferenceName("xfaxdtnq") + .withDatasetParameters("databsat") + .withParameters(mapOf("cpcjycboelrgtt", "dataauesugm", "kcedp", "datafldsiuorin", + "vvyczy", "datasriwmmtmqrxrz", "agpx", "datayubtgmbxiqah")) + .withAdditionalProperties(mapOf())) + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ggqqyeqfcb") + .withParameters(mapOf("igi", "datapyuflqjfshtujcyo", "kqnqvkixnmbzme", "datawdcsk", + "mbzayspzvrietv", "datauyrzw"))) + .withScript("cgndjgdp"))) + .withScript("eegvyieztkutnj") + .withDocumentLocale("l"); model = BinaryData.fromObject(model).toObject(PowerQueryTypeProperties.class); - Assertions.assertEquals("jmu", model.sources().get(0).name()); - Assertions.assertEquals("vecvzts", model.sources().get(0).description()); - Assertions.assertEquals("gmusaictdscnkzzo", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("b", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("fnzzryizwbxg", model.sources().get(0).name()); + Assertions.assertEquals("bxlayunomirhpf", model.sources().get(0).description()); + Assertions.assertEquals("enqlamwmgewayx", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("adhfztlray", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("jdopggorwjo", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("kmjqncfvdsc", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("zkn", model.sources().get(0).script()); - Assertions.assertEquals("nzxbyp", model.script()); - Assertions.assertEquals("pgaixwrgrkkderf", model.documentLocale()); + Assertions.assertEquals("kuwpzqxlcw", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("qusvwlujopwnib", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("umnqdurhzzfopu", model.sources().get(0).script()); + Assertions.assertEquals("eegvyieztkutnj", model.script()); + Assertions.assertEquals("l", model.documentLocale()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java index 41ca557e47dfd..cc6dacdbf878d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class PrestoDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrestoDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datawb\",\"table\":\"dataiwtwfgoc\",\"schema\":\"datalvemnnzugabk\"}") + .fromString("{\"tableName\":\"datatsh\",\"table\":\"datavrsksdzmhwt\",\"schema\":\"datappwfbw\"}") .toObject(PrestoDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrestoDatasetTypeProperties model = new PrestoDatasetTypeProperties().withTableName("datawb") - .withTable("dataiwtwfgoc").withSchema("datalvemnnzugabk"); + PrestoDatasetTypeProperties model = new PrestoDatasetTypeProperties().withTableName("datatsh") + .withTable("datavrsksdzmhwt") + .withSchema("datappwfbw"); model = BinaryData.fromObject(model).toObject(PrestoDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java index 9a8dc2e3d02e1..503f458404000 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java @@ -19,30 +19,37 @@ public final class PrestoObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrestoObjectDataset model = BinaryData.fromString( - "{\"type\":\"PrestoObject\",\"typeProperties\":{\"tableName\":\"dataz\",\"table\":\"datayujxysv\",\"schema\":\"dataf\"},\"description\":\"clvildlf\",\"structure\":\"dataleirmtxfqpfildcg\",\"schema\":\"dataou\",\"linkedServiceName\":{\"referenceName\":\"fbpgcryvidbzdy\",\"parameters\":{\"jftecgprzsqmp\":\"datajatgngwn\",\"akzbyqha\":\"dataq\"}},\"parameters\":{\"lusyruxrz\":{\"type\":\"Array\",\"defaultValue\":\"datayxuuc\"},\"tpiforomppz\":{\"type\":\"Float\",\"defaultValue\":\"datahrvmgsbpgmncr\"}},\"annotations\":[\"dataqmeu\"],\"folder\":{\"name\":\"fcmpuaiugoceqtl\"},\"\":{\"ncfunlakgixhqjqh\":\"datajymwiccu\"}}") + 
"{\"type\":\"iva\",\"typeProperties\":{\"tableName\":\"dataotypcjxh\",\"table\":\"datazlocjhzppdbr\",\"schema\":\"datacxbofpr\"},\"description\":\"sbfzl\",\"structure\":\"datajr\",\"schema\":\"datasfv\",\"linkedServiceName\":{\"referenceName\":\"ghqxtmblmf\",\"parameters\":{\"vel\":\"datau\"}},\"parameters\":{\"e\":{\"type\":\"Float\",\"defaultValue\":\"dataxjtezujtoudo\"},\"iexuwemtg\":{\"type\":\"Object\",\"defaultValue\":\"datavssrj\"},\"sxehaxi\":{\"type\":\"Bool\",\"defaultValue\":\"databymmcgsksc\"},\"ptbhm\":{\"type\":\"SecureString\",\"defaultValue\":\"datajxolknshgwa\"}},\"annotations\":[\"datamn\",\"databnatlnch\",\"datazcdkxo\"],\"folder\":{\"name\":\"zzvhbuj\"},\"\":{\"rbfbsdii\":\"dataophqwojdzccqtw\",\"xrvnhhmfsnqp\":\"datadzfbv\"}}") .toObject(PrestoObjectDataset.class); - Assertions.assertEquals("clvildlf", model.description()); - Assertions.assertEquals("fbpgcryvidbzdy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("lusyruxrz").type()); - Assertions.assertEquals("fcmpuaiugoceqtl", model.folder().name()); + Assertions.assertEquals("sbfzl", model.description()); + Assertions.assertEquals("ghqxtmblmf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("e").type()); + Assertions.assertEquals("zzvhbuj", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrestoObjectDataset model = new PrestoObjectDataset().withDescription("clvildlf") - .withStructure("dataleirmtxfqpfildcg").withSchema("dataou") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fbpgcryvidbzdy") - .withParameters(mapOf("jftecgprzsqmp", "datajatgngwn", "akzbyqha", "dataq"))) - .withParameters(mapOf("lusyruxrz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datayxuuc"), "tpiforomppz", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datahrvmgsbpgmncr"))) - .withAnnotations(Arrays.asList("dataqmeu")).withFolder(new DatasetFolder().withName("fcmpuaiugoceqtl")) - .withTableName("dataz").withTable("datayujxysv").withSchemaTypePropertiesSchema("dataf"); + PrestoObjectDataset model = new PrestoObjectDataset().withDescription("sbfzl") + .withStructure("datajr") + .withSchema("datasfv") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("ghqxtmblmf").withParameters(mapOf("vel", "datau"))) + .withParameters(mapOf("e", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataxjtezujtoudo"), + "iexuwemtg", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavssrj"), + "sxehaxi", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("databymmcgsksc"), + "ptbhm", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datajxolknshgwa"))) + .withAnnotations(Arrays.asList("datamn", "databnatlnch", "datazcdkxo")) + .withFolder(new DatasetFolder().withName("zzvhbuj")) + .withTableName("dataotypcjxh") + .withTable("datazlocjhzppdbr") + .withSchemaTypePropertiesSchema("datacxbofpr"); model = BinaryData.fromObject(model).toObject(PrestoObjectDataset.class); - Assertions.assertEquals("clvildlf", model.description()); - Assertions.assertEquals("fbpgcryvidbzdy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("lusyruxrz").type()); - Assertions.assertEquals("fcmpuaiugoceqtl", 
model.folder().name()); + Assertions.assertEquals("sbfzl", model.description()); + Assertions.assertEquals("ghqxtmblmf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("e").type()); + Assertions.assertEquals("zzvhbuj", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java index 734dcf315eb2f..9b26dec82fc6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java @@ -11,16 +11,19 @@ public final class PrestoSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrestoSource model = BinaryData.fromString( - "{\"type\":\"PrestoSource\",\"query\":\"dataqngpvvnbu\",\"queryTimeout\":\"datavkutl\",\"additionalColumns\":\"dataxuuqb\",\"sourceRetryCount\":\"datapbeswgkreozpufk\",\"sourceRetryWait\":\"datamzcbzgi\",\"maxConcurrentConnections\":\"dataqpegcgdndpb\",\"disableMetricsCollection\":\"dataeymmcbiktetzvqt\",\"\":{\"pdnbzqweohmlkzhx\":\"datavcsbyimygswdu\",\"haerhxd\":\"datadmauanxzrqt\",\"bqmoguy\":\"datahkbrkhjjbwelicrx\",\"dxljjzdbzk\":\"datamselwszqveak\"}}") + "{\"type\":\"td\",\"query\":\"datazcajityjznpryo\",\"queryTimeout\":\"datavhc\",\"additionalColumns\":\"datamua\",\"sourceRetryCount\":\"datai\",\"sourceRetryWait\":\"datazytdj\",\"maxConcurrentConnections\":\"dataun\",\"disableMetricsCollection\":\"dataadyrhmpokfxcb\",\"\":{\"lnlwgseouh\":\"dataxgajyrac\",\"zru\":\"dataetxupxeain\",\"zfaja\":\"datageaenkfsxtsmzva\",\"ujcqz\":\"databswwbrllvva\"}}") .toObject(PrestoSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrestoSource model - = new PrestoSource().withSourceRetryCount("datapbeswgkreozpufk").withSourceRetryWait("datamzcbzgi") - .withMaxConcurrentConnections("dataqpegcgdndpb").withDisableMetricsCollection("dataeymmcbiktetzvqt") - .withQueryTimeout("datavkutl").withAdditionalColumns("dataxuuqb").withQuery("dataqngpvvnbu"); + PrestoSource model = new PrestoSource().withSourceRetryCount("datai") + .withSourceRetryWait("datazytdj") + .withMaxConcurrentConnections("dataun") + .withDisableMetricsCollection("dataadyrhmpokfxcb") + .withQueryTimeout("datavhc") + .withAdditionalColumns("datamua") + .withQuery("datazcajityjznpryo"); model = BinaryData.fromObject(model).toObject(PrestoSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java index ff9351e74b139..017a64508b115 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java @@ -6,58 
+6,40 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PrivateEndPointConnectionsListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"provisioningState\":\"bkazv\",\"privateEndpoint\":{\"id\":\"bky\"},\"privateLinkServiceConnectionState\":{\"status\":\"wzbfpcleniozqr\",\"description\":\"hrp\",\"actionsRequired\":\"hpyymlwall\"}},\"name\":\"yorhgo\",\"type\":\"v\",\"etag\":\"svrpnoxbokmqviv\",\"id\":\"fbzrfmfadvfkjdwu\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"provisioningState\":\"eocvypv\",\"privateEndpoint\":{\"id\":\"hop\"},\"privateLinkServiceConnectionState\":{\"status\":\"ztltuew\",\"description\":\"qtdgeepzpttpw\",\"actionsRequired\":\"zvadgmy\"}},\"name\":\"xfrkyzzrmzwgws\",\"type\":\"dcczamnjszqhu\",\"etag\":\"ncznzkpd\",\"id\":\"lpwfewcmkwiyl\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response = manager.privateEndPointConnections() - .listByFactory("xiwulqp", "qxcygevgj", com.azure.core.util.Context.NONE); + .listByFactory("plruipsrxh", "uvgkmmnwzuejvy", com.azure.core.util.Context.NONE); - Assertions.assertEquals("fbzrfmfadvfkjdwu", response.iterator().next().id()); - Assertions.assertEquals("wzbfpcleniozqr", + Assertions.assertEquals("lpwfewcmkwiyl", 
response.iterator().next().id()); + Assertions.assertEquals("ztltuew", response.iterator().next().properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("hrp", + Assertions.assertEquals("qtdgeepzpttpw", response.iterator().next().properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("hpyymlwall", + Assertions.assertEquals("zvadgmy", response.iterator().next().properties().privateLinkServiceConnectionState().actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java index 528165a7e0849..aa4e4796b9894 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java @@ -17,33 +17,36 @@ public final class PrivateEndpointConnectionListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateEndpointConnectionListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"provisioningState\":\"mhdroznnhdrlktg\",\"privateEndpoint\":{\"id\":\"gguxhemlwyw\"},\"privateLinkServiceConnectionState\":{\"status\":\"czg\",\"description\":\"ukklelss\",\"actionsRequired\":\"lycsxz\"}},\"name\":\"ksrl\",\"type\":\"desqplpvmjcdo\",\"etag\":\"bidyv\",\"id\":\"owx\"}],\"nextLink\":\"piudeugfsxzecpa\"}") + "{\"value\":[{\"properties\":{\"provisioningState\":\"mkoisqcssf\",\"privateEndpoint\":{\"id\":\"ifmcsypobkdqzr\"},\"privateLinkServiceConnectionState\":{\"status\":\"ylollgtrczzydmxz\",\"description\":\"jpvuaurkihcirld\",\"actionsRequired\":\"xrdcoxnbkkja\"}},\"name\":\"rnnqb\",\"type\":\"bpizxqltgr\",\"etag\":\"gypxrxvbfihwuhvc\",\"id\":\"fsrb\"}],\"nextLink\":\"blml\"}") .toObject(PrivateEndpointConnectionListResponse.class); - Assertions.assertEquals("owx", model.value().get(0).id()); - Assertions.assertEquals("czg", model.value().get(0).properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("ukklelss", + Assertions.assertEquals("fsrb", model.value().get(0).id()); + Assertions.assertEquals("ylollgtrczzydmxz", + model.value().get(0).properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("jpvuaurkihcirld", model.value().get(0).properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("lycsxz", + Assertions.assertEquals("xrdcoxnbkkja", model.value().get(0).properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("piudeugfsxzecpa", model.nextLink()); + Assertions.assertEquals("blml", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrivateEndpointConnectionListResponse model - = new PrivateEndpointConnectionListResponse() - .withValue(Arrays.asList(new PrivateEndpointConnectionResourceInner().withId("owx") - .withProperties(new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("czg") - 
.withDescription("ukklelss").withActionsRequired("lycsxz"))))) - .withNextLink("piudeugfsxzecpa"); + PrivateEndpointConnectionListResponse model = new PrivateEndpointConnectionListResponse() + .withValue(Arrays.asList(new PrivateEndpointConnectionResourceInner().withId("fsrb") + .withProperties(new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) + .withPrivateLinkServiceConnectionState( + new PrivateLinkConnectionState().withStatus("ylollgtrczzydmxz") + .withDescription("jpvuaurkihcirld") + .withActionsRequired("xrdcoxnbkkja"))))) + .withNextLink("blml"); model = BinaryData.fromObject(model).toObject(PrivateEndpointConnectionListResponse.class); - Assertions.assertEquals("owx", model.value().get(0).id()); - Assertions.assertEquals("czg", model.value().get(0).properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("ukklelss", + Assertions.assertEquals("fsrb", model.value().get(0).id()); + Assertions.assertEquals("ylollgtrczzydmxz", + model.value().get(0).properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("jpvuaurkihcirld", model.value().get(0).properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("lycsxz", + Assertions.assertEquals("xrdcoxnbkkja", model.value().get(0).properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("piudeugfsxzecpa", model.nextLink()); + Assertions.assertEquals("blml", model.nextLink()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java index 783452a8f166b..7fc11201fa4a6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java @@ -6,63 +6,49 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.PrivateEndpoint; import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequest; import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests { @Test public void 
testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"provisioningState\":\"ijadhgka\",\"privateEndpoint\":{\"id\":\"jqutixy\"},\"privateLinkServiceConnectionState\":{\"status\":\"ypmvof\",\"description\":\"nsfb\",\"actionsRequired\":\"dnothibtnuqdded\"}},\"name\":\"hq\",\"type\":\"mnsunxfdpulpn\",\"etag\":\"yxbbdxnnaeyczbky\",\"id\":\"pnbdzjuq\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"provisioningState\":\"kyj\",\"privateEndpoint\":{\"id\":\"ggidnnsi\"},\"privateLinkServiceConnectionState\":{\"status\":\"cmedceishsg\",\"description\":\"rzunkvnoicwe\",\"actionsRequired\":\"ovnfuj\"}},\"name\":\"igglkoflephdnya\",\"type\":\"okhzymsldomgwc\",\"etag\":\"zdxjmkzkgtj\",\"id\":\"gntuuzlg\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PrivateEndpointConnectionResource response - = manager.privateEndpointConnectionOperations().define("hvuihjlzjt").withExistingFactory("ia", "klxp") + = manager.privateEndpointConnectionOperations() + .define("sacf") + .withExistingFactory("wpjgwmpcykavy", "crovahr") .withProperties(new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("fuab") - .withDescription("tyoaccrlydml").withActionsRequired("trxqdypmhg")) - .withPrivateEndpoint(new PrivateEndpoint().withId("uqa"))) - .withIfMatch("dy").create(); - - Assertions.assertEquals("pnbdzjuq", response.id()); - Assertions.assertEquals("ypmvof", response.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("nsfb", response.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("dnothibtnuqdded", - response.properties().privateLinkServiceConnectionState().actionsRequired()); + .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("imqvolake") + .withDescription("rqmweaik") + .withActionsRequired("oshubvzhmxcwpsu")) + .withPrivateEndpoint(new PrivateEndpoint().withId("pnmpy"))) + .withIfMatch("cxcwloffhlrxldy") + .create(); + + Assertions.assertEquals("gntuuzlg", response.id()); + Assertions.assertEquals("cmedceishsg", 
response.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("rzunkvnoicwe", + response.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("ovnfuj", response.properties().privateLinkServiceConnectionState().actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java index e461a7e0628fa..4d9ab7895c641 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PrivateEndpointConnectionOperationsDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.privateEndpointConnectionOperations().deleteWithResponse("wykk", "qq", "qgknpgqzmgivkf", - 
com.azure.core.util.Context.NONE); + manager.privateEndpointConnectionOperations() + .deleteWithResponse("eogfwhuczkakx", "tuejppzahpubq", "mqtuxe", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java index b58b30f421b6a..0cc394d126907 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java @@ -6,55 +6,38 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PrivateEndpointConnectionOperationsGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"provisioningState\":\"nupqfsshjlpz\",\"privateEndpoint\":{\"id\":\"ymdqljngroawltab\"},\"privateLinkServiceConnectionState\":{\"status\":\"rrea\",\"description\":\"yeltrnogfy\",\"actionsRequired\":\"m\"}},\"name\":\"ybgrugklwubkmd\",\"type\":\"nwokeuxcm\",\"etag\":\"kirzlfpkinrgqz\",\"id\":\"rnigztxcjnwzvl\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"provisioningState\":\"lkoduxmi\",\"privateEndpoint\":{\"id\":\"ty\"},\"privateLinkServiceConnectionState\":{\"status\":\"fhzluvlyewepzvfg\",\"description\":\"zxwpqykogbppux\",\"actionsRequired\":\"uldar\"}},\"name\":\"lbkaipapktxe\",\"type\":\"fogs\",\"etag\":\"vn\",\"id\":\"vskdqersaw\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - 
tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PrivateEndpointConnectionResource response = manager.privateEndpointConnectionOperations() - .getWithResponse("yooewyvwwvki", "uvphkhszesxsyrvj", "wpknbwh", "ev", com.azure.core.util.Context.NONE) + .getWithResponse("lsddaw", "waiviktuxnj", "zdmvrsckc", "rlojgzzfevjexy", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("rnigztxcjnwzvl", response.id()); - Assertions.assertEquals("rrea", response.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("yeltrnogfy", response.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("m", response.properties().privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("vskdqersaw", response.id()); + Assertions.assertEquals("fhzluvlyewepzvfg", response.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("zxwpqykogbppux", + response.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("uldar", response.properties().privateLinkServiceConnectionState().actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java index 0dc61a68b1b2c..9bc4e89fda277 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java @@ -15,27 +15,26 @@ public final class PrivateEndpointConnectionResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateEndpointConnectionResourceInner model = BinaryData.fromString( - "{\"properties\":{\"provisioningState\":\"ufykhvuhxepmru\",\"privateEndpoint\":{\"id\":\"abaobnslujdjltym\"},\"privateLinkServiceConnectionState\":{\"status\":\"guihywar\",\"description\":\"pphkixkykxds\",\"actionsRequired\":\"pemmucfxhik\"}},\"name\":\"lrmymyincqlhri\",\"type\":\"sl\",\"etag\":\"iiovgqcgxu\",\"id\":\"qkctotiowlxte\"}") + "{\"properties\":{\"provisioningState\":\"xihspnxwq\",\"privateEndpoint\":{\"id\":\"epzwaklsbsbqq\"},\"privateLinkServiceConnectionState\":{\"status\":\"wwrxaomzisgl\",\"description\":\"czezkhhlt\",\"actionsRequired\":\"adhqoa\"}},\"name\":\"qoyueayfbpcmsplb\",\"type\":\"rueqthwm\",\"etag\":\"mbscbbx\",\"id\":\"dhxiidlopedbwd\"}") .toObject(PrivateEndpointConnectionResourceInner.class); - Assertions.assertEquals("qkctotiowlxte", model.id()); - Assertions.assertEquals("guihywar", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("pphkixkykxds", 
model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("pemmucfxhik", - model.properties().privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("dhxiidlopedbwd", model.id()); + Assertions.assertEquals("wwrxaomzisgl", model.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("czezkhhlt", model.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("adhqoa", model.properties().privateLinkServiceConnectionState().actionsRequired()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PrivateEndpointConnectionResourceInner model - = new PrivateEndpointConnectionResourceInner().withId("qkctotiowlxte") + = new PrivateEndpointConnectionResourceInner().withId("dhxiidlopedbwd") .withProperties(new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("guihywar") - .withDescription("pphkixkykxds").withActionsRequired("pemmucfxhik"))); + .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("wwrxaomzisgl") + .withDescription("czezkhhlt") + .withActionsRequired("adhqoa"))); model = BinaryData.fromObject(model).toObject(PrivateEndpointConnectionResourceInner.class); - Assertions.assertEquals("qkctotiowlxte", model.id()); - Assertions.assertEquals("guihywar", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("pphkixkykxds", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("pemmucfxhik", - model.properties().privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("dhxiidlopedbwd", model.id()); + Assertions.assertEquals("wwrxaomzisgl", model.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("czezkhhlt", model.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("adhqoa", model.properties().privateLinkServiceConnectionState().actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java index 8c22c4feedb28..58c896a3808cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java @@ -11,14 +11,14 @@ public final class PrivateEndpointTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - PrivateEndpoint model = BinaryData.fromString("{\"id\":\"vedwcgyeewx\"}").toObject(PrivateEndpoint.class); - Assertions.assertEquals("vedwcgyeewx", model.id()); + PrivateEndpoint model = BinaryData.fromString("{\"id\":\"bxn\"}").toObject(PrivateEndpoint.class); + Assertions.assertEquals("bxn", model.id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrivateEndpoint model = new PrivateEndpoint().withId("vedwcgyeewx"); + PrivateEndpoint model = new PrivateEndpoint().withId("bxn"); model = BinaryData.fromObject(model).toObject(PrivateEndpoint.class); - Assertions.assertEquals("vedwcgyeewx", 
model.id()); + Assertions.assertEquals("bxn", model.id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java index 5c8d11de418f1..46dbc61d9c7e4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java @@ -15,30 +15,31 @@ public final class PrivateLinkConnectionApprovalRequestResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkConnectionApprovalRequestResource model = BinaryData.fromString( - "{\"properties\":{\"privateLinkServiceConnectionState\":{\"status\":\"vpbbt\",\"description\":\"fjoknss\",\"actionsRequired\":\"zqedikdfrdbi\"},\"privateEndpoint\":{\"id\":\"jgeihfqlggwfi\"}},\"name\":\"cxmjpbyephmg\",\"type\":\"ljvrcmyfqipgxhnp\",\"etag\":\"yqwcabvnuil\",\"id\":\"yaswlpaugmr\"}") + "{\"properties\":{\"privateLinkServiceConnectionState\":{\"status\":\"zohmnrxxbs\",\"description\":\"klinhmdptysprq\",\"actionsRequired\":\"nzxojpslsvjgpli\"},\"privateEndpoint\":{\"id\":\"qwoyxqvapco\"}},\"name\":\"oucqpqojx\",\"type\":\"zrzdcgd\",\"etag\":\"enribc\",\"id\":\"etzqd\"}") .toObject(PrivateLinkConnectionApprovalRequestResource.class); - Assertions.assertEquals("yaswlpaugmr", model.id()); - Assertions.assertEquals("vpbbt", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("fjoknss", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("zqedikdfrdbi", + Assertions.assertEquals("etzqd", model.id()); + Assertions.assertEquals("zohmnrxxbs", model.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("klinhmdptysprq", model.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("nzxojpslsvjgpli", model.properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("jgeihfqlggwfi", model.properties().privateEndpoint().id()); + Assertions.assertEquals("qwoyxqvapco", model.properties().privateEndpoint().id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PrivateLinkConnectionApprovalRequestResource model - = new PrivateLinkConnectionApprovalRequestResource().withId("yaswlpaugmr") + = new PrivateLinkConnectionApprovalRequestResource().withId("etzqd") .withProperties(new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("vpbbt") - .withDescription("fjoknss").withActionsRequired("zqedikdfrdbi")) - .withPrivateEndpoint(new PrivateEndpoint().withId("jgeihfqlggwfi"))); + .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("zohmnrxxbs") + .withDescription("klinhmdptysprq") + .withActionsRequired("nzxojpslsvjgpli")) + .withPrivateEndpoint(new PrivateEndpoint().withId("qwoyxqvapco"))); model = BinaryData.fromObject(model).toObject(PrivateLinkConnectionApprovalRequestResource.class); - Assertions.assertEquals("yaswlpaugmr", model.id()); - 
Assertions.assertEquals("vpbbt", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("fjoknss", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("zqedikdfrdbi", + Assertions.assertEquals("etzqd", model.id()); + Assertions.assertEquals("zohmnrxxbs", model.properties().privateLinkServiceConnectionState().status()); + Assertions.assertEquals("klinhmdptysprq", model.properties().privateLinkServiceConnectionState().description()); + Assertions.assertEquals("nzxojpslsvjgpli", model.properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("jgeihfqlggwfi", model.properties().privateEndpoint().id()); + Assertions.assertEquals("qwoyxqvapco", model.properties().privateEndpoint().id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java index 821365242370c..3b05173e7e45d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java @@ -14,24 +14,25 @@ public final class PrivateLinkConnectionApprovalRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkConnectionApprovalRequest model = BinaryData.fromString( - "{\"privateLinkServiceConnectionState\":{\"status\":\"lrxw\",\"description\":\"aukhfkvcisiz\",\"actionsRequired\":\"a\"},\"privateEndpoint\":{\"id\":\"xjw\"}}") + "{\"privateLinkServiceConnectionState\":{\"status\":\"wflj\",\"description\":\"namtuatmzw\",\"actionsRequired\":\"jncqtjzmi\"},\"privateEndpoint\":{\"id\":\"bgatzu\"}}") .toObject(PrivateLinkConnectionApprovalRequest.class); - Assertions.assertEquals("lrxw", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("aukhfkvcisiz", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("a", model.privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("xjw", model.privateEndpoint().id()); + Assertions.assertEquals("wflj", model.privateLinkServiceConnectionState().status()); + Assertions.assertEquals("namtuatmzw", model.privateLinkServiceConnectionState().description()); + Assertions.assertEquals("jncqtjzmi", model.privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("bgatzu", model.privateEndpoint().id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PrivateLinkConnectionApprovalRequest model = new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("lrxw") - .withDescription("aukhfkvcisiz").withActionsRequired("a")) - .withPrivateEndpoint(new PrivateEndpoint().withId("xjw")); + .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("wflj") + .withDescription("namtuatmzw") + .withActionsRequired("jncqtjzmi")) + .withPrivateEndpoint(new PrivateEndpoint().withId("bgatzu")); model = BinaryData.fromObject(model).toObject(PrivateLinkConnectionApprovalRequest.class); - 
Assertions.assertEquals("lrxw", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("aukhfkvcisiz", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("a", model.privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("xjw", model.privateEndpoint().id()); + Assertions.assertEquals("wflj", model.privateLinkServiceConnectionState().status()); + Assertions.assertEquals("namtuatmzw", model.privateLinkServiceConnectionState().description()); + Assertions.assertEquals("jncqtjzmi", model.privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("bgatzu", model.privateEndpoint().id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java index e0e64f9ea8577..a479b72e11f02 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java @@ -12,21 +12,21 @@ public final class PrivateLinkConnectionStateTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkConnectionState model = BinaryData - .fromString( - "{\"status\":\"fypiv\",\"description\":\"bbjpmcubkmif\",\"actionsRequired\":\"xkubvphavpmhbrbq\"}") + .fromString("{\"status\":\"qfyiaseqchkr\",\"description\":\"zrazisgyk\",\"actionsRequired\":\"emv\"}") .toObject(PrivateLinkConnectionState.class); - Assertions.assertEquals("fypiv", model.status()); - Assertions.assertEquals("bbjpmcubkmif", model.description()); - Assertions.assertEquals("xkubvphavpmhbrbq", model.actionsRequired()); + Assertions.assertEquals("qfyiaseqchkr", model.status()); + Assertions.assertEquals("zrazisgyk", model.description()); + Assertions.assertEquals("emv", model.actionsRequired()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PrivateLinkConnectionState model = new PrivateLinkConnectionState().withStatus("fypiv") - .withDescription("bbjpmcubkmif").withActionsRequired("xkubvphavpmhbrbq"); + PrivateLinkConnectionState model = new PrivateLinkConnectionState().withStatus("qfyiaseqchkr") + .withDescription("zrazisgyk") + .withActionsRequired("emv"); model = BinaryData.fromObject(model).toObject(PrivateLinkConnectionState.class); - Assertions.assertEquals("fypiv", model.status()); - Assertions.assertEquals("bbjpmcubkmif", model.description()); - Assertions.assertEquals("xkubvphavpmhbrbq", model.actionsRequired()); + Assertions.assertEquals("qfyiaseqchkr", model.status()); + Assertions.assertEquals("zrazisgyk", model.description()); + Assertions.assertEquals("emv", model.actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java index f3374890ff3c9..8875c07f8c1b4 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java @@ -11,7 +11,7 @@ public final class PrivateLinkResourcePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkResourceProperties model = BinaryData.fromString( - "{\"groupId\":\"klsbsbqqqagw\",\"requiredMembers\":[\"ao\",\"zisglrrczezkhh\",\"tnjadhq\"],\"requiredZoneNames\":[\"jqoyueayfbpcm\"]}") + "{\"groupId\":\"gygzyvn\",\"requiredMembers\":[\"aifghtmoqqt\",\"ffhzbkrkjjjavfq\",\"vhnqo\",\"wdogiyetesyp\"],\"requiredZoneNames\":[\"bztjhqtfbovnynkb\",\"etnjuhpsprkz\",\"aupia\"]}") .toObject(PrivateLinkResourceProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java index 6c8faa09b79b2..675f5a0136945 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java @@ -13,16 +13,16 @@ public final class PrivateLinkResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkResource model = BinaryData.fromString( - "{\"properties\":{\"groupId\":\"efxrdcoxnbk\",\"requiredMembers\":[\"nurnnq\",\"nqbpi\"],\"requiredZoneNames\":[\"ltgrdogypxrxv\",\"fihwu\"]},\"name\":\"ctafsrbxrblm\",\"type\":\"owxih\",\"etag\":\"nxw\",\"id\":\"gnepz\"}") + "{\"properties\":{\"groupId\":\"ksrf\",\"requiredMembers\":[\"olmk\"],\"requiredZoneNames\":[\"xwcdomm\",\"vfqawzfgbrttuiac\",\"kiexhajlfnthiq\"]},\"name\":\"uttdiygbp\",\"type\":\"wswmt\",\"etag\":\"yctww\",\"id\":\"wxjlmec\"}") .toObject(PrivateLinkResource.class); - Assertions.assertEquals("gnepz", model.id()); + Assertions.assertEquals("wxjlmec", model.id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PrivateLinkResource model - = new PrivateLinkResource().withId("gnepz").withProperties(new PrivateLinkResourceProperties()); + = new PrivateLinkResource().withId("wxjlmec").withProperties(new PrivateLinkResourceProperties()); model = BinaryData.fromObject(model).toObject(PrivateLinkResource.class); - Assertions.assertEquals("gnepz", model.id()); + Assertions.assertEquals("wxjlmec", model.id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java index 7adce5ab5c9aa..b7ce745382dde 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java @@ -6,51 +6,34 @@ import 
com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.PrivateLinkResourcesWrapper; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class PrivateLinkResourcesGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"groupId\":\"ydzhafp\",\"requiredMembers\":[\"fzntwountffqa\",\"hbumoq\",\"aixsalgzzm\",\"ik\"],\"requiredZoneNames\":[\"gmmegu\",\"efmujgxnsndcmg\",\"ddsgj\"]},\"name\":\"yzcvmpz\",\"type\":\"tuaapyejbs\",\"etag\":\"cqqxymmqxndx\",\"id\":\"zyiycnpowjy\"},{\"properties\":{\"groupId\":\"qohcyhmkp\",\"requiredMembers\":[\"lpxpalljveqx\",\"cbparyoa\"],\"requiredZoneNames\":[\"j\",\"vpvzfvegumsqd\",\"cgfcbat\",\"lrpbgb\"]},\"name\":\"hnmyyagoeby\",\"type\":\"glkvby\",\"etag\":\"eraegfyrgrlnb\",\"id\":\"kojmmcnls\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"groupId\":\"ufsmkrrtpsimamuc\",\"requiredMembers\":[\"qejzpdzeud\",\"bumjtldskmlg\"],\"requiredZoneNames\":[\"exonmdghdinl\"]},\"name\":\"sagngxzvepiijwse\",\"type\":\"aycylrjfr\",\"etag\":\"ykd\",\"id\":\"tdlouxbmtlho\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PrivateLinkResourcesWrapper response = manager.privateLinkResources() - .getWithResponse("nqyusvgrba", "xsxtku", com.azure.core.util.Context.NONE).getValue(); + .getWithResponse("snyepjil", "gepehkmno", com.azure.core.util.Context.NONE) + .getValue(); - Assertions.assertEquals("zyiycnpowjy", response.value().get(0).id()); + 
Assertions.assertEquals("tdlouxbmtlho", response.value().get(0).id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java index 5b2d54c5723fc..48e15b06ef6bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java @@ -15,17 +15,18 @@ public final class PrivateLinkResourcesWrapperInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PrivateLinkResourcesWrapperInner model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"groupId\":\"psmgo\",\"requiredMembers\":[\"amljdlrgmsplzgau\"],\"requiredZoneNames\":[\"hhvnewgnxkymp\",\"anxrj\",\"ixt\"]},\"name\":\"taoypnyghshxc\",\"type\":\"hkgmnsg\",\"etag\":\"xycphdrwjjkh\",\"id\":\"omacluzvxnqmhr\"},{\"properties\":{\"groupId\":\"df\",\"requiredMembers\":[\"oi\"],\"requiredZoneNames\":[\"ssffxuifmc\"]},\"name\":\"p\",\"type\":\"kdqzrdzsylo\",\"etag\":\"gtrczzydmxzjijpv\",\"id\":\"urkihci\"}]}") + "{\"value\":[{\"properties\":{\"groupId\":\"wggahttzlswvaj\",\"requiredMembers\":[\"t\",\"x\",\"oqza\",\"unwqr\"],\"requiredZoneNames\":[\"rg\",\"haohcmbuo\",\"njrohmbpy\"]},\"name\":\"xameblydyvkfkm\",\"type\":\"cxneh\",\"etag\":\"mtodl\",\"id\":\"yapucygvoa\"},{\"properties\":{\"groupId\":\"nssxlgh\",\"requiredMembers\":[\"gjlgvvpase\",\"sgb\",\"xantuygdhg\"],\"requiredZoneNames\":[\"pirpiwrqof\"]},\"name\":\"o\",\"type\":\"jnlex\",\"etag\":\"cbjpibkephuu\",\"id\":\"rctat\"},{\"properties\":{\"groupId\":\"ntqpbr\",\"requiredMembers\":[\"rduczkgofxyfs\",\"uc\",\"crrpcjttbstvje\",\"qnrmvvfko\"],\"requiredZoneNames\":[\"ghktuidvrm\",\"zlpdwwex\",\"mzvlazipbh\"]},\"name\":\"vqs\",\"type\":\"yy\",\"etag\":\"ziven\",\"id\":\"pmeyyvpkpatlbijp\"}]}") .toObject(PrivateLinkResourcesWrapperInner.class); - Assertions.assertEquals("omacluzvxnqmhr", model.value().get(0).id()); + Assertions.assertEquals("yapucygvoa", model.value().get(0).id()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { PrivateLinkResourcesWrapperInner model = new PrivateLinkResourcesWrapperInner().withValue(Arrays.asList( - new PrivateLinkResource().withId("omacluzvxnqmhr").withProperties(new PrivateLinkResourceProperties()), - new PrivateLinkResource().withId("urkihci").withProperties(new PrivateLinkResourceProperties()))); + new PrivateLinkResource().withId("yapucygvoa").withProperties(new PrivateLinkResourceProperties()), + new PrivateLinkResource().withId("rctat").withProperties(new PrivateLinkResourceProperties()), + new PrivateLinkResource().withId("pmeyyvpkpatlbijp").withProperties(new PrivateLinkResourceProperties()))); model = BinaryData.fromObject(model).toObject(PrivateLinkResourcesWrapperInner.class); - Assertions.assertEquals("omacluzvxnqmhr", model.value().get(0).id()); + Assertions.assertEquals("yapucygvoa", model.value().get(0).id()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java index 7b31f7d8a3022..28e57a68e7143 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java @@ -12,14 +12,14 @@ public final class PurviewConfigurationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { PurviewConfiguration model - = BinaryData.fromString("{\"purviewResourceId\":\"stkiiuxhqyud\"}").toObject(PurviewConfiguration.class); - Assertions.assertEquals("stkiiuxhqyud", model.purviewResourceId()); + = BinaryData.fromString("{\"purviewResourceId\":\"exxbczwtr\"}").toObject(PurviewConfiguration.class); + Assertions.assertEquals("exxbczwtr", model.purviewResourceId()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - PurviewConfiguration model = new PurviewConfiguration().withPurviewResourceId("stkiiuxhqyud"); + PurviewConfiguration model = new PurviewConfiguration().withPurviewResourceId("exxbczwtr"); model = BinaryData.fromObject(model).toObject(PurviewConfiguration.class); - Assertions.assertEquals("stkiiuxhqyud", model.purviewResourceId()); + Assertions.assertEquals("exxbczwtr", model.purviewResourceId()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java index f0bac34ee9363..2ae307a8bc0dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java @@ -16,41 +16,65 @@ public final class QueryDataFlowDebugSessionsResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { QueryDataFlowDebugSessionsResponse model = BinaryData.fromString( - "{\"value\":[{\"dataFlowName\":\"noda\",\"computeType\":\"pqhe\",\"coreCount\":1635095275,\"nodeCount\":1244000330,\"integrationRuntimeName\":\"gsbos\",\"sessionId\":\"eln\",\"startTime\":\"atutmzlbiojlvfhr\",\"timeToLiveInMinutes\":438246123,\"lastActivityTime\":\"eqvcwwyyurmoch\",\"\":{\"lbkpb\":\"dataprsnmokayzejn\",\"hahzvechndbnwi\":\"datapcpil\"}}],\"nextLink\":\"olewjwi\"}") + 
"{\"value\":[{\"dataFlowName\":\"okffqyinljqepqwh\",\"computeType\":\"monstshiyxgve\",\"coreCount\":1153037194,\"nodeCount\":341924514,\"integrationRuntimeName\":\"ccbi\",\"sessionId\":\"svu\",\"startTime\":\"o\",\"timeToLiveInMinutes\":1462697228,\"lastActivityTime\":\"stmninwjizcilng\",\"\":{\"jjtbxqmuluxlxqz\":\"datah\",\"cucrwnamikze\":\"datanersb\",\"fuhok\":\"datarqbsmswziq\"}},{\"dataFlowName\":\"uswhvhczznvfbycj\",\"computeType\":\"jww\",\"coreCount\":184999250,\"nodeCount\":156341242,\"integrationRuntimeName\":\"wmxqhndvnoamlds\",\"sessionId\":\"aohdjh\",\"startTime\":\"lzok\",\"timeToLiveInMinutes\":434777672,\"lastActivityTime\":\"pelnjetag\",\"\":{\"tft\":\"dataxo\"}},{\"dataFlowName\":\"pnpbswveflocc\",\"computeType\":\"mozi\",\"coreCount\":1928373121,\"nodeCount\":215643303,\"integrationRuntimeName\":\"wtxxpkyjcx\",\"sessionId\":\"xgrytfmp\",\"startTime\":\"cil\",\"timeToLiveInMinutes\":735394972,\"lastActivityTime\":\"ykggnoxuztrksx\",\"\":{\"cpfnznthjtwkja\":\"datad\",\"tcqiosmg\":\"datasrxuzvoam\"}}],\"nextLink\":\"ahgx\"}") .toObject(QueryDataFlowDebugSessionsResponse.class); - Assertions.assertEquals("noda", model.value().get(0).dataFlowName()); - Assertions.assertEquals("pqhe", model.value().get(0).computeType()); - Assertions.assertEquals(1635095275, model.value().get(0).coreCount()); - Assertions.assertEquals(1244000330, model.value().get(0).nodeCount()); - Assertions.assertEquals("gsbos", model.value().get(0).integrationRuntimeName()); - Assertions.assertEquals("eln", model.value().get(0).sessionId()); - Assertions.assertEquals("atutmzlbiojlvfhr", model.value().get(0).startTime()); - Assertions.assertEquals(438246123, model.value().get(0).timeToLiveInMinutes()); - Assertions.assertEquals("eqvcwwyyurmoch", model.value().get(0).lastActivityTime()); - Assertions.assertEquals("olewjwi", model.nextLink()); + Assertions.assertEquals("okffqyinljqepqwh", model.value().get(0).dataFlowName()); + Assertions.assertEquals("monstshiyxgve", model.value().get(0).computeType()); + Assertions.assertEquals(1153037194, model.value().get(0).coreCount()); + Assertions.assertEquals(341924514, model.value().get(0).nodeCount()); + Assertions.assertEquals("ccbi", model.value().get(0).integrationRuntimeName()); + Assertions.assertEquals("svu", model.value().get(0).sessionId()); + Assertions.assertEquals("o", model.value().get(0).startTime()); + Assertions.assertEquals(1462697228, model.value().get(0).timeToLiveInMinutes()); + Assertions.assertEquals("stmninwjizcilng", model.value().get(0).lastActivityTime()); + Assertions.assertEquals("ahgx", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - QueryDataFlowDebugSessionsResponse model - = new QueryDataFlowDebugSessionsResponse() - .withValue( - Arrays.asList(new DataFlowDebugSessionInfoInner().withDataFlowName("noda").withComputeType("pqhe") - .withCoreCount(1635095275).withNodeCount(1244000330).withIntegrationRuntimeName("gsbos") - .withSessionId("eln").withStartTime("atutmzlbiojlvfhr").withTimeToLiveInMinutes(438246123) - .withLastActivityTime("eqvcwwyyurmoch").withAdditionalProperties(mapOf()))) - .withNextLink("olewjwi"); + QueryDataFlowDebugSessionsResponse model = new QueryDataFlowDebugSessionsResponse().withValue(Arrays.asList( + new DataFlowDebugSessionInfoInner().withDataFlowName("okffqyinljqepqwh") + .withComputeType("monstshiyxgve") + .withCoreCount(1153037194) + .withNodeCount(341924514) + .withIntegrationRuntimeName("ccbi") + .withSessionId("svu") + .withStartTime("o") + 
.withTimeToLiveInMinutes(1462697228) + .withLastActivityTime("stmninwjizcilng") + .withAdditionalProperties(mapOf()), + new DataFlowDebugSessionInfoInner().withDataFlowName("uswhvhczznvfbycj") + .withComputeType("jww") + .withCoreCount(184999250) + .withNodeCount(156341242) + .withIntegrationRuntimeName("wmxqhndvnoamlds") + .withSessionId("aohdjh") + .withStartTime("lzok") + .withTimeToLiveInMinutes(434777672) + .withLastActivityTime("pelnjetag") + .withAdditionalProperties(mapOf()), + new DataFlowDebugSessionInfoInner().withDataFlowName("pnpbswveflocc") + .withComputeType("mozi") + .withCoreCount(1928373121) + .withNodeCount(215643303) + .withIntegrationRuntimeName("wtxxpkyjcx") + .withSessionId("xgrytfmp") + .withStartTime("cil") + .withTimeToLiveInMinutes(735394972) + .withLastActivityTime("ykggnoxuztrksx") + .withAdditionalProperties(mapOf()))) + .withNextLink("ahgx"); model = BinaryData.fromObject(model).toObject(QueryDataFlowDebugSessionsResponse.class); - Assertions.assertEquals("noda", model.value().get(0).dataFlowName()); - Assertions.assertEquals("pqhe", model.value().get(0).computeType()); - Assertions.assertEquals(1635095275, model.value().get(0).coreCount()); - Assertions.assertEquals(1244000330, model.value().get(0).nodeCount()); - Assertions.assertEquals("gsbos", model.value().get(0).integrationRuntimeName()); - Assertions.assertEquals("eln", model.value().get(0).sessionId()); - Assertions.assertEquals("atutmzlbiojlvfhr", model.value().get(0).startTime()); - Assertions.assertEquals(438246123, model.value().get(0).timeToLiveInMinutes()); - Assertions.assertEquals("eqvcwwyyurmoch", model.value().get(0).lastActivityTime()); - Assertions.assertEquals("olewjwi", model.nextLink()); + Assertions.assertEquals("okffqyinljqepqwh", model.value().get(0).dataFlowName()); + Assertions.assertEquals("monstshiyxgve", model.value().get(0).computeType()); + Assertions.assertEquals(1153037194, model.value().get(0).coreCount()); + Assertions.assertEquals(341924514, model.value().get(0).nodeCount()); + Assertions.assertEquals("ccbi", model.value().get(0).integrationRuntimeName()); + Assertions.assertEquals("svu", model.value().get(0).sessionId()); + Assertions.assertEquals("o", model.value().get(0).startTime()); + Assertions.assertEquals(1462697228, model.value().get(0).timeToLiveInMinutes()); + Assertions.assertEquals("stmninwjizcilng", model.value().get(0).lastActivityTime()); + Assertions.assertEquals("ahgx", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java index d1f2ec2b4ec73..e7d6324befebf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java @@ -19,30 +19,34 @@ public final class QuickBooksObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { QuickBooksObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"QuickBooksObject\",\"typeProperties\":{\"tableName\":\"datasyweohlmtsnvon\"},\"description\":\"ftswcd\",\"structure\":\"datanseptvdtic\",\"schema\":\"datafl\",\"linkedServiceName\":{\"referenceName\":\"zwkopxd\",\"parameters\":{\"kfzrxxf\":\"datawoqhgppwxn\",\"jzrfx\":\"dataduvqzjnnuww\"}},\"parameters\":{\"rxrjwyzrieitq\":{\"type\":\"Bool\",\"defaultValue\":\"dataqjkbkjc\"},\"pebfhlgeeh\":{\"type\":\"String\",\"defaultValue\":\"datauwtbdzqajxk\"}},\"annotations\":[\"datagplnl\",\"datarfe\",\"datazunbua\",\"datamoub\"],\"folder\":{\"name\":\"mi\"},\"\":{\"yt\":\"datarnobvvequ\"}}") + "{\"type\":\"nxyksznfs\",\"typeProperties\":{\"tableName\":\"datalgdungaypxsazb\"},\"description\":\"prvgrandzkt\",\"structure\":\"datamlpczlqboomzg\",\"schema\":\"dataolhsfddkhxvevwxm\",\"linkedServiceName\":{\"referenceName\":\"bwax\",\"parameters\":{\"ld\":\"datagnpyhtuhalpq\",\"npnytkqjar\":\"dataaokex\"}},\"parameters\":{\"catrmzoujf\":{\"type\":\"Float\",\"defaultValue\":\"datagtz\"},\"vgyoggkztzttj\":{\"type\":\"Float\",\"defaultValue\":\"dataxuupczegqnaidvs\"},\"iz\":{\"type\":\"Object\",\"defaultValue\":\"datapbqgzkuobclobnaq\"}},\"annotations\":[\"dataq\",\"datayu\"],\"folder\":{\"name\":\"nhzjmkffeonmnv\"},\"\":{\"tdegcrunbkilxs\":\"datayiqywlpxmli\",\"iemorszffiukltr\":\"dataekbirhyvsyuv\",\"uoxcsd\":\"datagio\"}}") .toObject(QuickBooksObjectDataset.class); - Assertions.assertEquals("ftswcd", model.description()); - Assertions.assertEquals("zwkopxd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("rxrjwyzrieitq").type()); - Assertions.assertEquals("mi", model.folder().name()); + Assertions.assertEquals("prvgrandzkt", model.description()); + Assertions.assertEquals("bwax", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("catrmzoujf").type()); + Assertions.assertEquals("nhzjmkffeonmnv", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - QuickBooksObjectDataset model = new QuickBooksObjectDataset().withDescription("ftswcd") - .withStructure("datanseptvdtic").withSchema("datafl") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zwkopxd") - .withParameters(mapOf("kfzrxxf", "datawoqhgppwxn", "jzrfx", "dataduvqzjnnuww"))) - .withParameters(mapOf("rxrjwyzrieitq", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataqjkbkjc"), "pebfhlgeeh", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datauwtbdzqajxk"))) - .withAnnotations(Arrays.asList("datagplnl", "datarfe", "datazunbua", "datamoub")) - .withFolder(new DatasetFolder().withName("mi")).withTableName("datasyweohlmtsnvon"); + QuickBooksObjectDataset model = new QuickBooksObjectDataset().withDescription("prvgrandzkt") + .withStructure("datamlpczlqboomzg") + .withSchema("dataolhsfddkhxvevwxm") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bwax") + .withParameters(mapOf("ld", "datagnpyhtuhalpq", "npnytkqjar", "dataaokex"))) + .withParameters(mapOf("catrmzoujf", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datagtz"), "vgyoggkztzttj", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataxuupczegqnaidvs"), + "iz", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datapbqgzkuobclobnaq"))) + .withAnnotations(Arrays.asList("dataq", "datayu")) + .withFolder(new 
DatasetFolder().withName("nhzjmkffeonmnv")) + .withTableName("datalgdungaypxsazb"); model = BinaryData.fromObject(model).toObject(QuickBooksObjectDataset.class); - Assertions.assertEquals("ftswcd", model.description()); - Assertions.assertEquals("zwkopxd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("rxrjwyzrieitq").type()); - Assertions.assertEquals("mi", model.folder().name()); + Assertions.assertEquals("prvgrandzkt", model.description()); + Assertions.assertEquals("bwax", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("catrmzoujf").type()); + Assertions.assertEquals("nhzjmkffeonmnv", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java index 8509a09b65177..68f7b5ed0528f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java @@ -11,16 +11,19 @@ public final class QuickBooksSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { QuickBooksSource model = BinaryData.fromString( - "{\"type\":\"QuickBooksSource\",\"query\":\"datagw\",\"queryTimeout\":\"dataujshcsnk\",\"additionalColumns\":\"datagpqxqevt\",\"sourceRetryCount\":\"datavyy\",\"sourceRetryWait\":\"datakjirvjogsalvjl\",\"maxConcurrentConnections\":\"dataimua\",\"disableMetricsCollection\":\"datakympwquu\",\"\":{\"iqeftgunropdpuf\":\"dataofuzthszjyanhs\"}}") + "{\"type\":\"jqfbvbv\",\"query\":\"datazxyfaidzvdqv\",\"queryTimeout\":\"datajbrqb\",\"additionalColumns\":\"datamacnqudmyduvaw\",\"sourceRetryCount\":\"dataryrzoqyy\",\"sourceRetryWait\":\"datadbgobhltm\",\"maxConcurrentConnections\":\"datay\",\"disableMetricsCollection\":\"dataqgrsytto\",\"\":{\"ca\":\"databbxifacrhpu\",\"wtosuiguoemo\":\"datazpvp\",\"oxwyxodpcgdv\":\"datandbuexr\",\"xdafilaizcd\":\"datatnbk\"}}") .toObject(QuickBooksSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - QuickBooksSource model - = new QuickBooksSource().withSourceRetryCount("datavyy").withSourceRetryWait("datakjirvjogsalvjl") - .withMaxConcurrentConnections("dataimua").withDisableMetricsCollection("datakympwquu") - .withQueryTimeout("dataujshcsnk").withAdditionalColumns("datagpqxqevt").withQuery("datagw"); + QuickBooksSource model = new QuickBooksSource().withSourceRetryCount("dataryrzoqyy") + .withSourceRetryWait("datadbgobhltm") + .withMaxConcurrentConnections("datay") + .withDisableMetricsCollection("dataqgrsytto") + .withQueryTimeout("datajbrqb") + .withAdditionalColumns("datamacnqudmyduvaw") + .withQuery("datazxyfaidzvdqv"); model = BinaryData.fromObject(model).toObject(QuickBooksSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java index 4bf82d7107a94..3238d057cb772 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java @@ -15,19 +15,20 @@ public final class RecurrenceScheduleOccurrenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RecurrenceScheduleOccurrence model = BinaryData.fromString( - "{\"day\":\"Thursday\",\"occurrence\":61488311,\"\":{\"sneyuirr\":\"datafjf\",\"mwcflphqt\":\"dataxrftfamozyv\",\"gzctfnlaklszbeu\":\"datapcowmukzcrpd\",\"hq\":\"dataqfxixzwi\"}}") + "{\"day\":\"Monday\",\"occurrence\":1370922840,\"\":{\"njiwzqnbjk\":\"dataldbkuq\",\"bmfnjuzvwwowy\":\"datat\"}}") .toObject(RecurrenceScheduleOccurrence.class); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.day()); - Assertions.assertEquals(61488311, model.occurrence()); + Assertions.assertEquals(DayOfWeek.MONDAY, model.day()); + Assertions.assertEquals(1370922840, model.occurrence()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RecurrenceScheduleOccurrence model = new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY) - .withOccurrence(61488311).withAdditionalProperties(mapOf()); + RecurrenceScheduleOccurrence model = new RecurrenceScheduleOccurrence().withDay(DayOfWeek.MONDAY) + .withOccurrence(1370922840) + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(RecurrenceScheduleOccurrence.class); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.day()); - Assertions.assertEquals(61488311, model.occurrence()); + Assertions.assertEquals(DayOfWeek.MONDAY, model.day()); + Assertions.assertEquals(1370922840, model.occurrence()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java index 09511d36a73dc..edb9c95cd74e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java @@ -18,40 +18,41 @@ public final class RecurrenceScheduleTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RecurrenceSchedule model = BinaryData.fromString( - "{\"minutes\":[1651942817,1198550709,1034605947,2073508503],\"hours\":[761365450,2140656266,1965073356,292574962],\"weekDays\":[\"Friday\",\"Monday\"],\"monthDays\":[1518926475,520960902],\"monthlyOccurrences\":[{\"day\":\"Tuesday\",\"occurrence\":503666490,\"\":{\"mmtucazgrlvkdap\":\"datavlcunnbaijobcpru\",\"tkbzz\":\"datazemnja\"}},{\"day\":\"Monday\",\"occurrence\":1914403497,\"\":{\"r\":\"datajhmgocal\",\"nreukcrcsdaip\":\"datakmwyoukfim\"}},{\"day\":\"Wednesday\",\"occurrence\":714649368,\"\":{\"tbybcxgrrlzdn\":\"datav\",\"uqi\":\"datacxtqqpfgjny\",\"j\":\"dataoiuj\",\"lqtdcasjnzeckp\":\"datagsfvyvnpu\"}},{\"day\":\"Tuesday\",\"occurrence\":367076420,\"\":{\"lzicltwan\":\"datac\",\"inlqkzee\":\"databzycxvifkzspwvl\",\"kwxb\":\"datazelmmwmdhmd\",\"pnmnojfmztpw\":\"datavmc\"}}],\"\":{\"rfo\":\"datauntvyeyebw\"}}") + 
"{\"minutes\":[855765251,1655443106,2090510823,1836359328],\"hours\":[879333962,787747970,349030599,462374271],\"weekDays\":[\"Thursday\"],\"monthDays\":[199972520,1755547689,554487281,722555937],\"monthlyOccurrences\":[{\"day\":\"Wednesday\",\"occurrence\":1575003480,\"\":{\"rysgabhpfpv\":\"datahgiglkin\",\"jcckhgstohzvrq\":\"datady\"}},{\"day\":\"Friday\",\"occurrence\":2031325920,\"\":{\"u\":\"datafhduniqumpspo\"}},{\"day\":\"Friday\",\"occurrence\":1890860893,\"\":{\"iqzfjlprljilpua\":\"datagkdwvtmmvq\",\"kfxzgopckmmagfbr\":\"dataxidytj\",\"ryourl\":\"datayvrlycikwqtlg\",\"jvsqzchysqy\":\"dataw\"}}],\"\":{\"pqfxyywsxrxv\":\"dataqmikljczxotblx\",\"yijyzhmfuksqi\":\"datavwkzaqqk\"}}") .toObject(RecurrenceSchedule.class); - Assertions.assertEquals(1651942817, model.minutes().get(0)); - Assertions.assertEquals(761365450, model.hours().get(0)); - Assertions.assertEquals(DaysOfWeek.FRIDAY, model.weekDays().get(0)); - Assertions.assertEquals(1518926475, model.monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.TUESDAY, model.monthlyOccurrences().get(0).day()); - Assertions.assertEquals(503666490, model.monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(855765251, model.minutes().get(0)); + Assertions.assertEquals(879333962, model.hours().get(0)); + Assertions.assertEquals(DaysOfWeek.THURSDAY, model.weekDays().get(0)); + Assertions.assertEquals(199972520, model.monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.WEDNESDAY, model.monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1575003480, model.monthlyOccurrences().get(0).occurrence()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { RecurrenceSchedule model - = new RecurrenceSchedule().withMinutes(Arrays.asList(1651942817, 1198550709, 1034605947, 2073508503)) - .withHours(Arrays.asList(761365450, 2140656266, 1965073356, 292574962)) - .withWeekDays(Arrays.asList(DaysOfWeek.FRIDAY, DaysOfWeek.MONDAY)) - .withMonthDays(Arrays.asList(1518926475, 520960902)) + = new RecurrenceSchedule().withMinutes(Arrays.asList(855765251, 1655443106, 2090510823, 1836359328)) + .withHours(Arrays.asList(879333962, 787747970, 349030599, 462374271)) + .withWeekDays(Arrays.asList(DaysOfWeek.THURSDAY)) + .withMonthDays(Arrays.asList(199972520, 1755547689, 554487281, 722555937)) .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY).withOccurrence(503666490) + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) + .withOccurrence(1575003480) .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.MONDAY).withOccurrence(1914403497) + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY) + .withOccurrence(2031325920) .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY).withOccurrence(714649368) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY).withOccurrence(367076420) + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY) + .withOccurrence(1890860893) .withAdditionalProperties(mapOf()))) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(RecurrenceSchedule.class); - Assertions.assertEquals(1651942817, model.minutes().get(0)); - Assertions.assertEquals(761365450, model.hours().get(0)); - Assertions.assertEquals(DaysOfWeek.FRIDAY, model.weekDays().get(0)); - Assertions.assertEquals(1518926475, model.monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.TUESDAY, 
model.monthlyOccurrences().get(0).day()); - Assertions.assertEquals(503666490, model.monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(855765251, model.minutes().get(0)); + Assertions.assertEquals(879333962, model.hours().get(0)); + Assertions.assertEquals(DaysOfWeek.THURSDAY, model.weekDays().get(0)); + Assertions.assertEquals(199972520, model.monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.WEDNESDAY, model.monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1575003480, model.monthlyOccurrences().get(0).occurrence()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java index 67b6d1f34d3e7..384ba45c6c3e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java @@ -13,14 +13,16 @@ public final class RedirectIncompatibleRowSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RedirectIncompatibleRowSettings model = BinaryData.fromString( - "{\"linkedServiceName\":\"databmzwlej\",\"path\":\"datayoonbualri\",\"\":{\"fmzmq\":\"databntopbabndw\"}}") + "{\"linkedServiceName\":\"datalxrwdjbya\",\"path\":\"datafmsxamncuhxz\",\"\":{\"torh\":\"datakxyhuet\",\"jqgzloorhxdu\":\"dataeuuysszhse\",\"akgd\":\"dataegljqpyxi\",\"qwgoomapc\":\"dataanmhvwgchgpbd\"}}") .toObject(RedirectIncompatibleRowSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RedirectIncompatibleRowSettings model = new RedirectIncompatibleRowSettings() - .withLinkedServiceName("databmzwlej").withPath("datayoonbualri").withAdditionalProperties(mapOf()); + RedirectIncompatibleRowSettings model + = new RedirectIncompatibleRowSettings().withLinkedServiceName("datalxrwdjbya") + .withPath("datafmsxamncuhxz") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(RedirectIncompatibleRowSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java index 748ae268f528e..d6ddaeb60d01e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java @@ -15,19 +15,22 @@ public final class RedshiftUnloadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RedshiftUnloadSettings model = BinaryData.fromString( - "{\"s3LinkedServiceName\":{\"referenceName\":\"xcyai\",\"parameters\":{\"ssuuzfrwmct\":\"datatrdbtrkvluu\",\"c\":\"datangj\"}},\"bucketName\":\"datasfbkrtpu\"}") + 
"{\"s3LinkedServiceName\":{\"referenceName\":\"ovblx\",\"parameters\":{\"nykfzso\":\"dataezgdpiurfe\",\"vzqldgii\":\"dataouezszl\",\"b\":\"datanpkxp\",\"jhbvllj\":\"datatyjf\"}},\"bucketName\":\"dataqlruhhkkbfgr\"}") .toObject(RedshiftUnloadSettings.class); - Assertions.assertEquals("xcyai", model.s3LinkedServiceName().referenceName()); + Assertions.assertEquals("ovblx", model.s3LinkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RedshiftUnloadSettings model = new RedshiftUnloadSettings() - .withS3LinkedServiceName(new LinkedServiceReference().withReferenceName("xcyai") - .withParameters(mapOf("ssuuzfrwmct", "datatrdbtrkvluu", "c", "datangj"))) - .withBucketName("datasfbkrtpu"); + RedshiftUnloadSettings model + = new RedshiftUnloadSettings() + .withS3LinkedServiceName( + new LinkedServiceReference().withReferenceName("ovblx") + .withParameters(mapOf("nykfzso", "dataezgdpiurfe", "vzqldgii", "dataouezszl", "b", "datanpkxp", + "jhbvllj", "datatyjf"))) + .withBucketName("dataqlruhhkkbfgr"); model = BinaryData.fromObject(model).toObject(RedshiftUnloadSettings.class); - Assertions.assertEquals("xcyai", model.s3LinkedServiceName().referenceName()); + Assertions.assertEquals("ovblx", model.s3LinkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java index ba9b534b7fc73..710596199ab8a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java @@ -11,16 +11,18 @@ public final class RelationalSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RelationalSource model = BinaryData.fromString( - "{\"type\":\"RelationalSource\",\"query\":\"databckxennzow\",\"additionalColumns\":\"datairhexjpwyhmktpyk\",\"sourceRetryCount\":\"datacpkoamqfdtbao\",\"sourceRetryWait\":\"dataaofkcvhh\",\"maxConcurrentConnections\":\"datavkuuikrsi\",\"disableMetricsCollection\":\"datarwsj\",\"\":{\"jva\":\"dataen\",\"fm\":\"datadqgfvygrfyyknxua\",\"kt\":\"dataynlcimjmurocryfu\"}}") + "{\"type\":\"drfqyqvhzkfyv\",\"query\":\"datanvdabaodiytxq\",\"additionalColumns\":\"datahehhtltwv\",\"sourceRetryCount\":\"dataenozlgxqfghlosh\",\"sourceRetryWait\":\"datamkpcmtsbande\",\"maxConcurrentConnections\":\"datalvuewrljmlod\",\"disableMetricsCollection\":\"datazvtfyqeogwbs\",\"\":{\"grgaosttbw\":\"datawxhcygfgqmdbaz\",\"ydsdmacydqacgyvw\":\"datapoyanir\"}}") .toObject(RelationalSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RelationalSource model - = new RelationalSource().withSourceRetryCount("datacpkoamqfdtbao").withSourceRetryWait("dataaofkcvhh") - .withMaxConcurrentConnections("datavkuuikrsi").withDisableMetricsCollection("datarwsj") - .withQuery("databckxennzow").withAdditionalColumns("datairhexjpwyhmktpyk"); + RelationalSource model = new RelationalSource().withSourceRetryCount("dataenozlgxqfghlosh") + .withSourceRetryWait("datamkpcmtsbande") + .withMaxConcurrentConnections("datalvuewrljmlod") + .withDisableMetricsCollection("datazvtfyqeogwbs") + 
.withQuery("datanvdabaodiytxq") + .withAdditionalColumns("datahehhtltwv"); model = BinaryData.fromObject(model).toObject(RelationalSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java index f45100bf84233..1f073e6c34bb1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java @@ -19,29 +19,32 @@ public final class RelationalTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RelationalTableDataset model = BinaryData.fromString( - "{\"type\":\"RelationalTable\",\"typeProperties\":{\"tableName\":\"dataxzizebjr\"},\"description\":\"gdstubw\",\"structure\":\"dataxzsshxliqmsckwh\",\"schema\":\"datadoi\",\"linkedServiceName\":{\"referenceName\":\"yobqzwjalwrsofxc\",\"parameters\":{\"mrs\":\"datamvj\",\"prel\":\"dataydl\",\"ztirjvqxvwkiocxo\":\"dataxfkz\"}},\"parameters\":{\"lrlqxbctatezyozd\":{\"type\":\"Float\",\"defaultValue\":\"datauocqflm\"}},\"annotations\":[\"dataqnl\",\"datajxcscnitodmrah\",\"datajido\",\"datanvlt\"],\"folder\":{\"name\":\"ahpuwkupbbnhic\"},\"\":{\"nhlsforsimtfcqm\":\"datazhrcqdfwbif\",\"pelpfijtezgxmpe\":\"dataynb\",\"f\":\"datazamadlerzi\",\"mirmnrijefmrt\":\"dataivczktllxswtdap\"}}") + "{\"type\":\"d\",\"typeProperties\":{\"tableName\":\"datapkwmamrlfizjud\"},\"description\":\"upngyhy\",\"structure\":\"datayafewcmodxw\",\"schema\":\"dataytxnxrqx\",\"linkedServiceName\":{\"referenceName\":\"tzeargvfvkhbj\",\"parameters\":{\"w\":\"datapjxsd\",\"ynepkt\":\"dataignybffqcw\",\"conyse\":\"datamwg\",\"ouoxfalo\":\"datajijfhpxni\"}},\"parameters\":{\"gfap\":{\"type\":\"Int\",\"defaultValue\":\"datayporsynieunbyd\"}},\"annotations\":[\"dataubwt\",\"dataay\"],\"folder\":{\"name\":\"vjfqreeoxvqjmrn\"},\"\":{\"ivixzhpjg\":\"datahsdbfbm\"}}") .toObject(RelationalTableDataset.class); - Assertions.assertEquals("gdstubw", model.description()); - Assertions.assertEquals("yobqzwjalwrsofxc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("lrlqxbctatezyozd").type()); - Assertions.assertEquals("ahpuwkupbbnhic", model.folder().name()); + Assertions.assertEquals("upngyhy", model.description()); + Assertions.assertEquals("tzeargvfvkhbj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("gfap").type()); + Assertions.assertEquals("vjfqreeoxvqjmrn", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RelationalTableDataset model = new RelationalTableDataset().withDescription("gdstubw") - .withStructure("dataxzsshxliqmsckwh").withSchema("datadoi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yobqzwjalwrsofxc") - .withParameters(mapOf("mrs", "datamvj", "prel", "dataydl", "ztirjvqxvwkiocxo", "dataxfkz"))) - .withParameters(mapOf("lrlqxbctatezyozd", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datauocqflm"))) - .withAnnotations(Arrays.asList("dataqnl", "datajxcscnitodmrah", "datajido", "datanvlt")) - 
.withFolder(new DatasetFolder().withName("ahpuwkupbbnhic")).withTableName("dataxzizebjr"); + RelationalTableDataset model = new RelationalTableDataset().withDescription("upngyhy") + .withStructure("datayafewcmodxw") + .withSchema("dataytxnxrqx") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tzeargvfvkhbj") + .withParameters(mapOf("w", "datapjxsd", "ynepkt", "dataignybffqcw", "conyse", "datamwg", "ouoxfalo", + "datajijfhpxni"))) + .withParameters(mapOf("gfap", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayporsynieunbyd"))) + .withAnnotations(Arrays.asList("dataubwt", "dataay")) + .withFolder(new DatasetFolder().withName("vjfqreeoxvqjmrn")) + .withTableName("datapkwmamrlfizjud"); model = BinaryData.fromObject(model).toObject(RelationalTableDataset.class); - Assertions.assertEquals("gdstubw", model.description()); - Assertions.assertEquals("yobqzwjalwrsofxc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("lrlqxbctatezyozd").type()); - Assertions.assertEquals("ahpuwkupbbnhic", model.folder().name()); + Assertions.assertEquals("upngyhy", model.description()); + Assertions.assertEquals("tzeargvfvkhbj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("gfap").type()); + Assertions.assertEquals("vjfqreeoxvqjmrn", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java index cf0513c7b01bb..f288654c20737 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java @@ -10,13 +10,14 @@ public final class RelationalTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - RelationalTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datac\"}").toObject(RelationalTableDatasetTypeProperties.class); + RelationalTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datamiaoaweacfx\"}") + .toObject(RelationalTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RelationalTableDatasetTypeProperties model = new RelationalTableDatasetTypeProperties().withTableName("datac"); + RelationalTableDatasetTypeProperties model + = new RelationalTableDatasetTypeProperties().withTableName("datamiaoaweacfx"); model = BinaryData.fromObject(model).toObject(RelationalTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java index 71b044934ccf8..ad302efe46e12 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java @@ -14,22 +14,23 @@ public final class RemotePrivateEndpointConnectionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RemotePrivateEndpointConnection model = BinaryData.fromString( - "{\"provisioningState\":\"ptjgwdt\",\"privateEndpoint\":{\"id\":\"ranblwphqlkccu\"},\"privateLinkServiceConnectionState\":{\"status\":\"gqwa\",\"description\":\"iul\",\"actionsRequired\":\"niiprglvaw\"}}") + "{\"provisioningState\":\"qyybxubmdnafcbq\",\"privateEndpoint\":{\"id\":\"mjel\"},\"privateLinkServiceConnectionState\":{\"status\":\"cigeleohdbvqvw\",\"description\":\"jopwbeonrlkwz\",\"actionsRequired\":\"ybxc\"}}") .toObject(RemotePrivateEndpointConnection.class); - Assertions.assertEquals("gqwa", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("iul", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("niiprglvaw", model.privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("cigeleohdbvqvw", model.privateLinkServiceConnectionState().status()); + Assertions.assertEquals("jopwbeonrlkwz", model.privateLinkServiceConnectionState().description()); + Assertions.assertEquals("ybxc", model.privateLinkServiceConnectionState().actionsRequired()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { RemotePrivateEndpointConnection model = new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("gqwa") - .withDescription("iul").withActionsRequired("niiprglvaw")); + .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("cigeleohdbvqvw") + .withDescription("jopwbeonrlkwz") + .withActionsRequired("ybxc")); model = BinaryData.fromObject(model).toObject(RemotePrivateEndpointConnection.class); - Assertions.assertEquals("gqwa", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("iul", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("niiprglvaw", model.privateLinkServiceConnectionState().actionsRequired()); + Assertions.assertEquals("cigeleohdbvqvw", model.privateLinkServiceConnectionState().status()); + Assertions.assertEquals("jopwbeonrlkwz", model.privateLinkServiceConnectionState().description()); + Assertions.assertEquals("ybxc", model.privateLinkServiceConnectionState().actionsRequired()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java index 37f6f73115ec4..8af58e77553a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java @@ -14,25 +14,26 @@ public final class RerunTumblingWindowTriggerTests { @org.junit.jupiter.api.Test public void 
testDeserialize() throws Exception { RerunTumblingWindowTrigger model = BinaryData.fromString( - "{\"type\":\"RerunTumblingWindowTrigger\",\"typeProperties\":{\"parentTrigger\":\"datarhgcuejtxxlkokt\",\"requestedStartTime\":\"2021-06-18T21:25:37Z\",\"requestedEndTime\":\"2021-10-10T00:49:20Z\",\"rerunConcurrency\":1762125974},\"description\":\"wtwboxgrvsavoq\",\"runtimeState\":\"Started\",\"annotations\":[\"datakuszllognledhvll\",\"datanyg\",\"datamn\"],\"\":{\"lwdh\":\"dataaqtzop\"}}") + "{\"type\":\"fllrieknxbcgn\",\"typeProperties\":{\"parentTrigger\":\"dataxmaimwufuv\",\"requestedStartTime\":\"2021-03-08T14:49:03Z\",\"requestedEndTime\":\"2021-08-11T21:21:34Z\",\"rerunConcurrency\":253054235},\"description\":\"engkwhkekxohq\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataoqtvmkjle\",\"datalakvhgefvpc\"],\"\":{\"dtcibbgijkw\":\"databyagqiprb\"}}") .toObject(RerunTumblingWindowTrigger.class); - Assertions.assertEquals("wtwboxgrvsavoq", model.description()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-18T21:25:37Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-10T00:49:20Z"), model.requestedEndTime()); - Assertions.assertEquals(1762125974, model.rerunConcurrency()); + Assertions.assertEquals("engkwhkekxohq", model.description()); + Assertions.assertEquals(OffsetDateTime.parse("2021-03-08T14:49:03Z"), model.requestedStartTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-08-11T21:21:34Z"), model.requestedEndTime()); + Assertions.assertEquals(253054235, model.rerunConcurrency()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RerunTumblingWindowTrigger model = new RerunTumblingWindowTrigger().withDescription("wtwboxgrvsavoq") - .withAnnotations(Arrays.asList("datakuszllognledhvll", "datanyg", "datamn")) - .withParentTrigger("datarhgcuejtxxlkokt") - .withRequestedStartTime(OffsetDateTime.parse("2021-06-18T21:25:37Z")) - .withRequestedEndTime(OffsetDateTime.parse("2021-10-10T00:49:20Z")).withRerunConcurrency(1762125974); + RerunTumblingWindowTrigger model = new RerunTumblingWindowTrigger().withDescription("engkwhkekxohq") + .withAnnotations(Arrays.asList("dataoqtvmkjle", "datalakvhgefvpc")) + .withParentTrigger("dataxmaimwufuv") + .withRequestedStartTime(OffsetDateTime.parse("2021-03-08T14:49:03Z")) + .withRequestedEndTime(OffsetDateTime.parse("2021-08-11T21:21:34Z")) + .withRerunConcurrency(253054235); model = BinaryData.fromObject(model).toObject(RerunTumblingWindowTrigger.class); - Assertions.assertEquals("wtwboxgrvsavoq", model.description()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-18T21:25:37Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-10T00:49:20Z"), model.requestedEndTime()); - Assertions.assertEquals(1762125974, model.rerunConcurrency()); + Assertions.assertEquals("engkwhkekxohq", model.description()); + Assertions.assertEquals(OffsetDateTime.parse("2021-03-08T14:49:03Z"), model.requestedStartTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-08-11T21:21:34Z"), model.requestedEndTime()); + Assertions.assertEquals(253054235, model.rerunConcurrency()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java index 
2947f51730990..c9bc9879465c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java @@ -13,21 +13,23 @@ public final class RerunTumblingWindowTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RerunTumblingWindowTriggerTypeProperties model = BinaryData.fromString( - "{\"parentTrigger\":\"databvhflbchzob\",\"requestedStartTime\":\"2021-11-28T06:35:22Z\",\"requestedEndTime\":\"2021-01-04T14:44:46Z\",\"rerunConcurrency\":345398232}") + "{\"parentTrigger\":\"datajlkimo\",\"requestedStartTime\":\"2021-07-16T12:25:12Z\",\"requestedEndTime\":\"2021-05-22T10:20:45Z\",\"rerunConcurrency\":667206244}") .toObject(RerunTumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-28T06:35:22Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-04T14:44:46Z"), model.requestedEndTime()); - Assertions.assertEquals(345398232, model.rerunConcurrency()); + Assertions.assertEquals(OffsetDateTime.parse("2021-07-16T12:25:12Z"), model.requestedStartTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-05-22T10:20:45Z"), model.requestedEndTime()); + Assertions.assertEquals(667206244, model.rerunConcurrency()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RerunTumblingWindowTriggerTypeProperties model = new RerunTumblingWindowTriggerTypeProperties() - .withParentTrigger("databvhflbchzob").withRequestedStartTime(OffsetDateTime.parse("2021-11-28T06:35:22Z")) - .withRequestedEndTime(OffsetDateTime.parse("2021-01-04T14:44:46Z")).withRerunConcurrency(345398232); + RerunTumblingWindowTriggerTypeProperties model + = new RerunTumblingWindowTriggerTypeProperties().withParentTrigger("datajlkimo") + .withRequestedStartTime(OffsetDateTime.parse("2021-07-16T12:25:12Z")) + .withRequestedEndTime(OffsetDateTime.parse("2021-05-22T10:20:45Z")) + .withRerunConcurrency(667206244); model = BinaryData.fromObject(model).toObject(RerunTumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-28T06:35:22Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-04T14:44:46Z"), model.requestedEndTime()); - Assertions.assertEquals(345398232, model.rerunConcurrency()); + Assertions.assertEquals(OffsetDateTime.parse("2021-07-16T12:25:12Z"), model.requestedStartTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-05-22T10:20:45Z"), model.requestedEndTime()); + Assertions.assertEquals(667206244, model.rerunConcurrency()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java index 35758b6a7b961..84419e974e460 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java @@ -19,31 +19,34 @@ public final class 
ResponsysObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ResponsysObjectDataset model = BinaryData.fromString( - "{\"type\":\"ResponsysObject\",\"typeProperties\":{\"tableName\":\"dataxcsdqoxhdenmj\"},\"description\":\"xgrggyciw\",\"structure\":\"dataqinr\",\"schema\":\"datavvmrn\",\"linkedServiceName\":{\"referenceName\":\"rdijox\",\"parameters\":{\"b\":\"datasychdcjggcmpncj\",\"owvfxe\":\"databnoq\",\"irvcpol\":\"datatzgwjeky\",\"ilbdvxlfhlzzgap\":\"datavgppp\"}},\"parameters\":{\"xnroyhthesyw\":{\"type\":\"SecureString\",\"defaultValue\":\"datablscrmzquuzywkgo\"}},\"annotations\":[\"datavg\"],\"folder\":{\"name\":\"c\"},\"\":{\"zyrgrlh\":\"datazcwuejmxlfzl\"}}") + "{\"type\":\"mfwfpoeow\",\"typeProperties\":{\"tableName\":\"datau\"},\"description\":\"zzwncs\",\"structure\":\"datafxvchmubyguqh\",\"schema\":\"datamsvjfgrpryyir\",\"linkedServiceName\":{\"referenceName\":\"bajxj\",\"parameters\":{\"atxkznlwlmbx\":\"datayrkb\",\"ay\":\"datagkev\",\"kxiymzgrg\":\"datax\",\"ybsps\":\"datajalrjwaezp\"}},\"parameters\":{\"suiwexpasckpg\":{\"type\":\"Bool\",\"defaultValue\":\"dataepzimfc\"},\"cmxtoejt\":{\"type\":\"Float\",\"defaultValue\":\"datayxbwslx\"},\"idkxz\":{\"type\":\"Int\",\"defaultValue\":\"datactm\"}},\"annotations\":[\"datauzntbpcadd\",\"datax\",\"datarxiperrplfm\",\"datavmjjfz\"],\"folder\":{\"name\":\"lbiqq\"},\"\":{\"symagbahdbtjmku\":\"datarxknfv\",\"bizrxhuq\":\"dataonrk\",\"cxgqtquirgopgza\":\"datavpanloqov\"}}") .toObject(ResponsysObjectDataset.class); - Assertions.assertEquals("xgrggyciw", model.description()); - Assertions.assertEquals("rdijox", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xnroyhthesyw").type()); - Assertions.assertEquals("c", model.folder().name()); + Assertions.assertEquals("zzwncs", model.description()); + Assertions.assertEquals("bajxj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("suiwexpasckpg").type()); + Assertions.assertEquals("lbiqq", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ResponsysObjectDataset model = new ResponsysObjectDataset().withDescription("xgrggyciw") - .withStructure("dataqinr").withSchema("datavvmrn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rdijox") - .withParameters(mapOf("b", "datasychdcjggcmpncj", "owvfxe", "databnoq", "irvcpol", "datatzgwjeky", - "ilbdvxlfhlzzgap", "datavgppp"))) - .withParameters(mapOf("xnroyhthesyw", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datablscrmzquuzywkgo"))) - .withAnnotations(Arrays.asList("datavg")).withFolder(new DatasetFolder().withName("c")) - .withTableName("dataxcsdqoxhdenmj"); + ResponsysObjectDataset model = new ResponsysObjectDataset().withDescription("zzwncs") + .withStructure("datafxvchmubyguqh") + .withSchema("datamsvjfgrpryyir") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bajxj") + .withParameters(mapOf("atxkznlwlmbx", "datayrkb", "ay", "datagkev", "kxiymzgrg", "datax", "ybsps", + "datajalrjwaezp"))) + .withParameters(mapOf("suiwexpasckpg", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataepzimfc"), "cmxtoejt", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayxbwslx"), "idkxz", + new 
ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datactm"))) + .withAnnotations(Arrays.asList("datauzntbpcadd", "datax", "datarxiperrplfm", "datavmjjfz")) + .withFolder(new DatasetFolder().withName("lbiqq")) + .withTableName("datau"); model = BinaryData.fromObject(model).toObject(ResponsysObjectDataset.class); - Assertions.assertEquals("xgrggyciw", model.description()); - Assertions.assertEquals("rdijox", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xnroyhthesyw").type()); - Assertions.assertEquals("c", model.folder().name()); + Assertions.assertEquals("zzwncs", model.description()); + Assertions.assertEquals("bajxj", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("suiwexpasckpg").type()); + Assertions.assertEquals("lbiqq", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java index 97f3e4a51bd07..f2059537a9c52 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java @@ -11,16 +11,19 @@ public final class ResponsysSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ResponsysSource model = BinaryData.fromString( - "{\"type\":\"ResponsysSource\",\"query\":\"datacgcie\",\"queryTimeout\":\"datax\",\"additionalColumns\":\"datazvnghtknr\",\"sourceRetryCount\":\"datahysnmyuvf\",\"sourceRetryWait\":\"datacnrapxw\",\"maxConcurrentConnections\":\"datapxoelfobehr\",\"disableMetricsCollection\":\"dataglojjcziytf\",\"\":{\"nkms\":\"datairmbrdognqa\",\"dzvuhw\":\"dataybh\"}}") + "{\"type\":\"cabsmrfx\",\"query\":\"datarxl\",\"queryTimeout\":\"dataiirneop\",\"additionalColumns\":\"databhcfswpda\",\"sourceRetryCount\":\"datagvzgwvmhbiziij\",\"sourceRetryWait\":\"dataeexdboat\",\"maxConcurrentConnections\":\"datafy\",\"disableMetricsCollection\":\"datafeqrn\",\"\":{\"caraxorqjboyng\":\"dataqyagfxaco\",\"aptwmawypkpbmi\":\"dataogqvwchynrdt\"}}") .toObject(ResponsysSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ResponsysSource model - = new ResponsysSource().withSourceRetryCount("datahysnmyuvf").withSourceRetryWait("datacnrapxw") - .withMaxConcurrentConnections("datapxoelfobehr").withDisableMetricsCollection("dataglojjcziytf") - .withQueryTimeout("datax").withAdditionalColumns("datazvnghtknr").withQuery("datacgcie"); + ResponsysSource model = new ResponsysSource().withSourceRetryCount("datagvzgwvmhbiziij") + .withSourceRetryWait("dataeexdboat") + .withMaxConcurrentConnections("datafy") + .withDisableMetricsCollection("datafeqrn") + .withQueryTimeout("dataiirneop") + .withAdditionalColumns("databhcfswpda") + .withQuery("datarxl"); model = BinaryData.fromObject(model).toObject(ResponsysSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java index de73764250ada..0ef221cab32e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java @@ -19,33 +19,38 @@ public final class RestResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RestResourceDataset model = BinaryData.fromString( - "{\"type\":\"RestResource\",\"typeProperties\":{\"relativeUrl\":\"databbfpxxa\",\"requestMethod\":\"dataoz\",\"requestBody\":\"datagsnuhwy\",\"additionalHeaders\":{\"axcebnbeosk\":\"dataf\",\"v\":\"datamqqerwqxpj\",\"zmhytebjkjgee\":\"datamdfkhttuobr\"},\"paginationRules\":{\"twofxfmhlvy\":\"datafmabvbmn\"}},\"description\":\"sl\",\"structure\":\"datarml\",\"schema\":\"dataiekhjgqq\",\"linkedServiceName\":{\"referenceName\":\"ugwespscvsmsp\",\"parameters\":{\"upcvq\":\"datawozfvza\",\"cgmlmpn\":\"dataxcvwioqhc\"}},\"parameters\":{\"hdqseyp\":{\"type\":\"Array\",\"defaultValue\":\"dataiarz\"}},\"annotations\":[\"dataajpuyx\",\"dataa\",\"datarmzgccynb\",\"datavmsiehedm\"],\"folder\":{\"name\":\"neeyrxparxtz\"},\"\":{\"lzdssi\":\"datawddigebls\",\"p\":\"datawveeozbjkj\",\"dyw\":\"datazdnuehxwltss\"}}") + "{\"type\":\"ypkcpwsrqnn\",\"typeProperties\":{\"relativeUrl\":\"datavwkryzgav\",\"requestMethod\":\"datandm\",\"requestBody\":\"dataiekkiskyyyaekn\",\"additionalHeaders\":{\"jiutfofhoajj\":\"datayshdawjlmlcufb\"},\"paginationRules\":{\"quyhbceev\":\"datayqyjnufzvl\",\"dssijuaxxf\":\"datagirrpwnqtvuxeuj\"}},\"description\":\"u\",\"structure\":\"datatl\",\"schema\":\"dataltjhb\",\"linkedServiceName\":{\"referenceName\":\"ycgqakcsihxvt\",\"parameters\":{\"pxpry\":\"datawf\",\"ahtqmmk\":\"datansbubwhzqqgugwlu\",\"qwebagm\":\"datahwq\",\"rgvypa\":\"datapkephujeucosvkke\"}},\"parameters\":{\"ezfpffbuqxkn\":{\"type\":\"SecureString\",\"defaultValue\":\"dataillgnu\"},\"bg\":{\"type\":\"Float\",\"defaultValue\":\"datag\"},\"zoksgqhb\":{\"type\":\"String\",\"defaultValue\":\"datajfchicpare\"}},\"annotations\":[\"datauxilozb\",\"datakcr\",\"datal\"],\"folder\":{\"name\":\"jw\"},\"\":{\"hfgmuxuqiagsko\":\"dataptsflotumbmwgft\"}}") .toObject(RestResourceDataset.class); - Assertions.assertEquals("sl", model.description()); - Assertions.assertEquals("ugwespscvsmsp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hdqseyp").type()); - Assertions.assertEquals("neeyrxparxtz", model.folder().name()); + Assertions.assertEquals("u", model.description()); + Assertions.assertEquals("ycgqakcsihxvt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ezfpffbuqxkn").type()); + Assertions.assertEquals("jw", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RestResourceDataset model - = new RestResourceDataset().withDescription("sl").withStructure("datarml").withSchema("dataiekhjgqq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ugwespscvsmsp") - .withParameters(mapOf("upcvq", "datawozfvza", "cgmlmpn", "dataxcvwioqhc"))) - .withParameters(mapOf("hdqseyp", - new 
ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataiarz"))) - .withAnnotations(Arrays.asList("dataajpuyx", "dataa", "datarmzgccynb", "datavmsiehedm")) - .withFolder(new DatasetFolder().withName("neeyrxparxtz")).withRelativeUrl("databbfpxxa") - .withRequestMethod("dataoz").withRequestBody("datagsnuhwy") - .withAdditionalHeaders( - mapOf("axcebnbeosk", "dataf", "v", "datamqqerwqxpj", "zmhytebjkjgee", "datamdfkhttuobr")) - .withPaginationRules(mapOf("twofxfmhlvy", "datafmabvbmn")); + RestResourceDataset model = new RestResourceDataset().withDescription("u") + .withStructure("datatl") + .withSchema("dataltjhb") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ycgqakcsihxvt") + .withParameters(mapOf("pxpry", "datawf", "ahtqmmk", "datansbubwhzqqgugwlu", "qwebagm", "datahwq", + "rgvypa", "datapkephujeucosvkke"))) + .withParameters(mapOf("ezfpffbuqxkn", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataillgnu"), "bg", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datag"), "zoksgqhb", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datajfchicpare"))) + .withAnnotations(Arrays.asList("datauxilozb", "datakcr", "datal")) + .withFolder(new DatasetFolder().withName("jw")) + .withRelativeUrl("datavwkryzgav") + .withRequestMethod("datandm") + .withRequestBody("dataiekkiskyyyaekn") + .withAdditionalHeaders(mapOf("jiutfofhoajj", "datayshdawjlmlcufb")) + .withPaginationRules(mapOf("quyhbceev", "datayqyjnufzvl", "dssijuaxxf", "datagirrpwnqtvuxeuj")); model = BinaryData.fromObject(model).toObject(RestResourceDataset.class); - Assertions.assertEquals("sl", model.description()); - Assertions.assertEquals("ugwespscvsmsp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("hdqseyp").type()); - Assertions.assertEquals("neeyrxparxtz", model.folder().name()); + Assertions.assertEquals("u", model.description()); + Assertions.assertEquals("ycgqakcsihxvt", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ezfpffbuqxkn").type()); + Assertions.assertEquals("jw", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java index 1cfb64f13b45e..b80f567e570c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java @@ -13,17 +13,18 @@ public final class RestResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RestResourceDatasetTypeProperties model = BinaryData.fromString( - 
"{\"relativeUrl\":\"dataklgerxactsawv\",\"requestMethod\":\"dataimpthj\",\"requestBody\":\"dataplzmslubnk\",\"additionalHeaders\":{\"lfswarmybwmro\":\"datauysjhvrr\",\"cbfnxiajuv\":\"datageysyqnipehfw\"},\"paginationRules\":{\"zguaxfhvjixgofqd\":\"datafjisosfzlnraxnf\",\"jmi\":\"dataw\",\"ntlydprpensbmzj\":\"datauvrqpbxdoicqp\",\"hbfexmizz\":\"dataitukoy\"}}") + "{\"relativeUrl\":\"datauqirh\",\"requestMethod\":\"dataxpaowkgvnlfueyx\",\"requestBody\":\"dataibrlrjugcfeb\",\"additionalHeaders\":{\"bvr\":\"datace\",\"lsxr\":\"datalldfknbdzw\"},\"paginationRules\":{\"lwrduxntpfxxgja\":\"dataj\",\"eyfbkqynlzx\":\"datayxfwfctq\",\"vfoy\":\"datamelupjckiehd\"}}") .toObject(RestResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RestResourceDatasetTypeProperties model = new RestResourceDatasetTypeProperties() - .withRelativeUrl("dataklgerxactsawv").withRequestMethod("dataimpthj").withRequestBody("dataplzmslubnk") - .withAdditionalHeaders(mapOf("lfswarmybwmro", "datauysjhvrr", "cbfnxiajuv", "datageysyqnipehfw")) - .withPaginationRules(mapOf("zguaxfhvjixgofqd", "datafjisosfzlnraxnf", "jmi", "dataw", "ntlydprpensbmzj", - "datauvrqpbxdoicqp", "hbfexmizz", "dataitukoy")); + RestResourceDatasetTypeProperties model = new RestResourceDatasetTypeProperties().withRelativeUrl("datauqirh") + .withRequestMethod("dataxpaowkgvnlfueyx") + .withRequestBody("dataibrlrjugcfeb") + .withAdditionalHeaders(mapOf("bvr", "datace", "lsxr", "datalldfknbdzw")) + .withPaginationRules( + mapOf("lwrduxntpfxxgja", "dataj", "eyfbkqynlzx", "datayxfwfctq", "vfoy", "datamelupjckiehd")); model = BinaryData.fromObject(model).toObject(RestResourceDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java index bbe699dfe0fd6..52aa0cb40c48d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java @@ -11,18 +11,23 @@ public final class RestSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RestSink model = BinaryData.fromString( - "{\"type\":\"RestSink\",\"requestMethod\":\"datawwtlerhpfrarqnj\",\"additionalHeaders\":\"datahsxhtvnq\",\"httpRequestTimeout\":\"datarrgmlw\",\"requestInterval\":\"datam\",\"httpCompressionType\":\"datacsddlcnwbijxf\",\"writeBatchSize\":\"datageffrghwdmr\",\"writeBatchTimeout\":\"datahrr\",\"sinkRetryCount\":\"datavdrggucwa\",\"sinkRetryWait\":\"datam\",\"maxConcurrentConnections\":\"dataklzomdfcp\",\"disableMetricsCollection\":\"dataimijzhrbs\",\"\":{\"qfbgeblp\":\"dataublouelf\",\"swgfjrg\":\"datawckmnpzubzq\"}}") + "{\"type\":\"lmytnhvy\",\"requestMethod\":\"datagvqioqrebwarljpl\",\"additionalHeaders\":\"dataemxcdreqaqvs\",\"httpRequestTimeout\":\"datayvearwt\",\"requestInterval\":\"databscwbiwvwmcrhy\",\"httpCompressionType\":\"datas\",\"writeBatchSize\":\"datafe\",\"writeBatchTimeout\":\"dataxgstiawywppq\",\"sinkRetryCount\":\"datajxbdyczplmljcisx\",\"sinkRetryWait\":\"datas\",\"maxConcurrentConnections\":\"datayt\",\"disableMetricsCollection\":\"datamufdynhqlzanta\",\"\":{\"oadwiqnsmpfeyjvl\":\"datakxsjympsx\"}}") 
.toObject(RestSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RestSink model = new RestSink().withWriteBatchSize("datageffrghwdmr").withWriteBatchTimeout("datahrr") - .withSinkRetryCount("datavdrggucwa").withSinkRetryWait("datam") - .withMaxConcurrentConnections("dataklzomdfcp").withDisableMetricsCollection("dataimijzhrbs") - .withRequestMethod("datawwtlerhpfrarqnj").withAdditionalHeaders("datahsxhtvnq") - .withHttpRequestTimeout("datarrgmlw").withRequestInterval("datam") - .withHttpCompressionType("datacsddlcnwbijxf"); + RestSink model = new RestSink().withWriteBatchSize("datafe") + .withWriteBatchTimeout("dataxgstiawywppq") + .withSinkRetryCount("datajxbdyczplmljcisx") + .withSinkRetryWait("datas") + .withMaxConcurrentConnections("datayt") + .withDisableMetricsCollection("datamufdynhqlzanta") + .withRequestMethod("datagvqioqrebwarljpl") + .withAdditionalHeaders("dataemxcdreqaqvs") + .withHttpRequestTimeout("datayvearwt") + .withRequestInterval("databscwbiwvwmcrhy") + .withHttpCompressionType("datas"); model = BinaryData.fromObject(model).toObject(RestSink.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java index 6b7b4373b04fd..8bc9c7951e6dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java @@ -11,18 +11,23 @@ public final class RestSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RestSource model = BinaryData.fromString( - "{\"type\":\"RestSource\",\"requestMethod\":\"datacpbshqzzlcfe\",\"requestBody\":\"dataryxnklfswzsyigx\",\"additionalHeaders\":\"dataxhygc\",\"paginationRules\":\"databapeuqyz\",\"httpRequestTimeout\":\"datasuopcdiaossp\",\"requestInterval\":\"datatgkmrsqaqgllnhgi\",\"additionalColumns\":\"datawzzk\",\"sourceRetryCount\":\"dataqrngl\",\"sourceRetryWait\":\"datatu\",\"maxConcurrentConnections\":\"datafwdkpadktsyy\",\"disableMetricsCollection\":\"dataojrfqtfk\",\"\":{\"xokiffqpwdyzset\":\"datapmdajqpdvvzbej\",\"oij\":\"datamvtqhn\",\"g\":\"datacprkqywyb\",\"lisvqfblsizxpolp\":\"datayomsetzc\"}}") + "{\"type\":\"pxoelfobehr\",\"requestMethod\":\"dataqikcork\",\"requestBody\":\"dataobobxfhtb\",\"additionalHeaders\":\"datavwzjycgcie\",\"paginationRules\":\"datax\",\"httpRequestTimeout\":\"datazvnghtknr\",\"requestInterval\":\"datahysnmyuvf\",\"additionalColumns\":\"datacnrapxw\",\"sourceRetryCount\":\"dataglojjcziytf\",\"sourceRetryWait\":\"datavirmbr\",\"maxConcurrentConnections\":\"datagnqa\",\"disableMetricsCollection\":\"datak\",\"\":{\"dzvuhw\":\"dataybh\"}}") .toObject(RestSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RestSource model = new RestSource().withSourceRetryCount("dataqrngl").withSourceRetryWait("datatu") - .withMaxConcurrentConnections("datafwdkpadktsyy").withDisableMetricsCollection("dataojrfqtfk") - .withRequestMethod("datacpbshqzzlcfe").withRequestBody("dataryxnklfswzsyigx") - .withAdditionalHeaders("dataxhygc").withPaginationRules("databapeuqyz") - .withHttpRequestTimeout("datasuopcdiaossp").withRequestInterval("datatgkmrsqaqgllnhgi") - 
.withAdditionalColumns("datawzzk"); + RestSource model = new RestSource().withSourceRetryCount("dataglojjcziytf") + .withSourceRetryWait("datavirmbr") + .withMaxConcurrentConnections("datagnqa") + .withDisableMetricsCollection("datak") + .withRequestMethod("dataqikcork") + .withRequestBody("dataobobxfhtb") + .withAdditionalHeaders("datavwzjycgcie") + .withPaginationRules("datax") + .withHttpRequestTimeout("datazvnghtknr") + .withRequestInterval("datahysnmyuvf") + .withAdditionalColumns("datacnrapxw"); model = BinaryData.fromObject(model).toObject(RestSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java index f4e79866f6570..0a287c1c5111e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java @@ -11,15 +11,15 @@ public final class RetryPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - RetryPolicy model = BinaryData.fromString("{\"count\":\"datazws\",\"intervalInSeconds\":1398360325}") + RetryPolicy model = BinaryData.fromString("{\"count\":\"dataptnls\",\"intervalInSeconds\":1168799036}") .toObject(RetryPolicy.class); - Assertions.assertEquals(1398360325, model.intervalInSeconds()); + Assertions.assertEquals(1168799036, model.intervalInSeconds()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RetryPolicy model = new RetryPolicy().withCount("datazws").withIntervalInSeconds(1398360325); + RetryPolicy model = new RetryPolicy().withCount("dataptnls").withIntervalInSeconds(1168799036); model = BinaryData.fromObject(model).toObject(RetryPolicy.class); - Assertions.assertEquals(1398360325, model.intervalInSeconds()); + Assertions.assertEquals(1168799036, model.intervalInSeconds()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java index b5c8ffd61138d..6e4da5d9a5cca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java @@ -15,21 +15,21 @@ public final class RunQueryFilterTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { RunQueryFilter model = BinaryData.fromString( - "{\"operand\":\"TriggerName\",\"operator\":\"Equals\",\"values\":[\"vewzcj\",\"nmwcpmgu\",\"adraufactkahzo\",\"ajjziuxxpshne\"]}") + "{\"operand\":\"TriggerRunTimestamp\",\"operator\":\"NotEquals\",\"values\":[\"qytibyowbblgyavu\",\"pthjoxo\",\"smsks\",\"pi\"]}") .toObject(RunQueryFilter.class); - Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_NAME, model.operand()); - Assertions.assertEquals(RunQueryFilterOperator.EQUALS, model.operator()); - Assertions.assertEquals("vewzcj", model.values().get(0)); + Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_RUN_TIMESTAMP, 
model.operand()); + Assertions.assertEquals(RunQueryFilterOperator.NOT_EQUALS, model.operator()); + Assertions.assertEquals("qytibyowbblgyavu", model.values().get(0)); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - RunQueryFilter model = new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) - .withOperator(RunQueryFilterOperator.EQUALS) - .withValues(Arrays.asList("vewzcj", "nmwcpmgu", "adraufactkahzo", "ajjziuxxpshne")); + RunQueryFilter model = new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_RUN_TIMESTAMP) + .withOperator(RunQueryFilterOperator.NOT_EQUALS) + .withValues(Arrays.asList("qytibyowbblgyavu", "pthjoxo", "smsks", "pi")); model = BinaryData.fromObject(model).toObject(RunQueryFilter.class); - Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_NAME, model.operand()); - Assertions.assertEquals(RunQueryFilterOperator.EQUALS, model.operator()); - Assertions.assertEquals("vewzcj", model.values().get(0)); + Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_RUN_TIMESTAMP, model.operand()); + Assertions.assertEquals(RunQueryFilterOperator.NOT_EQUALS, model.operator()); + Assertions.assertEquals("qytibyowbblgyavu", model.values().get(0)); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java index 21f7fa250ff90..7f9b55c25d16a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java @@ -13,18 +13,18 @@ public final class RunQueryOrderByTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - RunQueryOrderBy model - = BinaryData.fromString("{\"orderBy\":\"ActivityName\",\"order\":\"ASC\"}").toObject(RunQueryOrderBy.class); - Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_NAME, model.orderBy()); + RunQueryOrderBy model = BinaryData.fromString("{\"orderBy\":\"ActivityRunStart\",\"order\":\"ASC\"}") + .toObject(RunQueryOrderBy.class); + Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_RUN_START, model.orderBy()); Assertions.assertEquals(RunQueryOrder.ASC, model.order()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { RunQueryOrderBy model - = new RunQueryOrderBy().withOrderBy(RunQueryOrderByField.ACTIVITY_NAME).withOrder(RunQueryOrder.ASC); + = new RunQueryOrderBy().withOrderBy(RunQueryOrderByField.ACTIVITY_RUN_START).withOrder(RunQueryOrder.ASC); model = BinaryData.fromObject(model).toObject(RunQueryOrderBy.class); - Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_NAME, model.orderBy()); + Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_RUN_START, model.orderBy()); Assertions.assertEquals(RunQueryOrder.ASC, model.order()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java index 8f5465f7af1ec..25085d6a9a6ab 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java @@ -19,31 +19,35 @@ public final class SalesforceMarketingCloudObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceMarketingCloudObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceMarketingCloudObject\",\"typeProperties\":{\"tableName\":\"dataktwomlpczlqboomz\"},\"description\":\"rolhsfddk\",\"structure\":\"datavevwxmnbw\",\"schema\":\"dataa\",\"linkedServiceName\":{\"referenceName\":\"xgnpyhtu\",\"parameters\":{\"aokex\":\"datapqild\"}},\"parameters\":{\"gtz\":{\"type\":\"String\",\"defaultValue\":\"datatkqjarlazb\"},\"oujfgtgxuupczegq\":{\"type\":\"Object\",\"defaultValue\":\"datatrm\"}},\"annotations\":[\"datadvssvg\",\"dataoggkztzttjnknpb\",\"datagzkuobclobn\",\"dataqe\"],\"folder\":{\"name\":\"liqlyugp\"},\"\":{\"yiqywlpxmli\":\"datazjmkffeonmnvmu\",\"ekbirhyvsyuv\":\"datatdegcrunbkilxs\",\"gio\":\"dataiemorszffiukltr\"}}") + "{\"type\":\"jhgvte\",\"typeProperties\":{\"tableName\":\"datajpgbml\"},\"description\":\"runudmakkshrna\",\"structure\":\"dataczkwohdigeyuocf\",\"schema\":\"dataamodw\",\"linkedServiceName\":{\"referenceName\":\"baktvxer\",\"parameters\":{\"eonmtojxgdo\":\"datazvrnnbegra\"}},\"parameters\":{\"qsjknaqszbwgpmdm\":{\"type\":\"Array\",\"defaultValue\":\"datak\"},\"dargkwim\":{\"type\":\"Bool\",\"defaultValue\":\"dataevveswghhbqqhd\"},\"uwczzcujwxvbk\":{\"type\":\"Float\",\"defaultValue\":\"dataeeeuq\"}},\"annotations\":[\"datak\",\"datahfwlajwdajydj\",\"datajgi\",\"datavspewyzhydtkbmtr\"],\"folder\":{\"name\":\"lviaigarma\"},\"\":{\"ysclwbjgiynqr\":\"datagcneviccwb\",\"weofvsxauphzefi\":\"dataoi\",\"gtiivzkd\":\"dataeyydx\",\"ywmwtacrscfc\":\"dataexccwldgfq\"}}") .toObject(SalesforceMarketingCloudObjectDataset.class); - Assertions.assertEquals("rolhsfddk", model.description()); - Assertions.assertEquals("xgnpyhtu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gtz").type()); - Assertions.assertEquals("liqlyugp", model.folder().name()); + Assertions.assertEquals("runudmakkshrna", model.description()); + Assertions.assertEquals("baktvxer", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("qsjknaqszbwgpmdm").type()); + Assertions.assertEquals("lviaigarma", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceMarketingCloudObjectDataset model = new SalesforceMarketingCloudObjectDataset() - .withDescription("rolhsfddk").withStructure("datavevwxmnbw").withSchema("dataa") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("xgnpyhtu").withParameters(mapOf("aokex", "datapqild"))) - .withParameters(mapOf("gtz", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatkqjarlazb"), - "oujfgtgxuupczegq", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datatrm"))) - .withAnnotations(Arrays.asList("datadvssvg", "dataoggkztzttjnknpb", "datagzkuobclobn", "dataqe")) - .withFolder(new DatasetFolder().withName("liqlyugp")).withTableName("dataktwomlpczlqboomz"); + SalesforceMarketingCloudObjectDataset model + = new 
SalesforceMarketingCloudObjectDataset().withDescription("runudmakkshrna") + .withStructure("dataczkwohdigeyuocf") + .withSchema("dataamodw") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("baktvxer") + .withParameters(mapOf("eonmtojxgdo", "datazvrnnbegra"))) + .withParameters(mapOf("qsjknaqszbwgpmdm", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datak"), "dargkwim", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataevveswghhbqqhd"), + "uwczzcujwxvbk", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataeeeuq"))) + .withAnnotations(Arrays.asList("datak", "datahfwlajwdajydj", "datajgi", "datavspewyzhydtkbmtr")) + .withFolder(new DatasetFolder().withName("lviaigarma")) + .withTableName("datajpgbml"); model = BinaryData.fromObject(model).toObject(SalesforceMarketingCloudObjectDataset.class); - Assertions.assertEquals("rolhsfddk", model.description()); - Assertions.assertEquals("xgnpyhtu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("gtz").type()); - Assertions.assertEquals("liqlyugp", model.folder().name()); + Assertions.assertEquals("runudmakkshrna", model.description()); + Assertions.assertEquals("baktvxer", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("qsjknaqszbwgpmdm").type()); + Assertions.assertEquals("lviaigarma", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java index 862fb36b66292..579ac067a0d22 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java @@ -11,16 +11,19 @@ public final class SalesforceMarketingCloudSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceMarketingCloudSource model = BinaryData.fromString( - "{\"type\":\"SalesforceMarketingCloudSource\",\"query\":\"dataiwvwmc\",\"queryTimeout\":\"datayoestplmytnhvyj\",\"additionalColumns\":\"dataerh\",\"sourceRetryCount\":\"datastiawywppq\",\"sourceRetryWait\":\"datajxbdyczplmljcisx\",\"maxConcurrentConnections\":\"datas\",\"disableMetricsCollection\":\"datayt\",\"\":{\"zantahuykxsjymps\":\"dataufdynhq\",\"adwiqnsmpfeyjvl\":\"datam\",\"ryoleqikcorkem\":\"dataqsy\",\"htbtuvwz\":\"databobx\"}}") + "{\"type\":\"jpiecnrivsiwws\",\"query\":\"datawlsrxytevzqxpmf\",\"queryTimeout\":\"dataxqqrmckqmds\",\"additionalColumns\":\"datautbymlzgkzhbn\",\"sourceRetryCount\":\"datasuhki\",\"sourceRetryWait\":\"datauvpcjyh\",\"maxConcurrentConnections\":\"datanmjtan\",\"disableMetricsCollection\":\"datarrnqloomsyw\",\"\":{\"vknquipipgvfch\":\"dataaskapg\"}}") .toObject(SalesforceMarketingCloudSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceMarketingCloudSource model = new SalesforceMarketingCloudSource() - .withSourceRetryCount("datastiawywppq").withSourceRetryWait("datajxbdyczplmljcisx") - 
.withMaxConcurrentConnections("datas").withDisableMetricsCollection("datayt") - .withQueryTimeout("datayoestplmytnhvyj").withAdditionalColumns("dataerh").withQuery("dataiwvwmc"); + SalesforceMarketingCloudSource model = new SalesforceMarketingCloudSource().withSourceRetryCount("datasuhki") + .withSourceRetryWait("datauvpcjyh") + .withMaxConcurrentConnections("datanmjtan") + .withDisableMetricsCollection("datarrnqloomsyw") + .withQueryTimeout("dataxqqrmckqmds") + .withAdditionalColumns("datautbymlzgkzhbn") + .withQuery("datawlsrxytevzqxpmf"); model = BinaryData.fromObject(model).toObject(SalesforceMarketingCloudSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java index fda6e8d790f0e..c3e25dfab6ad6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java @@ -19,30 +19,38 @@ public final class SalesforceObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceObject\",\"typeProperties\":{\"objectApiName\":\"datahgbgbhudh\"},\"description\":\"jimvrrq\",\"structure\":\"databpk\",\"schema\":\"dataamrlfizjuddnd\",\"linkedServiceName\":{\"referenceName\":\"hupngyhylqyafew\",\"parameters\":{\"xnxrqxrtzeargv\":\"datadxwuuy\"}},\"parameters\":{\"eignybffqc\":{\"type\":\"Int\",\"defaultValue\":\"datajhmvpjxsdh\"},\"tvmwgvconyse\":{\"type\":\"Bool\",\"defaultValue\":\"datanep\"}},\"annotations\":[\"datajfhpxnikouo\",\"datafalo\",\"databskkypor\",\"dataynieunbydlgfaphw\"],\"folder\":{\"name\":\"wtsaynrtvj\"},\"\":{\"hsdbfbm\":\"dataeeoxvqjmrnbl\",\"zmiaoaweacf\":\"dataivixzhpjg\"}}") + "{\"type\":\"fnhcg\",\"typeProperties\":{\"objectApiName\":\"dataxuyxsxteuikhzn\"},\"description\":\"qsrmrfqderk\",\"structure\":\"datasdcobpmgqlwy\",\"schema\":\"datanbbyzpo\",\"linkedServiceName\":{\"referenceName\":\"zfutgpbygbnb\",\"parameters\":{\"ewflwzhxzuxe\":\"dataiqgtzpv\"}},\"parameters\":{\"jdajdqxymxxyfrd\":{\"type\":\"Object\",\"defaultValue\":\"datalrkqsqvvdkfp\"},\"igwouppvyddqsvc\":{\"type\":\"String\",\"defaultValue\":\"datacetfvgwfwsl\"},\"tfxxepzpxzxlcqz\":{\"type\":\"Int\",\"defaultValue\":\"datanxfrppwwqclmd\"}},\"annotations\":[\"dataiti\"],\"folder\":{\"name\":\"jbsmkirpqni\"},\"\":{\"kcomeobwkeuzlten\":\"datam\"}}") .toObject(SalesforceObjectDataset.class); - Assertions.assertEquals("jimvrrq", model.description()); - Assertions.assertEquals("hupngyhylqyafew", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("eignybffqc").type()); - Assertions.assertEquals("wtsaynrtvj", model.folder().name()); + Assertions.assertEquals("qsrmrfqderk", model.description()); + Assertions.assertEquals("zfutgpbygbnb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("jdajdqxymxxyfrd").type()); + Assertions.assertEquals("jbsmkirpqni", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceObjectDataset 
model = new SalesforceObjectDataset().withDescription("jimvrrq") - .withStructure("databpk").withSchema("dataamrlfizjuddnd") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hupngyhylqyafew") - .withParameters(mapOf("xnxrqxrtzeargv", "datadxwuuy"))) - .withParameters(mapOf("eignybffqc", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datajhmvpjxsdh"), - "tvmwgvconyse", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datanep"))) - .withAnnotations(Arrays.asList("datajfhpxnikouo", "datafalo", "databskkypor", "dataynieunbydlgfaphw")) - .withFolder(new DatasetFolder().withName("wtsaynrtvj")).withObjectApiName("datahgbgbhudh"); + SalesforceObjectDataset model + = new SalesforceObjectDataset().withDescription("qsrmrfqderk") + .withStructure("datasdcobpmgqlwy") + .withSchema("datanbbyzpo") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zfutgpbygbnb") + .withParameters(mapOf("ewflwzhxzuxe", "dataiqgtzpv"))) + .withParameters( + mapOf("jdajdqxymxxyfrd", + new ParameterSpecification().withType(ParameterType.OBJECT) + .withDefaultValue("datalrkqsqvvdkfp"), + "igwouppvyddqsvc", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datacetfvgwfwsl"), + "tfxxepzpxzxlcqz", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datanxfrppwwqclmd"))) + .withAnnotations(Arrays.asList("dataiti")) + .withFolder(new DatasetFolder().withName("jbsmkirpqni")) + .withObjectApiName("dataxuyxsxteuikhzn"); model = BinaryData.fromObject(model).toObject(SalesforceObjectDataset.class); - Assertions.assertEquals("jimvrrq", model.description()); - Assertions.assertEquals("hupngyhylqyafew", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("eignybffqc").type()); - Assertions.assertEquals("wtsaynrtvj", model.folder().name()); + Assertions.assertEquals("qsrmrfqderk", model.description()); + Assertions.assertEquals("zfutgpbygbnb", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("jdajdqxymxxyfrd").type()); + Assertions.assertEquals("jbsmkirpqni", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java index 2aaa58786aa30..014cfe25d9e61 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java @@ -10,15 +10,14 @@ public final class SalesforceObjectDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SalesforceObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"dataubuhruetcnxriqz\"}") - .toObject(SalesforceObjectDatasetTypeProperties.class); + SalesforceObjectDatasetTypeProperties model = BinaryData.fromString("{\"objectApiName\":\"datafxlmxozesnd\"}") + .toObject(SalesforceObjectDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void 
testSerialize() throws Exception { SalesforceObjectDatasetTypeProperties model - = new SalesforceObjectDatasetTypeProperties().withObjectApiName("dataubuhruetcnxriqz"); + = new SalesforceObjectDatasetTypeProperties().withObjectApiName("datafxlmxozesnd"); model = BinaryData.fromObject(model).toObject(SalesforceObjectDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java index 360168fcd5a98..f599a36dbce8a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java @@ -19,31 +19,34 @@ public final class SalesforceServiceCloudObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudObject\",\"typeProperties\":{\"objectApiName\":\"datak\"},\"description\":\"qdrrj\",\"structure\":\"datar\",\"schema\":\"datawobwxrxm\",\"linkedServiceName\":{\"referenceName\":\"okohlsfj\",\"parameters\":{\"huv\":\"dataqjpzhe\",\"dmjhymudjma\":\"dataqxqkv\"}},\"parameters\":{\"yqkaaptb\":{\"type\":\"Array\",\"defaultValue\":\"databhsermclyqwwu\"},\"bptw\":{\"type\":\"Array\",\"defaultValue\":\"datakb\"},\"u\":{\"type\":\"Bool\",\"defaultValue\":\"dataoc\"}},\"annotations\":[\"dataxzbnss\",\"datavqnpszbeuybut\",\"datadzjfjtvpeyxdyuxu\"],\"folder\":{\"name\":\"ltqmmij\"},\"\":{\"xgwpq\":\"datafkwnaeikczscymqf\",\"mzapdokez\":\"datay\",\"knfzqnzbflbqmhb\":\"datape\",\"ea\":\"datayxxvwedhagqbbse\"}}") + "{\"type\":\"ishyfmrzc\",\"typeProperties\":{\"objectApiName\":\"dataxymckikkqyvur\"},\"description\":\"evnkyakck\",\"structure\":\"datah\",\"schema\":\"datansddjkkd\",\"linkedServiceName\":{\"referenceName\":\"desuazogfcn\",\"parameters\":{\"umtcqxmyvkxixypa\":\"datag\",\"g\":\"dataifjc\",\"czbyfkocgm\":\"datahvpsuwichm\",\"gxrolw\":\"datadctsnlwscrngt\"}},\"parameters\":{\"ish\":{\"type\":\"SecureString\",\"defaultValue\":\"datadksutacuctiha\"}},\"annotations\":[\"datahxvpmqququxlp\",\"datan\",\"datarxlexoweorocr\"],\"folder\":{\"name\":\"gy\"},\"\":{\"tvijvwmrgcnz\":\"databqpfyrv\"}}") .toObject(SalesforceServiceCloudObjectDataset.class); - Assertions.assertEquals("qdrrj", model.description()); - Assertions.assertEquals("okohlsfj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("yqkaaptb").type()); - Assertions.assertEquals("ltqmmij", model.folder().name()); + Assertions.assertEquals("evnkyakck", model.description()); + Assertions.assertEquals("desuazogfcn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ish").type()); + Assertions.assertEquals("gy", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceServiceCloudObjectDataset model = new SalesforceServiceCloudObjectDataset().withDescription("qdrrj") - .withStructure("datar").withSchema("datawobwxrxm") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("okohlsfj") - .withParameters(mapOf("huv", "dataqjpzhe", "dmjhymudjma", "dataqxqkv"))) - .withParameters(mapOf("yqkaaptb", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("databhsermclyqwwu"), - "bptw", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakb"), "u", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataoc"))) - .withAnnotations(Arrays.asList("dataxzbnss", "datavqnpszbeuybut", "datadzjfjtvpeyxdyuxu")) - .withFolder(new DatasetFolder().withName("ltqmmij")).withObjectApiName("datak"); + SalesforceServiceCloudObjectDataset model + = new SalesforceServiceCloudObjectDataset().withDescription("evnkyakck") + .withStructure("datah") + .withSchema("datansddjkkd") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("desuazogfcn") + .withParameters(mapOf("umtcqxmyvkxixypa", "datag", "g", "dataifjc", "czbyfkocgm", "datahvpsuwichm", + "gxrolw", "datadctsnlwscrngt"))) + .withParameters(mapOf("ish", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datadksutacuctiha"))) + .withAnnotations(Arrays.asList("datahxvpmqququxlp", "datan", "datarxlexoweorocr")) + .withFolder(new DatasetFolder().withName("gy")) + .withObjectApiName("dataxymckikkqyvur"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudObjectDataset.class); - Assertions.assertEquals("qdrrj", model.description()); - Assertions.assertEquals("okohlsfj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("yqkaaptb").type()); - Assertions.assertEquals("ltqmmij", model.folder().name()); + Assertions.assertEquals("evnkyakck", model.description()); + Assertions.assertEquals("desuazogfcn", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ish").type()); + Assertions.assertEquals("gy", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java index 06283514f0829..f3309a0eeac1d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class SalesforceServiceCloudObjectDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"dataflmsy\"}") + = BinaryData.fromString("{\"objectApiName\":\"dataplcxfmbzquuutqm\"}") .toObject(SalesforceServiceCloudObjectDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SalesforceServiceCloudObjectDatasetTypeProperties model - = new SalesforceServiceCloudObjectDatasetTypeProperties().withObjectApiName("dataflmsy"); + = new 
SalesforceServiceCloudObjectDatasetTypeProperties().withObjectApiName("dataplcxfmbzquuutqm"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudObjectDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java index daf2199ed0259..7942dc3fccea9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java @@ -13,19 +13,23 @@ public final class SalesforceServiceCloudSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudSink model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudSink\",\"writeBehavior\":\"Insert\",\"externalIdFieldName\":\"dataskjbasmrdpbmoq\",\"ignoreNullValues\":\"datavukgfzbykapmeo\",\"writeBatchSize\":\"datapvma\",\"writeBatchTimeout\":\"datatgpnyu\",\"sinkRetryCount\":\"datawymyewb\",\"sinkRetryWait\":\"dataxwv\",\"maxConcurrentConnections\":\"datatjsnjbahxyfd\",\"disableMetricsCollection\":\"databtksrdjhqcrmptj\",\"\":{\"pjracyxnzadfl\":\"dataaw\"}}") + "{\"type\":\"af\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datakdpnq\",\"ignoreNullValues\":\"dataprlr\",\"writeBatchSize\":\"datacue\",\"writeBatchTimeout\":\"datasqmzeelitqvqyini\",\"sinkRetryCount\":\"datamccf\",\"sinkRetryWait\":\"databytzm\",\"maxConcurrentConnections\":\"datamesdcmgmv\",\"disableMetricsCollection\":\"datanfdhyr\",\"\":{\"uj\":\"dataaapsgyyufhcfeggy\"}}") .toObject(SalesforceServiceCloudSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.INSERT, model.writeBehavior()); + Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceServiceCloudSink model = new SalesforceServiceCloudSink().withWriteBatchSize("datapvma") - .withWriteBatchTimeout("datatgpnyu").withSinkRetryCount("datawymyewb").withSinkRetryWait("dataxwv") - .withMaxConcurrentConnections("datatjsnjbahxyfd").withDisableMetricsCollection("databtksrdjhqcrmptj") - .withWriteBehavior(SalesforceSinkWriteBehavior.INSERT).withExternalIdFieldName("dataskjbasmrdpbmoq") - .withIgnoreNullValues("datavukgfzbykapmeo"); + SalesforceServiceCloudSink model = new SalesforceServiceCloudSink().withWriteBatchSize("datacue") + .withWriteBatchTimeout("datasqmzeelitqvqyini") + .withSinkRetryCount("datamccf") + .withSinkRetryWait("databytzm") + .withMaxConcurrentConnections("datamesdcmgmv") + .withDisableMetricsCollection("datanfdhyr") + .withWriteBehavior(SalesforceSinkWriteBehavior.UPSERT) + .withExternalIdFieldName("datakdpnq") + .withIgnoreNullValues("dataprlr"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.INSERT, model.writeBehavior()); + Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java index 3e285c2ef1f89..f894bb02fdc64 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java @@ -11,16 +11,20 @@ public final class SalesforceServiceCloudSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudSource model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudSource\",\"query\":\"dataprf\",\"readBehavior\":\"datahfv\",\"additionalColumns\":\"datayqzhoikemhohxa\",\"sourceRetryCount\":\"dataxoowpoogozer\",\"sourceRetryWait\":\"datazvpbnkgkuujeqqjq\",\"maxConcurrentConnections\":\"datajkajlogvfnwq\",\"disableMetricsCollection\":\"datalvazkqkycg\",\"\":{\"c\":\"datawehjybboqyxi\"}}") + "{\"type\":\"egcg\",\"query\":\"datasz\",\"readBehavior\":\"datauqbuvpbeswgkre\",\"additionalColumns\":\"datapufkcamzcbzgikl\",\"sourceRetryCount\":\"datadpbsieymmcbikte\",\"sourceRetryWait\":\"datavqtcesvcsbyimyg\",\"maxConcurrentConnections\":\"dataduz\",\"disableMetricsCollection\":\"datanbzqweohml\",\"\":{\"tmhaerhxds\":\"dataxadmauanxzr\",\"welicrxbbqmoguy\":\"datakbrkhjj\",\"dxljjzdbzk\":\"datamselwszqveak\"}}") .toObject(SalesforceServiceCloudSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceServiceCloudSource model = new SalesforceServiceCloudSource().withSourceRetryCount("dataxoowpoogozer") - .withSourceRetryWait("datazvpbnkgkuujeqqjq").withMaxConcurrentConnections("datajkajlogvfnwq") - .withDisableMetricsCollection("datalvazkqkycg").withQuery("dataprf").withReadBehavior("datahfv") - .withAdditionalColumns("datayqzhoikemhohxa"); + SalesforceServiceCloudSource model + = new SalesforceServiceCloudSource().withSourceRetryCount("datadpbsieymmcbikte") + .withSourceRetryWait("datavqtcesvcsbyimyg") + .withMaxConcurrentConnections("dataduz") + .withDisableMetricsCollection("datanbzqweohml") + .withQuery("datasz") + .withReadBehavior("datauqbuvpbeswgkre") + .withAdditionalColumns("datapufkcamzcbzgikl"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java index f22195a78655d..3ed33c0323143 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java @@ -19,31 +19,34 @@ public final class SalesforceServiceCloudV2ObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudV2ObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"SalesforceServiceCloudV2Object\",\"typeProperties\":{\"objectApiName\":\"datadplvi\",\"reportId\":\"datagarmawokgcnev\"},\"description\":\"cwbqy\",\"structure\":\"datalwbj\",\"schema\":\"dataynqryoisweofv\",\"linkedServiceName\":{\"referenceName\":\"xauphzefineyy\",\"parameters\":{\"exccwldgfq\":\"datagtiivzkd\",\"crvjcull\":\"dataywmwtacrscfc\",\"owoszzwncsjgfxv\":\"datafwfpo\",\"uqhgnmsvjfgrpryy\":\"datahmuby\"}},\"parameters\":{\"kbuatxkznlwl\":{\"type\":\"Int\",\"defaultValue\":\"datajxjrbvy\"},\"x\":{\"type\":\"SecureString\",\"defaultValue\":\"dataogkevday\"}},\"annotations\":[\"dataiy\"],\"folder\":{\"name\":\"r\"},\"\":{\"lybspsbomt\":\"dataalrjwaez\",\"suiwexpasckpg\":\"dataepzimfc\",\"cmxtoejt\":\"datamlyxbwslx\"}}") + "{\"type\":\"bmljrjyfj\",\"typeProperties\":{\"objectApiName\":\"dataulbyzzcxs\",\"reportId\":\"dataaoymyckdpzb\"},\"description\":\"uxixkpsjldgnimqo\",\"structure\":\"datafq\",\"schema\":\"datajziqcso\",\"linkedServiceName\":{\"referenceName\":\"wq\",\"parameters\":{\"mgmgnddaxaog\":\"datadcdefqoe\",\"mbipysehyyb\":\"datakhcm\",\"bpgdiwdy\":\"databfjcvmk\"}},\"parameters\":{\"qwvcwce\":{\"type\":\"Bool\",\"defaultValue\":\"dataembr\"}},\"annotations\":[\"datatrgpd\",\"databtbxhyfwjf\",\"dataktuzrltpe\",\"datasdkbqfzbvttqjntv\"],\"folder\":{\"name\":\"pbhj\"},\"\":{\"xyeeafd\":\"datacupcyfrhooyvmv\"}}") .toObject(SalesforceServiceCloudV2ObjectDataset.class); - Assertions.assertEquals("cwbqy", model.description()); - Assertions.assertEquals("xauphzefineyy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("kbuatxkznlwl").type()); - Assertions.assertEquals("r", model.folder().name()); + Assertions.assertEquals("uxixkpsjldgnimqo", model.description()); + Assertions.assertEquals("wq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("qwvcwce").type()); + Assertions.assertEquals("pbhj", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SalesforceServiceCloudV2ObjectDataset model = new SalesforceServiceCloudV2ObjectDataset() - .withDescription("cwbqy").withStructure("datalwbj").withSchema("dataynqryoisweofv") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xauphzefineyy") - .withParameters(mapOf("exccwldgfq", "datagtiivzkd", "crvjcull", "dataywmwtacrscfc", "owoszzwncsjgfxv", - "datafwfpo", "uqhgnmsvjfgrpryy", "datahmuby"))) - .withParameters(mapOf("kbuatxkznlwl", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datajxjrbvy"), "x", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataogkevday"))) - .withAnnotations(Arrays.asList("dataiy")).withFolder(new DatasetFolder().withName("r")) - .withObjectApiName("datadplvi").withReportId("datagarmawokgcnev"); + .withDescription("uxixkpsjldgnimqo") + .withStructure("datafq") + .withSchema("datajziqcso") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wq") + .withParameters( + mapOf("mgmgnddaxaog", "datadcdefqoe", "mbipysehyyb", "datakhcm", "bpgdiwdy", "databfjcvmk"))) + .withParameters(mapOf("qwvcwce", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataembr"))) + .withAnnotations(Arrays.asList("datatrgpd", "databtbxhyfwjf", "dataktuzrltpe", "datasdkbqfzbvttqjntv")) + .withFolder(new DatasetFolder().withName("pbhj")) + .withObjectApiName("dataulbyzzcxs") + .withReportId("dataaoymyckdpzb"); model = 
BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2ObjectDataset.class); - Assertions.assertEquals("cwbqy", model.description()); - Assertions.assertEquals("xauphzefineyy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("kbuatxkznlwl").type()); - Assertions.assertEquals("r", model.folder().name()); + Assertions.assertEquals("uxixkpsjldgnimqo", model.description()); + Assertions.assertEquals("wq", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("qwvcwce").type()); + Assertions.assertEquals("pbhj", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java index 32654b496ff30..f319eb212c670 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java @@ -11,15 +11,15 @@ public final class SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudV2ObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"dataqct\",\"reportId\":\"dataidkxz\"}") + = BinaryData.fromString("{\"objectApiName\":\"datauwlynxzhgbs\",\"reportId\":\"dataxbhj\"}") .toObject(SalesforceServiceCloudV2ObjectDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SalesforceServiceCloudV2ObjectDatasetTypeProperties model - = new SalesforceServiceCloudV2ObjectDatasetTypeProperties().withObjectApiName("dataqct") - .withReportId("dataidkxz"); + = new SalesforceServiceCloudV2ObjectDatasetTypeProperties().withObjectApiName("datauwlynxzhgbs") + .withReportId("dataxbhj"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2ObjectDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java index 88b8acf390fe3..a9cf4d899f16a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java @@ -13,19 +13,22 @@ public final class SalesforceServiceCloudV2SinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudV2Sink model = BinaryData.fromString( - 
"{\"type\":\"SalesforceServiceCloudV2Sink\",\"writeBehavior\":\"Insert\",\"externalIdFieldName\":\"datayzbnkofcs\",\"ignoreNullValues\":\"datapwahehuc\",\"writeBatchSize\":\"datargpmgtjvuhcwcfz\",\"writeBatchTimeout\":\"dataklvtceaoiuurql\",\"sinkRetryCount\":\"datahebjfhpayww\",\"sinkRetryWait\":\"dataaqsuqp\",\"maxConcurrentConnections\":\"datavxbdlraridiat\",\"disableMetricsCollection\":\"dataxq\",\"\":{\"grn\":\"datayleyopgy\",\"ffosomxmvgjuzg\":\"datafjwoaomogkpc\"}}") + "{\"type\":\"xsrmadakjsypuvyv\",\"writeBehavior\":\"Insert\",\"externalIdFieldName\":\"databaj\",\"ignoreNullValues\":\"datacngwkz\",\"writeBatchSize\":\"datakkekldxclqjnn\",\"writeBatchTimeout\":\"datatwqkgvrzli\",\"sinkRetryCount\":\"datasutms\",\"sinkRetryWait\":\"dataibzvytempsa\",\"maxConcurrentConnections\":\"datacxuvdcwtn\",\"disableMetricsCollection\":\"dataleghnfqwjwwhsfj\",\"\":{\"scblsxmsc\":\"databclvpgbutyrsrav\",\"clfbvvuyoilnixw\":\"datafgdtu\"}}") .toObject(SalesforceServiceCloudV2Sink.class); Assertions.assertEquals(SalesforceV2SinkWriteBehavior.INSERT, model.writeBehavior()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceServiceCloudV2Sink model - = new SalesforceServiceCloudV2Sink().withWriteBatchSize("datargpmgtjvuhcwcfz") - .withWriteBatchTimeout("dataklvtceaoiuurql").withSinkRetryCount("datahebjfhpayww") - .withSinkRetryWait("dataaqsuqp").withMaxConcurrentConnections("datavxbdlraridiat") - .withDisableMetricsCollection("dataxq").withWriteBehavior(SalesforceV2SinkWriteBehavior.INSERT) - .withExternalIdFieldName("datayzbnkofcs").withIgnoreNullValues("datapwahehuc"); + SalesforceServiceCloudV2Sink model = new SalesforceServiceCloudV2Sink().withWriteBatchSize("datakkekldxclqjnn") + .withWriteBatchTimeout("datatwqkgvrzli") + .withSinkRetryCount("datasutms") + .withSinkRetryWait("dataibzvytempsa") + .withMaxConcurrentConnections("datacxuvdcwtn") + .withDisableMetricsCollection("dataleghnfqwjwwhsfj") + .withWriteBehavior(SalesforceV2SinkWriteBehavior.INSERT) + .withExternalIdFieldName("databaj") + .withIgnoreNullValues("datacngwkz"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2Sink.class); Assertions.assertEquals(SalesforceV2SinkWriteBehavior.INSERT, model.writeBehavior()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java index a3447fa1938f6..ac74608673f5d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java @@ -11,16 +11,19 @@ public final class SalesforceServiceCloudV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceServiceCloudV2Source model = BinaryData.fromString( - 
"{\"type\":\"SalesforceServiceCloudV2Source\",\"SOQLQuery\":\"datahrzpyxmfip\",\"includeDeletedObjects\":\"datamlf\",\"additionalColumns\":\"datawfxssxarxvft\",\"sourceRetryCount\":\"datasuqap\",\"sourceRetryWait\":\"datadgrbcltfkyq\",\"maxConcurrentConnections\":\"dataiujukcdlvpt\",\"disableMetricsCollection\":\"dataycupmfp\",\"\":{\"pxslccu\":\"dataswgnglmllr\",\"ndirdlehjz\":\"datascjefapouwsynsb\",\"kt\":\"datapdwyhggvhcoaoeti\",\"ae\":\"dataeirambfm\"}}") + "{\"type\":\"mvpsvwwtn\",\"SOQLQuery\":\"datazdecgiomdcolwq\",\"includeDeletedObjects\":\"datarrjudgnph\",\"additionalColumns\":\"datadqt\",\"sourceRetryCount\":\"datanoq\",\"sourceRetryWait\":\"datanllicovvdc\",\"maxConcurrentConnections\":\"datafnbdpaoijx\",\"disableMetricsCollection\":\"datafm\",\"\":{\"xjfkpuszsjay\":\"datavv\"}}") .toObject(SalesforceServiceCloudV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceServiceCloudV2Source model = new SalesforceServiceCloudV2Source().withSourceRetryCount("datasuqap") - .withSourceRetryWait("datadgrbcltfkyq").withMaxConcurrentConnections("dataiujukcdlvpt") - .withDisableMetricsCollection("dataycupmfp").withSoqlQuery("datahrzpyxmfip") - .withIncludeDeletedObjects("datamlf").withAdditionalColumns("datawfxssxarxvft"); + SalesforceServiceCloudV2Source model = new SalesforceServiceCloudV2Source().withSourceRetryCount("datanoq") + .withSourceRetryWait("datanllicovvdc") + .withMaxConcurrentConnections("datafnbdpaoijx") + .withDisableMetricsCollection("datafm") + .withSoqlQuery("datazdecgiomdcolwq") + .withIncludeDeletedObjects("datarrjudgnph") + .withAdditionalColumns("datadqt"); model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java index 53aeaa852a505..7d65facc5c21f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java @@ -13,18 +13,22 @@ public final class SalesforceSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceSink model = BinaryData.fromString( - "{\"type\":\"SalesforceSink\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datai\",\"ignoreNullValues\":\"dataqjnuiiytyarpeyig\",\"writeBatchSize\":\"datappgkk\",\"writeBatchTimeout\":\"dataygjldljgd\",\"sinkRetryCount\":\"datagrtse\",\"sinkRetryWait\":\"dataow\",\"maxConcurrentConnections\":\"dataxkofmtfwcu\",\"disableMetricsCollection\":\"databnapzfdzmr\",\"\":{\"rj\":\"databclj\",\"l\":\"dataawnz\"}}") + "{\"type\":\"qhcps\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datajohx\",\"ignoreNullValues\":\"datahvjo\",\"writeBatchSize\":\"datacdgcvf\",\"writeBatchTimeout\":\"datavxkcyhkhw\",\"sinkRetryCount\":\"datavwfo\",\"sinkRetryWait\":\"datacgrwl\",\"maxConcurrentConnections\":\"datarcovqtydf\",\"disableMetricsCollection\":\"datatkrgagxzmr\",\"\":{\"wrhoma\":\"datagzslnnc\",\"up\":\"datavni\"}}") .toObject(SalesforceSink.class); Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); } @org.junit.jupiter.api.Test public void 
testSerialize() throws Exception { - SalesforceSink model = new SalesforceSink().withWriteBatchSize("datappgkk") - .withWriteBatchTimeout("dataygjldljgd").withSinkRetryCount("datagrtse").withSinkRetryWait("dataow") - .withMaxConcurrentConnections("dataxkofmtfwcu").withDisableMetricsCollection("databnapzfdzmr") - .withWriteBehavior(SalesforceSinkWriteBehavior.UPSERT).withExternalIdFieldName("datai") - .withIgnoreNullValues("dataqjnuiiytyarpeyig"); + SalesforceSink model = new SalesforceSink().withWriteBatchSize("datacdgcvf") + .withWriteBatchTimeout("datavxkcyhkhw") + .withSinkRetryCount("datavwfo") + .withSinkRetryWait("datacgrwl") + .withMaxConcurrentConnections("datarcovqtydf") + .withDisableMetricsCollection("datatkrgagxzmr") + .withWriteBehavior(SalesforceSinkWriteBehavior.UPSERT) + .withExternalIdFieldName("datajohx") + .withIgnoreNullValues("datahvjo"); model = BinaryData.fromObject(model).toObject(SalesforceSink.class); Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java index 55e7630df096e..3a503e74fc943 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java @@ -11,16 +11,20 @@ public final class SalesforceSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceSource model = BinaryData.fromString( - "{\"type\":\"SalesforceSource\",\"query\":\"databnekhjzbfb\",\"readBehavior\":\"dataeqkuozarr\",\"queryTimeout\":\"datapyzryjb\",\"additionalColumns\":\"databcvoyqnrjdrc\",\"sourceRetryCount\":\"datarvzewogh\",\"sourceRetryWait\":\"datazxkjqecj\",\"maxConcurrentConnections\":\"dataromeawthycbigpi\",\"disableMetricsCollection\":\"datapxhzjnparsulmuwl\",\"\":{\"xxqgoavzycxpza\":\"datakhe\",\"mftmxwtwzs\":\"datatalo\"}}") + "{\"type\":\"rhynlbtr\",\"query\":\"dataadmcvvkjnpef\",\"readBehavior\":\"dataevlohuahlqmcb\",\"queryTimeout\":\"dataoyllxc\",\"additionalColumns\":\"datahzylspz\",\"sourceRetryCount\":\"dataecvag\",\"sourceRetryWait\":\"datarhadg\",\"maxConcurrentConnections\":\"dataqrasxeomjqqhb\",\"disableMetricsCollection\":\"dataiuhiafbhzdjvd\",\"\":{\"xz\":\"dataijggbpdpzgvqfz\",\"utyhmflvxilaytjy\":\"dataliic\",\"wnoghqdl\":\"datafq\"}}") .toObject(SalesforceSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceSource model = new SalesforceSource().withSourceRetryCount("datarvzewogh") - .withSourceRetryWait("datazxkjqecj").withMaxConcurrentConnections("dataromeawthycbigpi") - .withDisableMetricsCollection("datapxhzjnparsulmuwl").withQueryTimeout("datapyzryjb") - .withAdditionalColumns("databcvoyqnrjdrc").withQuery("databnekhjzbfb").withReadBehavior("dataeqkuozarr"); + SalesforceSource model = new SalesforceSource().withSourceRetryCount("dataecvag") + .withSourceRetryWait("datarhadg") + .withMaxConcurrentConnections("dataqrasxeomjqqhb") + .withDisableMetricsCollection("dataiuhiafbhzdjvd") + .withQueryTimeout("dataoyllxc") + .withAdditionalColumns("datahzylspz") + .withQuery("dataadmcvvkjnpef") + 
.withReadBehavior("dataevlohuahlqmcb"); model = BinaryData.fromObject(model).toObject(SalesforceSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java index d75abea9d55ef..2bd71d50a3c66 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java @@ -19,31 +19,34 @@ public final class SalesforceV2ObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceV2ObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Object\",\"typeProperties\":{\"objectApiName\":\"datamajokbxxcdkhxjwt\",\"reportId\":\"datatgzljuepm\"},\"description\":\"sjpgbmlbxjhgvte\",\"structure\":\"datarunudmakkshrna\",\"schema\":\"dataczkwohdigeyuocf\",\"linkedServiceName\":{\"referenceName\":\"samodwq\",\"parameters\":{\"xerowuzvrnnb\":\"datat\",\"dofmazhk\":\"datagrafeonmtojx\"}},\"parameters\":{\"szbwgpmdmwi\":{\"type\":\"String\",\"defaultValue\":\"datana\"},\"hh\":{\"type\":\"SecureString\",\"defaultValue\":\"datavesw\"}},\"annotations\":[\"datahdldarg\",\"datawimtcceeeuquu\",\"dataczzc\",\"datajwx\"],\"folder\":{\"name\":\"irgkn\"},\"\":{\"wdajyd\":\"datala\"}}") + "{\"type\":\"poid\",\"typeProperties\":{\"objectApiName\":\"databthutctcabc\",\"reportId\":\"dataabzfihsz\"},\"description\":\"wegvuojuwgweccvu\",\"structure\":\"dataqvfcfsssmyaemk\",\"schema\":\"databsdgktluifiqgp\",\"linkedServiceName\":{\"referenceName\":\"cpenobqysbees\",\"parameters\":{\"rszsu\":\"datavva\",\"intz\":\"datazsautbricvvofe\",\"xjpisrdnowinc\":\"datanhyyqxckd\",\"vijnubxfiiytqx\":\"dataes\"}},\"parameters\":{\"rxkivbkutogecyqo\":{\"type\":\"Bool\",\"defaultValue\":\"datautezlghkvoxdp\"},\"ya\":{\"type\":\"Object\",\"defaultValue\":\"datassbvqnpwdwdmu\"}},\"annotations\":[\"dataqpwxnb\",\"datazrmiukothyfjbpnh\"],\"folder\":{\"name\":\"r\"},\"\":{\"jioqwu\":\"datazfwgnpcjniaffwcg\",\"mwr\":\"dataogdkp\",\"yxqucnbg\":\"datauqjde\"}}") .toObject(SalesforceV2ObjectDataset.class); - Assertions.assertEquals("sjpgbmlbxjhgvte", model.description()); - Assertions.assertEquals("samodwq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("szbwgpmdmwi").type()); - Assertions.assertEquals("irgkn", model.folder().name()); + Assertions.assertEquals("wegvuojuwgweccvu", model.description()); + Assertions.assertEquals("cpenobqysbees", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("rxkivbkutogecyqo").type()); + Assertions.assertEquals("r", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceV2ObjectDataset model = new SalesforceV2ObjectDataset().withDescription("sjpgbmlbxjhgvte") - .withStructure("datarunudmakkshrna").withSchema("dataczkwohdigeyuocf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("samodwq") - .withParameters(mapOf("xerowuzvrnnb", "datat", "dofmazhk", "datagrafeonmtojx"))) - .withParameters(mapOf("szbwgpmdmwi", - new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datana"), "hh", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datavesw"))) - .withAnnotations(Arrays.asList("datahdldarg", "datawimtcceeeuquu", "dataczzc", "datajwx")) - .withFolder(new DatasetFolder().withName("irgkn")).withObjectApiName("datamajokbxxcdkhxjwt") - .withReportId("datatgzljuepm"); + SalesforceV2ObjectDataset model = new SalesforceV2ObjectDataset().withDescription("wegvuojuwgweccvu") + .withStructure("dataqvfcfsssmyaemk") + .withSchema("databsdgktluifiqgp") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cpenobqysbees") + .withParameters(mapOf("rszsu", "datavva", "intz", "datazsautbricvvofe", "xjpisrdnowinc", + "datanhyyqxckd", "vijnubxfiiytqx", "dataes"))) + .withParameters(mapOf("rxkivbkutogecyqo", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datautezlghkvoxdp"), "ya", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datassbvqnpwdwdmu"))) + .withAnnotations(Arrays.asList("dataqpwxnb", "datazrmiukothyfjbpnh")) + .withFolder(new DatasetFolder().withName("r")) + .withObjectApiName("databthutctcabc") + .withReportId("dataabzfihsz"); model = BinaryData.fromObject(model).toObject(SalesforceV2ObjectDataset.class); - Assertions.assertEquals("sjpgbmlbxjhgvte", model.description()); - Assertions.assertEquals("samodwq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("szbwgpmdmwi").type()); - Assertions.assertEquals("irgkn", model.folder().name()); + Assertions.assertEquals("wegvuojuwgweccvu", model.description()); + Assertions.assertEquals("cpenobqysbees", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("rxkivbkutogecyqo").type()); + Assertions.assertEquals("r", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java index 249789899c6de..2d24793386a81 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class SalesforceV2ObjectDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceV2ObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"datajgi\",\"reportId\":\"dataspewyzhydtkbm\"}") + = BinaryData.fromString("{\"objectApiName\":\"dataklscwdko\",\"reportId\":\"datayvihevbfvxmtsm\"}") .toObject(SalesforceV2ObjectDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceV2ObjectDatasetTypeProperties model = new SalesforceV2ObjectDatasetTypeProperties() - .withObjectApiName("datajgi").withReportId("dataspewyzhydtkbm"); + SalesforceV2ObjectDatasetTypeProperties model + = new SalesforceV2ObjectDatasetTypeProperties().withObjectApiName("dataklscwdko") + 
.withReportId("datayvihevbfvxmtsm"); model = BinaryData.fromObject(model).toObject(SalesforceV2ObjectDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java index 71e94685ac5f7..69b0da2dfc539 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java @@ -13,18 +13,22 @@ public final class SalesforceV2SinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceV2Sink model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Sink\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"databqtfcupjmw\",\"ignoreNullValues\":\"datamir\",\"writeBatchSize\":\"dataiqmk\",\"writeBatchTimeout\":\"datafjhtlbrkgh\",\"sinkRetryCount\":\"datafppjunkh\",\"sinkRetryWait\":\"datahkqny\",\"maxConcurrentConnections\":\"datafvzrq\",\"disableMetricsCollection\":\"datahepc\",\"\":{\"qwomkzcmwqfd\":\"dataeqqetasijia\",\"mvqumjmpsxzxbafs\":\"datagpmvl\",\"bzporj\":\"datad\",\"qtcnyhsdgmoxnelh\":\"dataubzkzjazfwywv\"}}") + "{\"type\":\"z\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datadqgavcwxwkjambf\",\"ignoreNullValues\":\"datasrxjfapiodsnz\",\"writeBatchSize\":\"dataqbgcxlg\",\"writeBatchTimeout\":\"dataxoymjx\",\"sinkRetryCount\":\"datantjhvcorobmqudz\",\"sinkRetryWait\":\"datazkkl\",\"maxConcurrentConnections\":\"datazkcygyqgvofhpgu\",\"disableMetricsCollection\":\"databkwwyfsqgass\",\"\":{\"ohpwnrmhlotknb\":\"dataazvrmulsje\",\"zqmudmefsxmdmlow\":\"datau\",\"meqirxwkomjsfkd\":\"datasixpwfvtwg\"}}") .toObject(SalesforceV2Sink.class); Assertions.assertEquals(SalesforceV2SinkWriteBehavior.UPSERT, model.writeBehavior()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceV2Sink model = new SalesforceV2Sink().withWriteBatchSize("dataiqmk") - .withWriteBatchTimeout("datafjhtlbrkgh").withSinkRetryCount("datafppjunkh").withSinkRetryWait("datahkqny") - .withMaxConcurrentConnections("datafvzrq").withDisableMetricsCollection("datahepc") - .withWriteBehavior(SalesforceV2SinkWriteBehavior.UPSERT).withExternalIdFieldName("databqtfcupjmw") - .withIgnoreNullValues("datamir"); + SalesforceV2Sink model = new SalesforceV2Sink().withWriteBatchSize("dataqbgcxlg") + .withWriteBatchTimeout("dataxoymjx") + .withSinkRetryCount("datantjhvcorobmqudz") + .withSinkRetryWait("datazkkl") + .withMaxConcurrentConnections("datazkcygyqgvofhpgu") + .withDisableMetricsCollection("databkwwyfsqgass") + .withWriteBehavior(SalesforceV2SinkWriteBehavior.UPSERT) + .withExternalIdFieldName("datadqgavcwxwkjambf") + .withIgnoreNullValues("datasrxjfapiodsnz"); model = BinaryData.fromObject(model).toObject(SalesforceV2Sink.class); Assertions.assertEquals(SalesforceV2SinkWriteBehavior.UPSERT, model.writeBehavior()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java index 0d4baed148be7..1dd1b8fae651d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java @@ -11,16 +11,20 @@ public final class SalesforceV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SalesforceV2Source model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Source\",\"SOQLQuery\":\"datav\",\"includeDeletedObjects\":\"datamlwadst\",\"queryTimeout\":\"datargqmuthxoldmhypp\",\"additionalColumns\":\"datappmulwv\",\"sourceRetryCount\":\"datathgw\",\"sourceRetryWait\":\"datatbubkyipzehitd\",\"maxConcurrentConnections\":\"databouwuajsitgpz\",\"disableMetricsCollection\":\"datalkcvkme\",\"\":{\"ebn\":\"dataolp\",\"jdbdjxvcxepjfxcm\":\"dataafvks\",\"mtret\":\"datahivw\",\"rs\":\"datalirbvqkbxgzepiny\"}}") + "{\"type\":\"riviftjjmtk\",\"SOQLQuery\":\"datagxxmxdrgxhrta\",\"includeDeletedObjects\":\"datajboiyqixb\",\"queryTimeout\":\"datafw\",\"additionalColumns\":\"dataqj\",\"sourceRetryCount\":\"datadgfjvit\",\"sourceRetryWait\":\"dataa\",\"maxConcurrentConnections\":\"dataesxn\",\"disableMetricsCollection\":\"dataslhncaspwvgl\",\"\":{\"qhatwxq\":\"datan\",\"aicyuplmdhuu\":\"dataggbirzj\",\"tjqjtoeaug\":\"datatiecnpka\"}}") .toObject(SalesforceV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SalesforceV2Source model = new SalesforceV2Source().withSourceRetryCount("datathgw") - .withSourceRetryWait("datatbubkyipzehitd").withMaxConcurrentConnections("databouwuajsitgpz") - .withDisableMetricsCollection("datalkcvkme").withQueryTimeout("datargqmuthxoldmhypp") - .withAdditionalColumns("datappmulwv").withSoqlQuery("datav").withIncludeDeletedObjects("datamlwadst"); + SalesforceV2Source model = new SalesforceV2Source().withSourceRetryCount("datadgfjvit") + .withSourceRetryWait("dataa") + .withMaxConcurrentConnections("dataesxn") + .withDisableMetricsCollection("dataslhncaspwvgl") + .withQueryTimeout("datafw") + .withAdditionalColumns("dataqj") + .withSoqlQuery("datagxxmxdrgxhrta") + .withIncludeDeletedObjects("datajboiyqixb"); model = BinaryData.fromObject(model).toObject(SalesforceV2Source.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java index 8f083bfe661b9..2d35bd3ad6d24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java @@ -19,33 +19,32 @@ public final class SapBwCubeDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapBwCubeDataset model = BinaryData.fromString( - 
"{\"type\":\"SapBwCube\",\"description\":\"shennmsgpywdib\",\"structure\":\"datavnrgalv\",\"schema\":\"datahry\",\"linkedServiceName\":{\"referenceName\":\"brbknuubxcwoj\",\"parameters\":{\"fujgtdowlxm\":\"dataqdvnpyeevff\"}},\"parameters\":{\"vyrjqdjlgk\":{\"type\":\"String\",\"defaultValue\":\"dataybpchrtczwjcujyz\"},\"n\":{\"type\":\"SecureString\",\"defaultValue\":\"dataxxeuwiiirc\"},\"ohktxagfujdbqjny\":{\"type\":\"String\",\"defaultValue\":\"datadviw\"}},\"annotations\":[\"datavxgxqqqa\",\"datafeoo\",\"dataftpvevtarp\",\"dataklqlii\"],\"folder\":{\"name\":\"n\"},\"\":{\"gijydg\":\"datajno\"}}") + "{\"type\":\"dqseypdlmajpuy\",\"description\":\"a\",\"structure\":\"datamzgccy\",\"schema\":\"datauvmsie\",\"linkedServiceName\":{\"referenceName\":\"edmmvoneeyr\",\"parameters\":{\"spl\":\"datarxtzayqwddigeb\"}},\"parameters\":{\"izdnuehx\":{\"type\":\"Array\",\"defaultValue\":\"datawwveeozbjkjq\"},\"rxactsawvxcimp\":{\"type\":\"String\",\"defaultValue\":\"datassjdywbnklg\"}},\"annotations\":[\"datarm\",\"datalzmslubnknyfuy\",\"datajhvrrllfswarmy\"],\"folder\":{\"name\":\"rotgeysyq\"},\"\":{\"xia\":\"dataehfwwcbf\"}}") .toObject(SapBwCubeDataset.class); - Assertions.assertEquals("shennmsgpywdib", model.description()); - Assertions.assertEquals("brbknuubxcwoj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("vyrjqdjlgk").type()); - Assertions.assertEquals("n", model.folder().name()); + Assertions.assertEquals("a", model.description()); + Assertions.assertEquals("edmmvoneeyr", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("izdnuehx").type()); + Assertions.assertEquals("rotgeysyq", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapBwCubeDataset model = new SapBwCubeDataset().withDescription("shennmsgpywdib").withStructure("datavnrgalv") - .withSchema("datahry") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("brbknuubxcwoj") - .withParameters(mapOf("fujgtdowlxm", "dataqdvnpyeevff"))) - .withParameters(mapOf("vyrjqdjlgk", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataybpchrtczwjcujyz"), - "n", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataxxeuwiiirc"), - "ohktxagfujdbqjny", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datadviw"))) - .withAnnotations(Arrays.asList("datavxgxqqqa", "datafeoo", "dataftpvevtarp", "dataklqlii")) - .withFolder(new DatasetFolder().withName("n")); + SapBwCubeDataset model = new SapBwCubeDataset().withDescription("a") + .withStructure("datamzgccy") + .withSchema("datauvmsie") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("edmmvoneeyr") + .withParameters(mapOf("spl", "datarxtzayqwddigeb"))) + .withParameters(mapOf("izdnuehx", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datawwveeozbjkjq"), + "rxactsawvxcimp", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datassjdywbnklg"))) + .withAnnotations(Arrays.asList("datarm", "datalzmslubnknyfuy", "datajhvrrllfswarmy")) + .withFolder(new DatasetFolder().withName("rotgeysyq")); model = BinaryData.fromObject(model).toObject(SapBwCubeDataset.class); - Assertions.assertEquals("shennmsgpywdib", model.description()); - Assertions.assertEquals("brbknuubxcwoj", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.STRING, model.parameters().get("vyrjqdjlgk").type()); - Assertions.assertEquals("n", model.folder().name()); + Assertions.assertEquals("a", model.description()); + Assertions.assertEquals("edmmvoneeyr", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("izdnuehx").type()); + Assertions.assertEquals("rotgeysyq", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java index f613e470f79a6..fce1609315f11 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java @@ -11,15 +11,19 @@ public final class SapBwSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapBwSource model = BinaryData.fromString( - "{\"type\":\"SapBwSource\",\"query\":\"datanwhcmvdowlqcy\",\"queryTimeout\":\"dataubzixqxx\",\"additionalColumns\":\"dataawbftzn\",\"sourceRetryCount\":\"datarfhj\",\"sourceRetryWait\":\"dataiutbrnr\",\"maxConcurrentConnections\":\"dataljucodrbkdieismd\",\"disableMetricsCollection\":\"datafim\",\"\":{\"foexlcskelwzmji\":\"dataijrlmnkvp\"}}") + "{\"type\":\"mq\",\"query\":\"datahllmblls\",\"queryTimeout\":\"datajjrhxornuoqpob\",\"additionalColumns\":\"datarsdx\",\"sourceRetryCount\":\"dataxbqyavcxjols\",\"sourceRetryWait\":\"datai\",\"maxConcurrentConnections\":\"datapnms\",\"disableMetricsCollection\":\"datantakr\",\"\":{\"uwcmzpwk\":\"dataurf\"}}") .toObject(SapBwSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapBwSource model = new SapBwSource().withSourceRetryCount("datarfhj").withSourceRetryWait("dataiutbrnr") - .withMaxConcurrentConnections("dataljucodrbkdieismd").withDisableMetricsCollection("datafim") - .withQueryTimeout("dataubzixqxx").withAdditionalColumns("dataawbftzn").withQuery("datanwhcmvdowlqcy"); + SapBwSource model = new SapBwSource().withSourceRetryCount("dataxbqyavcxjols") + .withSourceRetryWait("datai") + .withMaxConcurrentConnections("datapnms") + .withDisableMetricsCollection("datantakr") + .withQueryTimeout("datajjrhxornuoqpob") + .withAdditionalColumns("datarsdx") + .withQuery("datahllmblls"); model = BinaryData.fromObject(model).toObject(SapBwSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java index e9fcf599672d0..e74663e6114b5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java @@ -19,31 +19,32 @@ public final class SapCloudForCustomerResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws 
Exception { SapCloudForCustomerResourceDataset model = BinaryData.fromString( - "{\"type\":\"SapCloudForCustomerResource\",\"typeProperties\":{\"path\":\"dataebjuymtevaebzm\"},\"description\":\"whrjkejvaedogzo\",\"structure\":\"dataxbxxgjogcphivfhr\",\"schema\":\"dataenfdvdoea\",\"linkedServiceName\":{\"referenceName\":\"ywusrjzhdtr\",\"parameters\":{\"wnmwtqiljknn\":\"dataezfsmyljdzyy\",\"aqjyih\":\"dataynkstdtfwhjfphf\",\"vhkhpsp\":\"datacwwvaosckf\",\"exnguwnrdpuz\":\"dataweifdyfa\"}},\"parameters\":{\"ybsz\":{\"type\":\"Array\",\"defaultValue\":\"dataujtg\"},\"yelrnh\":{\"type\":\"Array\",\"defaultValue\":\"datajxejpdcliqwzut\"}},\"annotations\":[\"datavhqsz\"],\"folder\":{\"name\":\"ovqmqcudptoqwr\"},\"\":{\"kmxwawfu\":\"datakjthl\"}}") + "{\"type\":\"vjixgofqdqwsjm\",\"typeProperties\":{\"path\":\"dataraxnfyzguax\"},\"description\":\"uvrqpbxdoicqp\",\"structure\":\"datatly\",\"schema\":\"datarpensbm\",\"linkedServiceName\":{\"referenceName\":\"jritu\",\"parameters\":{\"f\":\"datamh\"}},\"parameters\":{\"zwanduhduwdvolxt\":{\"type\":\"Bool\",\"defaultValue\":\"datazjxwjoq\"}},\"annotations\":[\"dataic\",\"datas\",\"datalzbki\",\"datamjfgoxedrmra\"],\"folder\":{\"name\":\"chvvoyiogbntnwz\"},\"\":{\"ppnvcebspciry\":\"datasoowxcsmxtlc\",\"kwh\":\"datamhkdwuwedupbkm\",\"x\":\"datajsqwhaefefvv\",\"dobues\":\"databnmcvaqycd\"}}") .toObject(SapCloudForCustomerResourceDataset.class); - Assertions.assertEquals("whrjkejvaedogzo", model.description()); - Assertions.assertEquals("ywusrjzhdtr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ybsz").type()); - Assertions.assertEquals("ovqmqcudptoqwr", model.folder().name()); + Assertions.assertEquals("uvrqpbxdoicqp", model.description()); + Assertions.assertEquals("jritu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zwanduhduwdvolxt").type()); + Assertions.assertEquals("chvvoyiogbntnwz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapCloudForCustomerResourceDataset model = new SapCloudForCustomerResourceDataset() - .withDescription("whrjkejvaedogzo").withStructure("dataxbxxgjogcphivfhr").withSchema("dataenfdvdoea") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ywusrjzhdtr") - .withParameters(mapOf("wnmwtqiljknn", "dataezfsmyljdzyy", "aqjyih", "dataynkstdtfwhjfphf", "vhkhpsp", - "datacwwvaosckf", "exnguwnrdpuz", "dataweifdyfa"))) - .withParameters(mapOf("ybsz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataujtg"), "yelrnh", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datajxejpdcliqwzut"))) - .withAnnotations(Arrays.asList("datavhqsz")).withFolder(new DatasetFolder().withName("ovqmqcudptoqwr")) - .withPath("dataebjuymtevaebzm"); + SapCloudForCustomerResourceDataset model + = new SapCloudForCustomerResourceDataset().withDescription("uvrqpbxdoicqp") + .withStructure("datatly") + .withSchema("datarpensbm") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("jritu").withParameters(mapOf("f", "datamh"))) + .withParameters(mapOf("zwanduhduwdvolxt", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datazjxwjoq"))) + .withAnnotations(Arrays.asList("dataic", "datas", "datalzbki", "datamjfgoxedrmra")) + .withFolder(new DatasetFolder().withName("chvvoyiogbntnwz")) + .withPath("dataraxnfyzguax"); model = 
BinaryData.fromObject(model).toObject(SapCloudForCustomerResourceDataset.class); - Assertions.assertEquals("whrjkejvaedogzo", model.description()); - Assertions.assertEquals("ywusrjzhdtr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ybsz").type()); - Assertions.assertEquals("ovqmqcudptoqwr", model.folder().name()); + Assertions.assertEquals("uvrqpbxdoicqp", model.description()); + Assertions.assertEquals("jritu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zwanduhduwdvolxt").type()); + Assertions.assertEquals("chvvoyiogbntnwz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java index 83fe466a73a47..94e32078c2b9f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class SapCloudForCustomerResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapCloudForCustomerResourceDatasetTypeProperties model - = BinaryData.fromString("{\"path\":\"datakngejjxumowy\"}") + = BinaryData.fromString("{\"path\":\"datayvfxnzpfdfupk\"}") .toObject(SapCloudForCustomerResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapCloudForCustomerResourceDatasetTypeProperties model - = new SapCloudForCustomerResourceDatasetTypeProperties().withPath("datakngejjxumowy"); + = new SapCloudForCustomerResourceDatasetTypeProperties().withPath("datayvfxnzpfdfupk"); model = BinaryData.fromObject(model).toObject(SapCloudForCustomerResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java index 954846d64d26a..2114979f6d651 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java @@ -13,18 +13,21 @@ public final class SapCloudForCustomerSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapCloudForCustomerSink model = BinaryData.fromString( - 
"{\"type\":\"SapCloudForCustomerSink\",\"writeBehavior\":\"Update\",\"httpRequestTimeout\":\"datamophtkyzsgayng\",\"writeBatchSize\":\"datawvcnv\",\"writeBatchTimeout\":\"dataqxqhysu\",\"sinkRetryCount\":\"datadnslroqxrvycjdn\",\"sinkRetryWait\":\"datamggy\",\"maxConcurrentConnections\":\"datapmsacbamtoqse\",\"disableMetricsCollection\":\"dataoyxdig\",\"\":{\"osxdsxil\":\"datazmylqhq\"}}") + "{\"type\":\"u\",\"writeBehavior\":\"Update\",\"httpRequestTimeout\":\"dataktcwgnkxjdax\",\"writeBatchSize\":\"datazawotpiaklef\",\"writeBatchTimeout\":\"dataiabfntrmkeawmfe\",\"sinkRetryCount\":\"datacgdljbnfw\",\"sinkRetryWait\":\"datafn\",\"maxConcurrentConnections\":\"dataiuzbpgskg\",\"disableMetricsCollection\":\"dataspxhhnv\",\"\":{\"tgmd\":\"datajti\",\"akgzcmbgw\":\"datawefstize\",\"jpxpwxabvxwoa\":\"datalnmddflckum\",\"ozkm\":\"dataoeillszdgy\"}}") .toObject(SapCloudForCustomerSink.class); Assertions.assertEquals(SapCloudForCustomerSinkWriteBehavior.UPDATE, model.writeBehavior()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapCloudForCustomerSink model = new SapCloudForCustomerSink().withWriteBatchSize("datawvcnv") - .withWriteBatchTimeout("dataqxqhysu").withSinkRetryCount("datadnslroqxrvycjdn") - .withSinkRetryWait("datamggy").withMaxConcurrentConnections("datapmsacbamtoqse") - .withDisableMetricsCollection("dataoyxdig").withWriteBehavior(SapCloudForCustomerSinkWriteBehavior.UPDATE) - .withHttpRequestTimeout("datamophtkyzsgayng"); + SapCloudForCustomerSink model = new SapCloudForCustomerSink().withWriteBatchSize("datazawotpiaklef") + .withWriteBatchTimeout("dataiabfntrmkeawmfe") + .withSinkRetryCount("datacgdljbnfw") + .withSinkRetryWait("datafn") + .withMaxConcurrentConnections("dataiuzbpgskg") + .withDisableMetricsCollection("dataspxhhnv") + .withWriteBehavior(SapCloudForCustomerSinkWriteBehavior.UPDATE) + .withHttpRequestTimeout("dataktcwgnkxjdax"); model = BinaryData.fromObject(model).toObject(SapCloudForCustomerSink.class); Assertions.assertEquals(SapCloudForCustomerSinkWriteBehavior.UPDATE, model.writeBehavior()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java index fbb8f35722c9f..abd7f02d04a7d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java @@ -11,16 +11,20 @@ public final class SapCloudForCustomerSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapCloudForCustomerSource model = BinaryData.fromString( - "{\"type\":\"SapCloudForCustomerSource\",\"query\":\"datadgem\",\"httpRequestTimeout\":\"datayddzjtxlvgsl\",\"queryTimeout\":\"datalys\",\"additionalColumns\":\"datav\",\"sourceRetryCount\":\"dataak\",\"sourceRetryWait\":\"datapaexllt\",\"maxConcurrentConnections\":\"datakkaei\",\"disableMetricsCollection\":\"datahr\",\"\":{\"lta\":\"datagvsrt\",\"lnwiwrubxey\":\"datajkraleglpyns\",\"w\":\"dataalhbrwaltvky\"}}") + 
"{\"type\":\"kympwquu\",\"query\":\"dataujshcsnk\",\"httpRequestTimeout\":\"datagpqxqevt\",\"queryTimeout\":\"datakjirvjogsalvjl\",\"additionalColumns\":\"dataimua\",\"sourceRetryCount\":\"dataz\",\"sourceRetryWait\":\"datau\",\"maxConcurrentConnections\":\"datahszjyanhsliqeftg\",\"disableMetricsCollection\":\"dataro\",\"\":{\"zxj\":\"dataufqsdt\",\"gibog\":\"dataxolmehquqi\",\"lxcwrszwa\":\"dataojupenoupc\"}}") .toObject(SapCloudForCustomerSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapCloudForCustomerSource model = new SapCloudForCustomerSource().withSourceRetryCount("dataak") - .withSourceRetryWait("datapaexllt").withMaxConcurrentConnections("datakkaei") - .withDisableMetricsCollection("datahr").withQueryTimeout("datalys").withAdditionalColumns("datav") - .withQuery("datadgem").withHttpRequestTimeout("datayddzjtxlvgsl"); + SapCloudForCustomerSource model = new SapCloudForCustomerSource().withSourceRetryCount("dataz") + .withSourceRetryWait("datau") + .withMaxConcurrentConnections("datahszjyanhsliqeftg") + .withDisableMetricsCollection("dataro") + .withQueryTimeout("datakjirvjogsalvjl") + .withAdditionalColumns("dataimua") + .withQuery("dataujshcsnk") + .withHttpRequestTimeout("datagpqxqevt"); model = BinaryData.fromObject(model).toObject(SapCloudForCustomerSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java index ced1ab4236f94..f101388f262c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java @@ -19,29 +19,39 @@ public final class SapEccResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapEccResourceDataset model = BinaryData.fromString( - "{\"type\":\"SapEccResource\",\"typeProperties\":{\"path\":\"datajmoozmxuk\"},\"description\":\"rs\",\"structure\":\"datamnkxjou\",\"schema\":\"datazcfdtstiaxtyrnu\",\"linkedServiceName\":{\"referenceName\":\"cfhep\",\"parameters\":{\"q\":\"databcmlroiommems\",\"abebckc\":\"datablcyeqdobobaq\",\"pjcxbjgfm\":\"datasrsixwn\"}},\"parameters\":{\"y\":{\"type\":\"Float\",\"defaultValue\":\"datarridzfps\"}},\"annotations\":[\"datadfhmlx\",\"dataq\",\"dataekn\",\"datamkqafzvptriy\"],\"folder\":{\"name\":\"gt\"},\"\":{\"dwedg\":\"datapuqpsrcekdvvo\",\"clvbwatza\":\"dataw\"}}") + 
"{\"type\":\"tbuvedwuuqbmen\",\"typeProperties\":{\"path\":\"datatabvbbkflewgsl\"},\"description\":\"q\",\"structure\":\"datawclykcr\",\"schema\":\"dataekkbnj\",\"linkedServiceName\":{\"referenceName\":\"evmp\",\"parameters\":{\"gpkkhpj\":\"dataeuenthshnfiyg\",\"nmtrd\":\"dataglaqlmi\",\"wgecpvfpnrzikvo\":\"datapxi\"}},\"parameters\":{\"hpxukxgoyxon\":{\"type\":\"Float\",\"defaultValue\":\"datahyfivxdifbwbli\"},\"uqr\":{\"type\":\"String\",\"defaultValue\":\"datadqrxro\"},\"tktwjrppifey\":{\"type\":\"SecureString\",\"defaultValue\":\"dataxfuaefewx\"},\"ojklwjpzw\":{\"type\":\"String\",\"defaultValue\":\"dataelrmdcizhvks\"}},\"annotations\":[\"datawksmpyeyzolb\"],\"folder\":{\"name\":\"lytfxudui\"},\"\":{\"wkqwo\":\"datami\",\"ng\":\"datawsnliy\",\"wrgavtfy\":\"datauqzgpdglkfv\",\"ryxpi\":\"datasedfmzu\"}}") .toObject(SapEccResourceDataset.class); - Assertions.assertEquals("rs", model.description()); - Assertions.assertEquals("cfhep", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("y").type()); - Assertions.assertEquals("gt", model.folder().name()); + Assertions.assertEquals("q", model.description()); + Assertions.assertEquals("evmp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hpxukxgoyxon").type()); + Assertions.assertEquals("lytfxudui", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapEccResourceDataset model = new SapEccResourceDataset().withDescription("rs").withStructure("datamnkxjou") - .withSchema("datazcfdtstiaxtyrnu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cfhep").withParameters( - mapOf("q", "databcmlroiommems", "abebckc", "datablcyeqdobobaq", "pjcxbjgfm", "datasrsixwn"))) + SapEccResourceDataset model = new SapEccResourceDataset().withDescription("q") + .withStructure("datawclykcr") + .withSchema("dataekkbnj") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("evmp") + .withParameters( + mapOf("gpkkhpj", "dataeuenthshnfiyg", "nmtrd", "dataglaqlmi", "wgecpvfpnrzikvo", "datapxi"))) .withParameters( - mapOf("y", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarridzfps"))) - .withAnnotations(Arrays.asList("datadfhmlx", "dataq", "dataekn", "datamkqafzvptriy")) - .withFolder(new DatasetFolder().withName("gt")).withPath("datajmoozmxuk"); + mapOf("hpxukxgoyxon", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datahyfivxdifbwbli"), + "uqr", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datadqrxro"), + "tktwjrppifey", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("dataxfuaefewx"), + "ojklwjpzw", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataelrmdcizhvks"))) + .withAnnotations(Arrays.asList("datawksmpyeyzolb")) + .withFolder(new DatasetFolder().withName("lytfxudui")) + .withPath("datatabvbbkflewgsl"); model = BinaryData.fromObject(model).toObject(SapEccResourceDataset.class); - Assertions.assertEquals("rs", model.description()); - Assertions.assertEquals("cfhep", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("y").type()); - Assertions.assertEquals("gt", model.folder().name()); + Assertions.assertEquals("q", model.description()); + Assertions.assertEquals("evmp", 
model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("hpxukxgoyxon").type()); + Assertions.assertEquals("lytfxudui", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java index 515da95dd3544..b9a4edb70d0f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class SapEccResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SapEccResourceDatasetTypeProperties model = BinaryData.fromString("{\"path\":\"datarjbjngoarsr\"}") + SapEccResourceDatasetTypeProperties model = BinaryData.fromString("{\"path\":\"datavapeakfdmc\"}") .toObject(SapEccResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapEccResourceDatasetTypeProperties model - = new SapEccResourceDatasetTypeProperties().withPath("datarjbjngoarsr"); + = new SapEccResourceDatasetTypeProperties().withPath("datavapeakfdmc"); model = BinaryData.fromObject(model).toObject(SapEccResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java index 94094425d55a6..bb4d39000bd3e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java @@ -11,16 +11,20 @@ public final class SapEccSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapEccSource model = BinaryData.fromString( - "{\"type\":\"SapEccSource\",\"query\":\"datap\",\"httpRequestTimeout\":\"dataegkrjolbaeghak\",\"queryTimeout\":\"datacismrnneklfibn\",\"additionalColumns\":\"datafcl\",\"sourceRetryCount\":\"datafu\",\"sourceRetryWait\":\"datawqzbiukzmfy\",\"maxConcurrentConnections\":\"datayz\",\"disableMetricsCollection\":\"dataf\",\"\":{\"seyxpgkmlnj\":\"datalnfvexiuuqafo\",\"glnfwjslwvexblu\":\"datauaywgcjqnfafp\"}}") + "{\"type\":\"pwqzvqtnozw\",\"query\":\"datab\",\"httpRequestTimeout\":\"dataeeeucvvnbymrgel\",\"queryTimeout\":\"datafr\",\"additionalColumns\":\"datanvkqtv\",\"sourceRetryCount\":\"datakaqracvcbrtlt\",\"sourceRetryWait\":\"datahbzbbjxkamitgv\",\"maxConcurrentConnections\":\"datapdv\",\"disableMetricsCollection\":\"datayelrteunkwypu\",\"\":{\"lylzdbrwjlwfthf\":\"datamsygtdfqlfd\",\"jsfgkwrcbgxypr\":\"dataxwi\"}}") .toObject(SapEccSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapEccSource model = new SapEccSource().withSourceRetryCount("datafu").withSourceRetryWait("datawqzbiukzmfy") - 
.withMaxConcurrentConnections("datayz").withDisableMetricsCollection("dataf") - .withQueryTimeout("datacismrnneklfibn").withAdditionalColumns("datafcl").withQuery("datap") - .withHttpRequestTimeout("dataegkrjolbaeghak"); + SapEccSource model = new SapEccSource().withSourceRetryCount("datakaqracvcbrtlt") + .withSourceRetryWait("datahbzbbjxkamitgv") + .withMaxConcurrentConnections("datapdv") + .withDisableMetricsCollection("datayelrteunkwypu") + .withQueryTimeout("datafr") + .withAdditionalColumns("datanvkqtv") + .withQuery("datab") + .withHttpRequestTimeout("dataeeeucvvnbymrgel"); model = BinaryData.fromObject(model).toObject(SapEccSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java index f72b762a09fd5..7d77f498e0223 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java @@ -10,13 +10,13 @@ public final class SapHanaPartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SapHanaPartitionSettings model = BinaryData.fromString("{\"partitionColumnName\":\"dataujhejytrvlgu\"}") - .toObject(SapHanaPartitionSettings.class); + SapHanaPartitionSettings model + = BinaryData.fromString("{\"partitionColumnName\":\"datadpd\"}").toObject(SapHanaPartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapHanaPartitionSettings model = new SapHanaPartitionSettings().withPartitionColumnName("dataujhejytrvlgu"); + SapHanaPartitionSettings model = new SapHanaPartitionSettings().withPartitionColumnName("datadpd"); model = BinaryData.fromObject(model).toObject(SapHanaPartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java index 74a1b09150a63..b612be95ab0b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java @@ -12,18 +12,22 @@ public final class SapHanaSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapHanaSource model = BinaryData.fromString( - "{\"type\":\"SapHanaSource\",\"query\":\"datamqwkfgmkpve\",\"packetSize\":\"datatzqzhdwrcajfersx\",\"partitionOption\":\"datalkcwjw\",\"partitionSettings\":{\"partitionColumnName\":\"datasksgxykdepqcy\"},\"queryTimeout\":\"datahwsxpzkmotgmd\",\"additionalColumns\":\"datawwqevbiuntp\",\"sourceRetryCount\":\"datamwjxlyce\",\"sourceRetryWait\":\"dataeqgywrauur\",\"maxConcurrentConnections\":\"datad\",\"disableMetricsCollection\":\"dataycnk\",\"\":{\"amhefuhnbdlza\":\"dataiecfmqcxmpcdbvcx\",\"cmpnk\":\"dataectzjjgvcbt\"}}") + 
"{\"type\":\"pceuwmfdy\",\"query\":\"dataoizabjbh\",\"packetSize\":\"dataddijfkkt\",\"partitionOption\":\"dataiseepl\",\"partitionSettings\":{\"partitionColumnName\":\"datarxydsompnce\"},\"queryTimeout\":\"dataczkjkfakgrwtpe\",\"additionalColumns\":\"dataanm\",\"sourceRetryCount\":\"datapy\",\"sourceRetryWait\":\"datagqyszjuijzhvejqj\",\"maxConcurrentConnections\":\"dataolnylpykjpa\",\"disableMetricsCollection\":\"dataouya\",\"\":{\"on\":\"dataxaicheycak\"}}") .toObject(SapHanaSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapHanaSource model - = new SapHanaSource().withSourceRetryCount("datamwjxlyce").withSourceRetryWait("dataeqgywrauur") - .withMaxConcurrentConnections("datad").withDisableMetricsCollection("dataycnk") - .withQueryTimeout("datahwsxpzkmotgmd").withAdditionalColumns("datawwqevbiuntp") - .withQuery("datamqwkfgmkpve").withPacketSize("datatzqzhdwrcajfersx").withPartitionOption("datalkcwjw") - .withPartitionSettings(new SapHanaPartitionSettings().withPartitionColumnName("datasksgxykdepqcy")); + SapHanaSource model = new SapHanaSource().withSourceRetryCount("datapy") + .withSourceRetryWait("datagqyszjuijzhvejqj") + .withMaxConcurrentConnections("dataolnylpykjpa") + .withDisableMetricsCollection("dataouya") + .withQueryTimeout("dataczkjkfakgrwtpe") + .withAdditionalColumns("dataanm") + .withQuery("dataoizabjbh") + .withPacketSize("dataddijfkkt") + .withPartitionOption("dataiseepl") + .withPartitionSettings(new SapHanaPartitionSettings().withPartitionColumnName("datarxydsompnce")); model = BinaryData.fromObject(model).toObject(SapHanaSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java index 6d807fc8fc465..ff47209f650e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java @@ -19,32 +19,35 @@ public final class SapHanaTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapHanaTableDataset model = BinaryData.fromString( - "{\"type\":\"SapHanaTable\",\"typeProperties\":{\"schema\":\"datai\",\"table\":\"datazcyniapy\"},\"description\":\"mrxirqwipzesstu\",\"structure\":\"dataytkmlfupj\",\"schema\":\"datax\",\"linkedServiceName\":{\"referenceName\":\"vzjoyxjgahxue\",\"parameters\":{\"erfcv\":\"dataaktnytkb\",\"hnhhcikh\":\"datapvfqjckmpwyv\",\"lsac\":\"dataebgjg\",\"ygotoh\":\"datagiflr\"}},\"parameters\":{\"tjsjzelsriemvu\":{\"type\":\"String\",\"defaultValue\":\"datadhbxitrapwzhl\"},\"cb\":{\"type\":\"Int\",\"defaultValue\":\"dataakosysycvldee\"}},\"annotations\":[\"datapus\",\"dataofkegbvbbdledffl\",\"datavsluazzxfjv\",\"dataugpxzeempup\"],\"folder\":{\"name\":\"boxraqdczmr\"},\"\":{\"drzzbskiwrjsb\":\"databekxeheowseca\",\"qsuivmrfaptndrmm\":\"databmseesacuicnvq\"}}") + 
"{\"type\":\"oqkanqtrkic\",\"typeProperties\":{\"schema\":\"datax\",\"table\":\"datao\"},\"description\":\"qyrgqmndk\",\"structure\":\"datawmurhvifqeqf\",\"schema\":\"datanackitlw\",\"linkedServiceName\":{\"referenceName\":\"ebylpzjelda\",\"parameters\":{\"h\":\"dataunilni\",\"kbvzpkodngvnq\":\"datacbrdsypotn\"}},\"parameters\":{\"vbqy\":{\"type\":\"Object\",\"defaultValue\":\"datatwgnldxuczl\"},\"uvcqoqkqwucqsdgb\":{\"type\":\"Float\",\"defaultValue\":\"datarirpi\"},\"jlamyv\":{\"type\":\"Int\",\"defaultValue\":\"datatvmijccpk\"}},\"annotations\":[\"datajmaih\",\"datannlb\"],\"folder\":{\"name\":\"ppcbqetf\"},\"\":{\"mprklatwiuujxsuj\":\"datapvolzayjwdunj\",\"pc\":\"datarwgxeegxbnjnczep\",\"dquyyaes\":\"datamgbf\"}}") .toObject(SapHanaTableDataset.class); - Assertions.assertEquals("mrxirqwipzesstu", model.description()); - Assertions.assertEquals("vzjoyxjgahxue", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tjsjzelsriemvu").type()); - Assertions.assertEquals("boxraqdczmr", model.folder().name()); + Assertions.assertEquals("qyrgqmndk", model.description()); + Assertions.assertEquals("ebylpzjelda", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("vbqy").type()); + Assertions.assertEquals("ppcbqetf", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapHanaTableDataset model = new SapHanaTableDataset().withDescription("mrxirqwipzesstu") - .withStructure("dataytkmlfupj").withSchema("datax") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vzjoyxjgahxue") - .withParameters(mapOf("erfcv", "dataaktnytkb", "hnhhcikh", "datapvfqjckmpwyv", "lsac", "dataebgjg", - "ygotoh", "datagiflr"))) - .withParameters(mapOf("tjsjzelsriemvu", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datadhbxitrapwzhl"), "cb", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataakosysycvldee"))) - .withAnnotations(Arrays.asList("datapus", "dataofkegbvbbdledffl", "datavsluazzxfjv", "dataugpxzeempup")) - .withFolder(new DatasetFolder().withName("boxraqdczmr")).withSchemaTypePropertiesSchema("datai") - .withTable("datazcyniapy"); + SapHanaTableDataset model = new SapHanaTableDataset().withDescription("qyrgqmndk") + .withStructure("datawmurhvifqeqf") + .withSchema("datanackitlw") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ebylpzjelda") + .withParameters(mapOf("h", "dataunilni", "kbvzpkodngvnq", "datacbrdsypotn"))) + .withParameters(mapOf("vbqy", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datatwgnldxuczl"), + "uvcqoqkqwucqsdgb", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarirpi"), "jlamyv", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatvmijccpk"))) + .withAnnotations(Arrays.asList("datajmaih", "datannlb")) + .withFolder(new DatasetFolder().withName("ppcbqetf")) + .withSchemaTypePropertiesSchema("datax") + .withTable("datao"); model = BinaryData.fromObject(model).toObject(SapHanaTableDataset.class); - Assertions.assertEquals("mrxirqwipzesstu", model.description()); - Assertions.assertEquals("vzjoyxjgahxue", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("tjsjzelsriemvu").type()); - Assertions.assertEquals("boxraqdczmr", model.folder().name()); + 
Assertions.assertEquals("qyrgqmndk", model.description()); + Assertions.assertEquals("ebylpzjelda", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("vbqy").type()); + Assertions.assertEquals("ppcbqetf", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java index bd949f9968c05..967cbc432451d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class SapHanaTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapHanaTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datahnkmxrqkek\",\"table\":\"dataaviiebeqrfz\"}") + = BinaryData.fromString("{\"schema\":\"dataxnavpyxqb\",\"table\":\"datadtb\"}") .toObject(SapHanaTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapHanaTableDatasetTypeProperties model - = new SapHanaTableDatasetTypeProperties().withSchema("datahnkmxrqkek").withTable("dataaviiebeqrfz"); + = new SapHanaTableDatasetTypeProperties().withSchema("dataxnavpyxqb").withTable("datadtb"); model = BinaryData.fromObject(model).toObject(SapHanaTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java index 3aa808b6b3599..3e71c3220a44d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java @@ -19,33 +19,38 @@ public final class SapOdpResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOdpResourceDataset model = BinaryData.fromString( - "{\"type\":\"SapOdpResource\",\"typeProperties\":{\"context\":\"datawuuqbmenxcqsxwc\",\"objectName\":\"dataykc\"},\"description\":\"dek\",\"structure\":\"datanjre\",\"schema\":\"dataptedeuenthshnfi\",\"linkedServiceName\":{\"referenceName\":\"gpgpkkhpjnglaqlm\",\"parameters\":{\"ecpvfpnrzikvo\":\"datamtrdlpxiww\",\"ivxdifbwblijhp\":\"dataloeohy\"}},\"parameters\":{\"xr\":{\"type\":\"SecureString\",\"defaultValue\":\"dataoyxontbwdq\"},\"fewxatktwjrppi\":{\"type\":\"String\",\"defaultValue\":\"dataqrrldxfua\"},\"jklwjp\":{\"type\":\"Array\",\"defaultValue\":\"datarqvelrmdcizhvksb\"}},\"annotations\":[\"datancw\",\"datasmpyeyzolbfnfly\"],\"folder\":{\"name\":\"uduiqoom\"},\"\":{\"liyznghuqzgp\":\"datakqwopws\",\"fy\":\"dataglkfvdwrgav\"}}") + 
"{\"type\":\"vuqzgbjwvrudmp\",\"typeProperties\":{\"context\":\"dataick\",\"objectName\":\"datakithueoc\"},\"description\":\"wpm\",\"structure\":\"dataleajabesgyzwp\",\"schema\":\"datajkscecmbaajdfw\",\"linkedServiceName\":{\"referenceName\":\"dkqlvzkf\",\"parameters\":{\"b\":\"dataesbpjqtl\"}},\"parameters\":{\"rr\":{\"type\":\"Array\",\"defaultValue\":\"datauibs\"},\"rcpzhbwcxybtdzyc\":{\"type\":\"String\",\"defaultValue\":\"dataeqrypyurvshhovtu\"},\"wczsrazcbybic\":{\"type\":\"Bool\",\"defaultValue\":\"dataoegjzgpljb\"},\"pua\":{\"type\":\"SecureString\",\"defaultValue\":\"datah\"}},\"annotations\":[\"datai\"],\"folder\":{\"name\":\"mu\"},\"\":{\"enndzgthdzit\":\"datawuycuo\",\"vswtwonadezm\":\"datazffpherwj\"}}") .toObject(SapOdpResourceDataset.class); - Assertions.assertEquals("dek", model.description()); - Assertions.assertEquals("gpgpkkhpjnglaqlm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xr").type()); - Assertions.assertEquals("uduiqoom", model.folder().name()); + Assertions.assertEquals("wpm", model.description()); + Assertions.assertEquals("dkqlvzkf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rr").type()); + Assertions.assertEquals("mu", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapOdpResourceDataset model = new SapOdpResourceDataset().withDescription("dek").withStructure("datanjre") - .withSchema("dataptedeuenthshnfi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gpgpkkhpjnglaqlm") - .withParameters(mapOf("ecpvfpnrzikvo", "datamtrdlpxiww", "ivxdifbwblijhp", "dataloeohy"))) - .withParameters(mapOf("xr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataoyxontbwdq"), - "fewxatktwjrppi", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqrrldxfua"), "jklwjp", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datarqvelrmdcizhvksb"))) - .withAnnotations(Arrays.asList("datancw", "datasmpyeyzolbfnfly")) - .withFolder(new DatasetFolder().withName("uduiqoom")).withContext("datawuuqbmenxcqsxwc") - .withObjectName("dataykc"); + SapOdpResourceDataset model = new SapOdpResourceDataset().withDescription("wpm") + .withStructure("dataleajabesgyzwp") + .withSchema("datajkscecmbaajdfw") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("dkqlvzkf").withParameters(mapOf("b", "dataesbpjqtl"))) + .withParameters( + mapOf("rr", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datauibs"), + "rcpzhbwcxybtdzyc", + new ParameterSpecification().withType(ParameterType.STRING) + .withDefaultValue("dataeqrypyurvshhovtu"), + "wczsrazcbybic", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataoegjzgpljb"), "pua", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datah"))) + .withAnnotations(Arrays.asList("datai")) + .withFolder(new DatasetFolder().withName("mu")) + .withContext("dataick") + .withObjectName("datakithueoc"); model = BinaryData.fromObject(model).toObject(SapOdpResourceDataset.class); - Assertions.assertEquals("dek", model.description()); - Assertions.assertEquals("gpgpkkhpjnglaqlm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("xr").type()); - 
Assertions.assertEquals("uduiqoom", model.folder().name()); + Assertions.assertEquals("wpm", model.description()); + Assertions.assertEquals("dkqlvzkf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rr").type()); + Assertions.assertEquals("mu", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java index 1bb78d4859ca9..2c8d14a7618f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class SapOdpResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOdpResourceDatasetTypeProperties model - = BinaryData.fromString("{\"context\":\"datasedfmzu\",\"objectName\":\"dataryxpi\"}") + = BinaryData.fromString("{\"context\":\"dataxvfybxmmr\",\"objectName\":\"datanuvqkrrsguog\"}") .toObject(SapOdpResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapOdpResourceDatasetTypeProperties model - = new SapOdpResourceDatasetTypeProperties().withContext("datasedfmzu").withObjectName("dataryxpi"); + = new SapOdpResourceDatasetTypeProperties().withContext("dataxvfybxmmr").withObjectName("datanuvqkrrsguog"); model = BinaryData.fromObject(model).toObject(SapOdpResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java index 69797636d8ff0..3a32f29d4ac2a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java @@ -11,17 +11,22 @@ public final class SapOdpSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOdpSource model = BinaryData.fromString( - "{\"type\":\"SapOdpSource\",\"extractionMode\":\"datax\",\"subscriberProcess\":\"datanrurtnwbjj\",\"selection\":\"dataupckhfbmdemohlsh\",\"projection\":\"dataaoofltb\",\"queryTimeout\":\"datayvmwaejxzkqcm\",\"additionalColumns\":\"datacf\",\"sourceRetryCount\":\"dataxyrtqegabsfjrj\",\"sourceRetryWait\":\"dataq\",\"maxConcurrentConnections\":\"datagorvgdibepg\",\"disableMetricsCollection\":\"databijoehhqwwsgqzi\",\"\":{\"buxqtokckxfkft\":\"dataetwjssyazm\",\"btbogxlyve\":\"datakbyruheawucmqfu\",\"tsfi\":\"datavxjgu\"}}") + 
"{\"type\":\"l\",\"extractionMode\":\"dataau\",\"subscriberProcess\":\"datacl\",\"selection\":\"dataaoidjhoykgtyvrn\",\"projection\":\"datakiree\",\"queryTimeout\":\"datah\",\"additionalColumns\":\"datavndg\",\"sourceRetryCount\":\"dataa\",\"sourceRetryWait\":\"datasipfwlyei\",\"maxConcurrentConnections\":\"datadp\",\"disableMetricsCollection\":\"dataqteirrjjmvr\",\"\":{\"auytqdkgaxl\":\"datapopwxxdgzhnpxbu\",\"xqrokw\":\"dataafwsu\"}}") .toObject(SapOdpSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapOdpSource model = new SapOdpSource().withSourceRetryCount("dataxyrtqegabsfjrj").withSourceRetryWait("dataq") - .withMaxConcurrentConnections("datagorvgdibepg").withDisableMetricsCollection("databijoehhqwwsgqzi") - .withQueryTimeout("datayvmwaejxzkqcm").withAdditionalColumns("datacf").withExtractionMode("datax") - .withSubscriberProcess("datanrurtnwbjj").withSelection("dataupckhfbmdemohlsh") - .withProjection("dataaoofltb"); + SapOdpSource model = new SapOdpSource().withSourceRetryCount("dataa") + .withSourceRetryWait("datasipfwlyei") + .withMaxConcurrentConnections("datadp") + .withDisableMetricsCollection("dataqteirrjjmvr") + .withQueryTimeout("datah") + .withAdditionalColumns("datavndg") + .withExtractionMode("dataau") + .withSubscriberProcess("datacl") + .withSelection("dataaoidjhoykgtyvrn") + .withProjection("datakiree"); model = BinaryData.fromObject(model).toObject(SapOdpSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java index 89762fbbe5c3e..7e55bb21d2bdb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java @@ -11,17 +11,22 @@ public final class SapOpenHubSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOpenHubSource model = BinaryData.fromString( - "{\"type\":\"SapOpenHubSource\",\"excludeLastRequest\":\"databrngnbqhmuqyzxk\",\"baseRequestId\":\"datam\",\"customRfcReadTableFunctionModule\":\"datajshtcfnb\",\"sapDataColumnDelimiter\":\"datad\",\"queryTimeout\":\"datalyhx\",\"additionalColumns\":\"dataqoe\",\"sourceRetryCount\":\"databoqozxnuxamxikh\",\"sourceRetryWait\":\"dataikglynbqpeojecb\",\"maxConcurrentConnections\":\"datagw\",\"disableMetricsCollection\":\"datahtnywgtsodnxeir\",\"\":{\"g\":\"datajimcfrht\",\"a\":\"datauvoaxqo\",\"cyramvzu\":\"dataptfpbzyqbgg\",\"uwxslzq\":\"dataxtbrqnyurxl\"}}") + "{\"type\":\"syhdapynpvgya\",\"excludeLastRequest\":\"datasksfbkxfkeeqo\",\"baseRequestId\":\"databek\",\"customRfcReadTableFunctionModule\":\"dataerwss\",\"sapDataColumnDelimiter\":\"datamrpdjrylfpdudx\",\"queryTimeout\":\"databanfsqfhatqssnge\",\"additionalColumns\":\"datayffgohrhj\",\"sourceRetryCount\":\"datatbet\",\"sourceRetryWait\":\"datadtgpvnczf\",\"maxConcurrentConnections\":\"dataybjku\",\"disableMetricsCollection\":\"dataajkyrhucbfkaqlp\",\"\":{\"ronbqakt\":\"datat\",\"obcqvzmywvsyn\":\"dataotmge\"}}") .toObject(SapOpenHubSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapOpenHubSource model = new 
SapOpenHubSource().withSourceRetryCount("databoqozxnuxamxikh") - .withSourceRetryWait("dataikglynbqpeojecb").withMaxConcurrentConnections("datagw") - .withDisableMetricsCollection("datahtnywgtsodnxeir").withQueryTimeout("datalyhx") - .withAdditionalColumns("dataqoe").withExcludeLastRequest("databrngnbqhmuqyzxk").withBaseRequestId("datam") - .withCustomRfcReadTableFunctionModule("datajshtcfnb").withSapDataColumnDelimiter("datad"); + SapOpenHubSource model = new SapOpenHubSource().withSourceRetryCount("datatbet") + .withSourceRetryWait("datadtgpvnczf") + .withMaxConcurrentConnections("dataybjku") + .withDisableMetricsCollection("dataajkyrhucbfkaqlp") + .withQueryTimeout("databanfsqfhatqssnge") + .withAdditionalColumns("datayffgohrhj") + .withExcludeLastRequest("datasksfbkxfkeeqo") + .withBaseRequestId("databek") + .withCustomRfcReadTableFunctionModule("dataerwss") + .withSapDataColumnDelimiter("datamrpdjrylfpdudx"); model = BinaryData.fromObject(model).toObject(SapOpenHubSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java index a628310415bfa..5f5639f011bc5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java @@ -19,30 +19,38 @@ public final class SapOpenHubTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOpenHubTableDataset model = BinaryData.fromString( - "{\"type\":\"SapOpenHubTable\",\"typeProperties\":{\"openHubDestinationName\":\"datavqymcwtsi\",\"excludeLastRequest\":\"dataeplbrzgkuorwpqbs\",\"baseRequestId\":\"datae\"},\"description\":\"ptscru\",\"structure\":\"dataki\",\"schema\":\"dataayynoyj\",\"linkedServiceName\":{\"referenceName\":\"flsmsbnlyoifg\",\"parameters\":{\"zotkx\":\"datajqthykcvoevcw\",\"cgcvyp\":\"datalwwooxgbsd\",\"jcqgzwvxwi\":\"datahubdmgobxeh\"}},\"parameters\":{\"uky\":{\"type\":\"Int\",\"defaultValue\":\"datamjklqrljd\"}},\"annotations\":[\"datarjiqoqovqhgphgxu\",\"dataud\",\"datacy\"],\"folder\":{\"name\":\"vumryd\"},\"\":{\"iyoypoedkspwwibp\":\"dataivahfcqwnjzebpic\",\"znfffnhcgnaqsrm\":\"databqeigxuyxsxteuik\"}}") + "{\"type\":\"on\",\"typeProperties\":{\"openHubDestinationName\":\"datazcg\",\"excludeLastRequest\":\"dataozlibcbnunzuysaj\",\"baseRequestId\":\"dataqlho\"},\"description\":\"ivgtibtkqjqjc\",\"structure\":\"datago\",\"schema\":\"datatkhh\",\"linkedServiceName\":{\"referenceName\":\"emrvk\",\"parameters\":{\"qfxspxgogyp\":\"datajtdyulglhelwrukl\",\"kvctvuz\":\"dataztgaexjn\",\"ttmhlvr\":\"datamb\"}},\"parameters\":{\"jggkwdepem\":{\"type\":\"Float\",\"defaultValue\":\"datanwukfajnpdw\"},\"gtrttcuayiqylnk\":{\"type\":\"Float\",\"defaultValue\":\"datayfiqiidxcorjvudy\"},\"gqexowq\":{\"type\":\"Object\",\"defaultValue\":\"datazifb\"},\"kuobpw\":{\"type\":\"Array\",\"defaultValue\":\"datagqrqkkvfy\"}},\"annotations\":[\"datapgobothx\",\"dataewhpnyjt\",\"dataqgzyvextc\"],\"folder\":{\"name\":\"r\"},\"\":{\"rifiozttcbiichg\":\"dataowuwhd\",\"ryqycymzrlcfgdwz\":\"dataudsozodwjcfqoy\",\"adnyciwzki\":\"datauzfdh\"}}") .toObject(SapOpenHubTableDataset.class); - 
Assertions.assertEquals("ptscru", model.description()); - Assertions.assertEquals("flsmsbnlyoifg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("uky").type()); - Assertions.assertEquals("vumryd", model.folder().name()); + Assertions.assertEquals("ivgtibtkqjqjc", model.description()); + Assertions.assertEquals("emrvk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jggkwdepem").type()); + Assertions.assertEquals("r", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapOpenHubTableDataset model - = new SapOpenHubTableDataset().withDescription("ptscru").withStructure("dataki").withSchema("dataayynoyj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("flsmsbnlyoifg").withParameters( - mapOf("zotkx", "datajqthykcvoevcw", "cgcvyp", "datalwwooxgbsd", "jcqgzwvxwi", "datahubdmgobxeh"))) - .withParameters(mapOf("uky", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamjklqrljd"))) - .withAnnotations(Arrays.asList("datarjiqoqovqhgphgxu", "dataud", "datacy")) - .withFolder(new DatasetFolder().withName("vumryd")).withOpenHubDestinationName("datavqymcwtsi") - .withExcludeLastRequest("dataeplbrzgkuorwpqbs").withBaseRequestId("datae"); + SapOpenHubTableDataset model = new SapOpenHubTableDataset().withDescription("ivgtibtkqjqjc") + .withStructure("datago") + .withSchema("datatkhh") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("emrvk") + .withParameters( + mapOf("qfxspxgogyp", "datajtdyulglhelwrukl", "kvctvuz", "dataztgaexjn", "ttmhlvr", "datamb"))) + .withParameters(mapOf("jggkwdepem", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datanwukfajnpdw"), + "gtrttcuayiqylnk", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayfiqiidxcorjvudy"), + "gqexowq", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazifb"), + "kuobpw", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagqrqkkvfy"))) + .withAnnotations(Arrays.asList("datapgobothx", "dataewhpnyjt", "dataqgzyvextc")) + .withFolder(new DatasetFolder().withName("r")) + .withOpenHubDestinationName("datazcg") + .withExcludeLastRequest("dataozlibcbnunzuysaj") + .withBaseRequestId("dataqlho"); model = BinaryData.fromObject(model).toObject(SapOpenHubTableDataset.class); - Assertions.assertEquals("ptscru", model.description()); - Assertions.assertEquals("flsmsbnlyoifg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("uky").type()); - Assertions.assertEquals("vumryd", model.folder().name()); + Assertions.assertEquals("ivgtibtkqjqjc", model.description()); + Assertions.assertEquals("emrvk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jggkwdepem").type()); + Assertions.assertEquals("r", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java index 56887321ddd5f..856bc5c1d78de 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java @@ -11,15 +11,16 @@ public final class SapOpenHubTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapOpenHubTableDatasetTypeProperties model = BinaryData.fromString( - "{\"openHubDestinationName\":\"datafqderkr\",\"excludeLastRequest\":\"datadcob\",\"baseRequestId\":\"datagqlwyqznbby\"}") + "{\"openHubDestinationName\":\"dataykqadfges\",\"excludeLastRequest\":\"dataoha\",\"baseRequestId\":\"datarizmadjrsbgail\"}") .toObject(SapOpenHubTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapOpenHubTableDatasetTypeProperties model - = new SapOpenHubTableDatasetTypeProperties().withOpenHubDestinationName("datafqderkr") - .withExcludeLastRequest("datadcob").withBaseRequestId("datagqlwyqznbby"); + = new SapOpenHubTableDatasetTypeProperties().withOpenHubDestinationName("dataykqadfges") + .withExcludeLastRequest("dataoha") + .withBaseRequestId("datarizmadjrsbgail"); model = BinaryData.fromObject(model).toObject(SapOpenHubTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java index d4a0ab3ccb18e..b62fa5c209990 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java @@ -11,15 +11,16 @@ public final class SapTablePartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapTablePartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"databdeyhweb\",\"partitionUpperBound\":\"datab\",\"partitionLowerBound\":\"datacfvajmmdmbyl\",\"maxPartitionsNumber\":\"datadtqujfzxsazuj\"}") + "{\"partitionColumnName\":\"datafnipywgjgf\",\"partitionUpperBound\":\"datafsvay\",\"partitionLowerBound\":\"datajypo\",\"maxPartitionsNumber\":\"dataiptnwpws\"}") .toObject(SapTablePartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapTablePartitionSettings model - = new SapTablePartitionSettings().withPartitionColumnName("databdeyhweb").withPartitionUpperBound("datab") - .withPartitionLowerBound("datacfvajmmdmbyl").withMaxPartitionsNumber("datadtqujfzxsazuj"); + SapTablePartitionSettings model = new SapTablePartitionSettings().withPartitionColumnName("datafnipywgjgf") + .withPartitionUpperBound("datafsvay") + .withPartitionLowerBound("datajypo") + .withMaxPartitionsNumber("dataiptnwpws"); model = BinaryData.fromObject(model).toObject(SapTablePartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java index 01c114d6026e1..6b9408adbb2b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java @@ -19,30 +19,38 @@ public final class SapTableResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapTableResourceDataset model = BinaryData.fromString( - "{\"type\":\"SapTableResource\",\"typeProperties\":{\"tableName\":\"dataxwjoqfzwand\"},\"description\":\"duwd\",\"structure\":\"datalxtqm\",\"schema\":\"datac\",\"linkedServiceName\":{\"referenceName\":\"s\",\"parameters\":{\"oxedrmrazhvch\":\"databkiumjf\",\"zsoowxcsmxtlcapp\":\"datavoyiogbntnwzr\"}},\"parameters\":{\"pbkmzkwhjjs\":{\"type\":\"Array\",\"defaultValue\":\"dataspciryomhkdwuwed\"},\"vvkxdbnmc\":{\"type\":\"Float\",\"defaultValue\":\"dataaefe\"}},\"annotations\":[\"dataycdzdob\",\"dataesdyvf\"],\"folder\":{\"name\":\"pfdfu\"},\"\":{\"bbkfl\":\"datawpdpsegivytab\"}}") + "{\"type\":\"ekijhminen\",\"typeProperties\":{\"tableName\":\"datacgcsapvbcqpf\"},\"description\":\"ivp\",\"structure\":\"datakvorlfqm\",\"schema\":\"dataewynbf\",\"linkedServiceName\":{\"referenceName\":\"vcwvurkmjufavvln\",\"parameters\":{\"kkf\":\"dataotmynklnmrznmtv\",\"xutcbvriuvnfazx\":\"datawxizkstxneykei\",\"yaeiivjqkq\":\"datavsa\"}},\"parameters\":{\"joqfxqe\":{\"type\":\"Bool\",\"defaultValue\":\"dataew\"},\"zq\":{\"type\":\"SecureString\",\"defaultValue\":\"datadqiuspguzljvgji\"},\"qq\":{\"type\":\"Object\",\"defaultValue\":\"datajsazmjsis\"},\"ygllfkchhgsjuzzc\":{\"type\":\"String\",\"defaultValue\":\"dataecagsbfeiirpn\"}},\"annotations\":[\"datawmqcycabaam\",\"datakhdhpmkxdujkxpuq\",\"datadyoqywsuarpzhry\",\"dataezxiz\"],\"folder\":{\"name\":\"xbfj\"},\"\":{\"kdf\":\"datauazccouhwi\",\"rb\":\"datajsk\",\"piqdqbvxqto\":\"datazepirtv\"}}") .toObject(SapTableResourceDataset.class); - Assertions.assertEquals("duwd", model.description()); - Assertions.assertEquals("s", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("pbkmzkwhjjs").type()); - Assertions.assertEquals("pfdfu", model.folder().name()); + Assertions.assertEquals("ivp", model.description()); + Assertions.assertEquals("vcwvurkmjufavvln", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("joqfxqe").type()); + Assertions.assertEquals("xbfj", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapTableResourceDataset model = new SapTableResourceDataset().withDescription("duwd").withStructure("datalxtqm") - .withSchema("datac") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("s") - .withParameters(mapOf("oxedrmrazhvch", "databkiumjf", "zsoowxcsmxtlcapp", "datavoyiogbntnwzr"))) - .withParameters(mapOf("pbkmzkwhjjs", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataspciryomhkdwuwed"), - "vvkxdbnmc", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataaefe"))) - .withAnnotations(Arrays.asList("dataycdzdob", "dataesdyvf")) - .withFolder(new 
DatasetFolder().withName("pfdfu")).withTableName("dataxwjoqfzwand"); + SapTableResourceDataset model = new SapTableResourceDataset().withDescription("ivp") + .withStructure("datakvorlfqm") + .withSchema("dataewynbf") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vcwvurkmjufavvln") + .withParameters(mapOf("kkf", "dataotmynklnmrznmtv", "xutcbvriuvnfazx", "datawxizkstxneykei", + "yaeiivjqkq", "datavsa"))) + .withParameters(mapOf("joqfxqe", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataew"), "zq", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datadqiuspguzljvgji"), + "qq", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datajsazmjsis"), + "ygllfkchhgsjuzzc", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataecagsbfeiirpn"))) + .withAnnotations( + Arrays.asList("datawmqcycabaam", "datakhdhpmkxdujkxpuq", "datadyoqywsuarpzhry", "dataezxiz")) + .withFolder(new DatasetFolder().withName("xbfj")) + .withTableName("datacgcsapvbcqpf"); model = BinaryData.fromObject(model).toObject(SapTableResourceDataset.class); - Assertions.assertEquals("duwd", model.description()); - Assertions.assertEquals("s", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("pbkmzkwhjjs").type()); - Assertions.assertEquals("pfdfu", model.folder().name()); + Assertions.assertEquals("ivp", model.description()); + Assertions.assertEquals("vcwvurkmjufavvln", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("joqfxqe").type()); + Assertions.assertEquals("xbfj", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java index a35d3b563a6a2..a35b6fb7a9c38 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class SapTableResourceDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SapTableResourceDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datawgsltutbuve\"}") + SapTableResourceDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datapwbopvhcbt\"}") .toObject(SapTableResourceDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SapTableResourceDatasetTypeProperties model - = new SapTableResourceDatasetTypeProperties().withTableName("datawgsltutbuve"); + = new SapTableResourceDatasetTypeProperties().withTableName("datapwbopvhcbt"); model = BinaryData.fromObject(model).toObject(SapTableResourceDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java index 2386aa2d3beb3..5f26c4690a726 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java @@ -12,21 +12,30 @@ public final class SapTableSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SapTableSource model = BinaryData.fromString( - "{\"type\":\"SapTableSource\",\"rowCount\":\"dataiznbif\",\"rowSkips\":\"dataix\",\"rfcTableFields\":\"datakj\",\"rfcTableOptions\":\"dataxl\",\"batchSize\":\"datamvrblj\",\"customRfcReadTableFunctionModule\":\"datawsaskullvtsaujiv\",\"sapDataColumnDelimiter\":\"datazidzq\",\"partitionOption\":\"datafhzxkjygkuidgwdh\",\"partitionSettings\":{\"partitionColumnName\":\"datacozb\",\"partitionUpperBound\":\"datat\",\"partitionLowerBound\":\"datacmxqdexnkp\",\"maxPartitionsNumber\":\"dataxc\"},\"queryTimeout\":\"datamzykp\",\"additionalColumns\":\"datagiumuztb\",\"sourceRetryCount\":\"datatfmcnrgwgcsto\",\"sourceRetryWait\":\"dataveehmvr\",\"maxConcurrentConnections\":\"dataurpzry\",\"disableMetricsCollection\":\"datafdhch\",\"\":{\"fqsjzlcktkbg\":\"dataahaxyrdlvbom\",\"tle\":\"datateehyhxgnlpj\",\"krcohhuwzun\":\"dataymijhn\"}}") + "{\"type\":\"pubsdinfauyt\",\"rowCount\":\"dataql\",\"rowSkips\":\"datarhctbrvegdamoy\",\"rfcTableFields\":\"datafjpkezqjizbyczme\",\"rfcTableOptions\":\"dataacgvlnpjjb\",\"batchSize\":\"datayrktuvdestarulnh\",\"customRfcReadTableFunctionModule\":\"datatvyhsxhcrf\",\"sapDataColumnDelimiter\":\"dataxexupcuizvx\",\"partitionOption\":\"datavzhlkeotdscqkxzr\",\"partitionSettings\":{\"partitionColumnName\":\"dataqzmvemli\",\"partitionUpperBound\":\"datadfqfnftrrhhgwaw\",\"partitionLowerBound\":\"datah\",\"maxPartitionsNumber\":\"datavcfxdvk\"},\"queryTimeout\":\"dataxultxhqqvdhdyy\",\"additionalColumns\":\"datatx\",\"sourceRetryCount\":\"datavsdyqyjkmfo\",\"sourceRetryWait\":\"datamxedlcxm\",\"maxConcurrentConnections\":\"datatpbapojknvxantlp\",\"disableMetricsCollection\":\"dataiipfgdnqpkvvrho\",\"\":{\"uexhskh\":\"dataqdvnruo\",\"xhegc\":\"dataqlvocrdd\",\"unqwcrkkabyx\":\"datalhqzcklq\"}}") .toObject(SapTableSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SapTableSource model = new SapTableSource().withSourceRetryCount("datatfmcnrgwgcsto") - .withSourceRetryWait("dataveehmvr").withMaxConcurrentConnections("dataurpzry") - .withDisableMetricsCollection("datafdhch").withQueryTimeout("datamzykp") - .withAdditionalColumns("datagiumuztb").withRowCount("dataiznbif").withRowSkips("dataix") - .withRfcTableFields("datakj").withRfcTableOptions("dataxl").withBatchSize("datamvrblj") - .withCustomRfcReadTableFunctionModule("datawsaskullvtsaujiv").withSapDataColumnDelimiter("datazidzq") - .withPartitionOption("datafhzxkjygkuidgwdh").withPartitionSettings( - new SapTablePartitionSettings().withPartitionColumnName("datacozb").withPartitionUpperBound("datat") - .withPartitionLowerBound("datacmxqdexnkp").withMaxPartitionsNumber("dataxc")); + SapTableSource model = new SapTableSource().withSourceRetryCount("datavsdyqyjkmfo") + .withSourceRetryWait("datamxedlcxm") + .withMaxConcurrentConnections("datatpbapojknvxantlp") + .withDisableMetricsCollection("dataiipfgdnqpkvvrho") + 
.withQueryTimeout("dataxultxhqqvdhdyy") + .withAdditionalColumns("datatx") + .withRowCount("dataql") + .withRowSkips("datarhctbrvegdamoy") + .withRfcTableFields("datafjpkezqjizbyczme") + .withRfcTableOptions("dataacgvlnpjjb") + .withBatchSize("datayrktuvdestarulnh") + .withCustomRfcReadTableFunctionModule("datatvyhsxhcrf") + .withSapDataColumnDelimiter("dataxexupcuizvx") + .withPartitionOption("datavzhlkeotdscqkxzr") + .withPartitionSettings(new SapTablePartitionSettings().withPartitionColumnName("dataqzmvemli") + .withPartitionUpperBound("datadfqfnftrrhhgwaw") + .withPartitionLowerBound("datah") + .withMaxPartitionsNumber("datavcfxdvk")); model = BinaryData.fromObject(model).toObject(SapTableSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java index 3160b1b94132f..03bbaa64e0d54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java @@ -21,50 +21,54 @@ public final class ScheduleTriggerRecurrenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScheduleTriggerRecurrence model = BinaryData.fromString( - "{\"frequency\":\"Month\",\"interval\":917730009,\"startTime\":\"2021-03-30T03:31:07Z\",\"endTime\":\"2021-04-02T08:19:10Z\",\"timeZone\":\"zmrwlsrjjaj\",\"schedule\":{\"minutes\":[2020562074],\"hours\":[35520275,1915462888],\"weekDays\":[\"Wednesday\"],\"monthDays\":[1497257373,182132397,79673613,1340181084],\"monthlyOccurrences\":[{\"day\":\"Saturday\",\"occurrence\":1604982715,\"\":{\"zdkrmpljzrzv\":\"databkytrztwwkvwpbd\"}},{\"day\":\"Tuesday\",\"occurrence\":1218396039,\"\":{\"jrlugigzwh\":\"datayg\"}},{\"day\":\"Wednesday\",\"occurrence\":31016802,\"\":{\"yzzk\":\"datal\"}}],\"\":{\"ntgi\":\"dataeydjagyks\",\"b\":\"dataaazfjbxhnah\",\"a\":\"datao\",\"gbyxpma\":\"datawidumilxi\"}},\"\":{\"viqwfctiyaf\":\"datanqcb\"}}") + "{\"frequency\":\"Minute\",\"interval\":1449790272,\"startTime\":\"2021-01-15T17:32:17Z\",\"endTime\":\"2021-02-22T01:40:59Z\",\"timeZone\":\"mqbcitlyyphtdwhm\",\"schedule\":{\"minutes\":[758989911],\"hours\":[2057988261],\"weekDays\":[\"Saturday\",\"Wednesday\",\"Saturday\",\"Thursday\"],\"monthDays\":[406785212,1599563071,1148299002],\"monthlyOccurrences\":[{\"day\":\"Monday\",\"occurrence\":1137923293,\"\":{\"ej\":\"dataynsqxyowwrb\"}},{\"day\":\"Wednesday\",\"occurrence\":1214619883,\"\":{\"gidkgsjivdtrtkq\":\"datakswfpqrusxy\"}}],\"\":{\"rdx\":\"dataxslbrttlwaskt\",\"okjnbcdnjexcyhs\":\"datasbbd\"}},\"\":{\"qgimwivqphddewfa\":\"datairaubxfb\",\"zzak\":\"dataajpojz\",\"sqdzivvy\":\"datantwnhpc\",\"uq\":\"dataysavevnerpyz\"}}") .toObject(ScheduleTriggerRecurrence.class); - Assertions.assertEquals(RecurrenceFrequency.MONTH, model.frequency()); - Assertions.assertEquals(917730009, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-03-30T03:31:07Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-02T08:19:10Z"), model.endTime()); - Assertions.assertEquals("zmrwlsrjjaj", model.timeZone()); - Assertions.assertEquals(2020562074, 
model.schedule().minutes().get(0)); - Assertions.assertEquals(35520275, model.schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.WEDNESDAY, model.schedule().weekDays().get(0)); - Assertions.assertEquals(1497257373, model.schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.SATURDAY, model.schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1604982715, model.schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(RecurrenceFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1449790272, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-01-15T17:32:17Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-02-22T01:40:59Z"), model.endTime()); + Assertions.assertEquals("mqbcitlyyphtdwhm", model.timeZone()); + Assertions.assertEquals(758989911, model.schedule().minutes().get(0)); + Assertions.assertEquals(2057988261, model.schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.SATURDAY, model.schedule().weekDays().get(0)); + Assertions.assertEquals(406785212, model.schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.MONDAY, model.schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1137923293, model.schedule().monthlyOccurrences().get(0).occurrence()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScheduleTriggerRecurrence model = new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MONTH) - .withInterval(917730009).withStartTime(OffsetDateTime.parse("2021-03-30T03:31:07Z")) - .withEndTime(OffsetDateTime.parse("2021-04-02T08:19:10Z")).withTimeZone("zmrwlsrjjaj") - .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(2020562074)) - .withHours(Arrays.asList(35520275, 1915462888)).withWeekDays(Arrays.asList(DaysOfWeek.WEDNESDAY)) - .withMonthDays(Arrays.asList(1497257373, 182132397, 79673613, 1340181084)) + ScheduleTriggerRecurrence model = new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) + .withInterval(1449790272) + .withStartTime(OffsetDateTime.parse("2021-01-15T17:32:17Z")) + .withEndTime(OffsetDateTime.parse("2021-02-22T01:40:59Z")) + .withTimeZone("mqbcitlyyphtdwhm") + .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(758989911)) + .withHours(Arrays.asList(2057988261)) + .withWeekDays( + Arrays.asList(DaysOfWeek.SATURDAY, DaysOfWeek.WEDNESDAY, DaysOfWeek.SATURDAY, DaysOfWeek.THURSDAY)) + .withMonthDays(Arrays.asList(406785212, 1599563071, 1148299002)) .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.SATURDAY).withOccurrence(1604982715) + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.MONDAY) + .withOccurrence(1137923293) .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY).withOccurrence(1218396039) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY).withOccurrence(31016802) + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) + .withOccurrence(1214619883) .withAdditionalProperties(mapOf()))) .withAdditionalProperties(mapOf())) .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(ScheduleTriggerRecurrence.class); - Assertions.assertEquals(RecurrenceFrequency.MONTH, model.frequency()); - Assertions.assertEquals(917730009, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-03-30T03:31:07Z"), model.startTime()); - 
Assertions.assertEquals(OffsetDateTime.parse("2021-04-02T08:19:10Z"), model.endTime()); - Assertions.assertEquals("zmrwlsrjjaj", model.timeZone()); - Assertions.assertEquals(2020562074, model.schedule().minutes().get(0)); - Assertions.assertEquals(35520275, model.schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.WEDNESDAY, model.schedule().weekDays().get(0)); - Assertions.assertEquals(1497257373, model.schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.SATURDAY, model.schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1604982715, model.schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(RecurrenceFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1449790272, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-01-15T17:32:17Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-02-22T01:40:59Z"), model.endTime()); + Assertions.assertEquals("mqbcitlyyphtdwhm", model.timeZone()); + Assertions.assertEquals(758989911, model.schedule().minutes().get(0)); + Assertions.assertEquals(2057988261, model.schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.SATURDAY, model.schedule().weekDays().get(0)); + Assertions.assertEquals(406785212, model.schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.MONDAY, model.schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1137923293, model.schedule().monthlyOccurrences().get(0).occurrence()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java index 7a7f524496740..6cf768243bc24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java @@ -24,71 +24,77 @@ public final class ScheduleTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScheduleTrigger model = BinaryData.fromString( - 
"{\"type\":\"ScheduleTrigger\",\"typeProperties\":{\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1427518805,\"startTime\":\"2021-04-27T23:49:15Z\",\"endTime\":\"2021-06-28T00:05:17Z\",\"timeZone\":\"rnb\",\"schedule\":{\"minutes\":[1119797804],\"hours\":[999351737,732651337],\"weekDays\":[\"Tuesday\",\"Thursday\",\"Thursday\",\"Tuesday\"],\"monthDays\":[466726136,431958697,2021158859,959950633],\"monthlyOccurrences\":[{\"day\":\"Thursday\",\"occurrence\":104057412,\"\":{\"mnfavllbskl\":\"datahwaadcz\",\"kcea\":\"dataakkihxpofv\"}},{\"day\":\"Tuesday\",\"occurrence\":1036046655,\"\":{\"tg\":\"datadvaw\",\"naeclrjscdoqocdr\":\"dataqteg\"}},{\"day\":\"Thursday\",\"occurrence\":1417547783,\"\":{\"uuboyrfqyjtollug\":\"dataroaedswhbse\",\"kasbda\":\"datasvzi\",\"mutwmarfbsz\":\"datadsvdbdl\"}},{\"day\":\"Friday\",\"occurrence\":10999242,\"\":{\"hc\":\"datardmbebxmkwokl\"}}],\"\":{\"bfweezzrzfytq\":\"datagutitjwvvvapdsh\",\"hidh\":\"datanbxgofiphlwyzd\"}},\"\":{\"xcbfrnttlrumvi\":\"dataslczkzlbztsgklue\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"dcavvj\",\"name\":\"nnhafed\"},\"parameters\":{\"vmuqxsoc\":\"dataxbauzvxejicpmgl\",\"djpugais\":\"datakjrtcifxl\",\"dmnvtpbohzcaaq\":\"datavsj\"}},{\"pipelineReference\":{\"referenceName\":\"daqfvplfy\",\"name\":\"b\"},\"parameters\":{\"bbvqsqw\":\"datashmqn\",\"ilqscjxpro\":\"datawxtqdtve\",\"iytkeqjviawspvbc\":\"datafyddrsairxnw\"}}],\"description\":\"heelmiuprfqyrwtd\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datahewd\",\"dataualgkfo\",\"datanqmdtuyimdoprkp\",\"dataghqs\"],\"\":{\"hox\":\"dataebxjedyyeng\"}}") + "{\"type\":\"rkrfvlqwi\",\"typeProperties\":{\"recurrence\":{\"frequency\":\"Day\",\"interval\":862096390,\"startTime\":\"2021-01-25T07:32:55Z\",\"endTime\":\"2021-10-08T06:46:47Z\",\"timeZone\":\"lbqdxvxdfkdwk\",\"schedule\":{\"minutes\":[248937577,815850992,462109108,208237939],\"hours\":[239856664,1272427932,1347880889,1400698641],\"weekDays\":[\"Thursday\",\"Wednesday\",\"Tuesday\"],\"monthDays\":[333698935],\"monthlyOccurrences\":[{\"day\":\"Sunday\",\"occurrence\":1064820707,\"\":{\"is\":\"datadrecrrbkmzq\",\"xekql\":\"datagoapxdmxwetkj\",\"edfbfqwll\":\"datactdxargqff\",\"qzj\":\"datab\"}},{\"day\":\"Monday\",\"occurrence\":1818860634,\"\":{\"kiwum\":\"dataf\"}},{\"day\":\"Saturday\",\"occurrence\":158512843,\"\":{\"ecqwdosbsng\":\"datas\",\"cpwrgry\":\"dataiacwdxvlku\",\"zdtfthnjxid\":\"datablrqeqcdikcqc\"}}],\"\":{\"rrzuegindln\":\"datampyixgxtccmqzku\"}},\"\":{\"yracqmfji\":\"dataapszxqnjx\",\"oijtlhxlsxx\":\"datamcgz\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"ts\",\"name\":\"agvq\"},\"parameters\":{\"jr\":\"datardfxqhnwh\"}},{\"pipelineReference\":{\"referenceName\":\"dqlzggvocxi\",\"name\":\"mrdbqujy\"},\"parameters\":{\"mntlbfknxzc\":\"dataciaznpsvgupqwqs\",\"ccpbtvgiokz\":\"datauvjbfryortbres\",\"dwawomkzussgjmub\":\"datatpvs\",\"ekzcmfibbozkp\":\"datagjdluwbmwuj\"}},{\"pipelineReference\":{\"referenceName\":\"v\",\"name\":\"ymswfwckucs\"},\"parameters\":{\"vvomcjpjrxvsgga\":\"datafhqxhtcohrhw\"}}],\"description\":\"oenpihtgigaeeqg\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datazlfccpgeqix\"],\"\":{\"ld\":\"dataltqldlhhqpt\",\"nnb\":\"datamacaj\"}}") .toObject(ScheduleTrigger.class); - Assertions.assertEquals("heelmiuprfqyrwtd", model.description()); - Assertions.assertEquals("dcavvj", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("nnhafed", model.pipelines().get(0).pipelineReference().name()); - 
Assertions.assertEquals(RecurrenceFrequency.MINUTE, model.recurrence().frequency()); - Assertions.assertEquals(1427518805, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-27T23:49:15Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-28T00:05:17Z"), model.recurrence().endTime()); - Assertions.assertEquals("rnb", model.recurrence().timeZone()); - Assertions.assertEquals(1119797804, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(999351737, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.TUESDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(466726136, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(104057412, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals("oenpihtgigaeeqg", model.description()); + Assertions.assertEquals("ts", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("agvq", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals(RecurrenceFrequency.DAY, model.recurrence().frequency()); + Assertions.assertEquals(862096390, model.recurrence().interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-01-25T07:32:55Z"), model.recurrence().startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-10-08T06:46:47Z"), model.recurrence().endTime()); + Assertions.assertEquals("lbqdxvxdfkdwk", model.recurrence().timeZone()); + Assertions.assertEquals(248937577, model.recurrence().schedule().minutes().get(0)); + Assertions.assertEquals(239856664, model.recurrence().schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.THURSDAY, model.recurrence().schedule().weekDays().get(0)); + Assertions.assertEquals(333698935, model.recurrence().schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.SUNDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1064820707, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScheduleTrigger model = new ScheduleTrigger().withDescription("heelmiuprfqyrwtd") - .withAnnotations(Arrays.asList("datahewd", "dataualgkfo", "datanqmdtuyimdoprkp", "dataghqs")) + ScheduleTrigger model = new ScheduleTrigger().withDescription("oenpihtgigaeeqg") + .withAnnotations(Arrays.asList("datazlfccpgeqix")) .withPipelines(Arrays.asList( new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("dcavvj").withName("nnhafed")) - .withParameters(mapOf("vmuqxsoc", "dataxbauzvxejicpmgl", "djpugais", "datakjrtcifxl", - "dmnvtpbohzcaaq", "datavsj")), + .withPipelineReference(new PipelineReference().withReferenceName("ts").withName("agvq")) + .withParameters(mapOf("jr", "datardfxqhnwh")), new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("daqfvplfy").withName("b")) - .withParameters(mapOf("bbvqsqw", "datashmqn", "ilqscjxpro", "datawxtqdtve", "iytkeqjviawspvbc", - "datafyddrsairxnw")))) - .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE) - .withInterval(1427518805).withStartTime(OffsetDateTime.parse("2021-04-27T23:49:15Z")) - 
.withEndTime(OffsetDateTime.parse("2021-06-28T00:05:17Z")).withTimeZone("rnb") - .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(1119797804)) - .withHours(Arrays.asList(999351737, 732651337)) - .withWeekDays( - Arrays.asList(DaysOfWeek.TUESDAY, DaysOfWeek.THURSDAY, DaysOfWeek.THURSDAY, DaysOfWeek.TUESDAY)) - .withMonthDays(Arrays.asList(466726136, 431958697, 2021158859, 959950633)) - .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY).withOccurrence(104057412) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY).withOccurrence(1036046655) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY).withOccurrence(1417547783) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY).withOccurrence(10999242) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf())) + .withPipelineReference( + new PipelineReference().withReferenceName("dqlzggvocxi").withName("mrdbqujy")) + .withParameters(mapOf("mntlbfknxzc", "dataciaznpsvgupqwqs", "ccpbtvgiokz", "datauvjbfryortbres", + "dwawomkzussgjmub", "datatpvs", "ekzcmfibbozkp", "datagjdluwbmwuj")), + new TriggerPipelineReference() + .withPipelineReference(new PipelineReference().withReferenceName("v").withName("ymswfwckucs")) + .withParameters(mapOf("vvomcjpjrxvsgga", "datafhqxhtcohrhw")))) + .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.DAY) + .withInterval(862096390) + .withStartTime(OffsetDateTime.parse("2021-01-25T07:32:55Z")) + .withEndTime(OffsetDateTime.parse("2021-10-08T06:46:47Z")) + .withTimeZone("lbqdxvxdfkdwk") + .withSchedule( + new RecurrenceSchedule().withMinutes(Arrays.asList(248937577, 815850992, 462109108, 208237939)) + .withHours(Arrays.asList(239856664, 1272427932, 1347880889, 1400698641)) + .withWeekDays(Arrays.asList(DaysOfWeek.THURSDAY, DaysOfWeek.WEDNESDAY, DaysOfWeek.TUESDAY)) + .withMonthDays(Arrays.asList(333698935)) + .withMonthlyOccurrences(Arrays.asList( + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.SUNDAY) + .withOccurrence(1064820707) + .withAdditionalProperties(mapOf()), + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.MONDAY) + .withOccurrence(1818860634) + .withAdditionalProperties(mapOf()), + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.SATURDAY) + .withOccurrence(158512843) + .withAdditionalProperties(mapOf()))) + .withAdditionalProperties(mapOf())) .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(ScheduleTrigger.class); - Assertions.assertEquals("heelmiuprfqyrwtd", model.description()); - Assertions.assertEquals("dcavvj", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("nnhafed", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals(RecurrenceFrequency.MINUTE, model.recurrence().frequency()); - Assertions.assertEquals(1427518805, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-27T23:49:15Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-28T00:05:17Z"), model.recurrence().endTime()); - Assertions.assertEquals("rnb", model.recurrence().timeZone()); - Assertions.assertEquals(1119797804, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(999351737, model.recurrence().schedule().hours().get(0)); - 
Assertions.assertEquals(DaysOfWeek.TUESDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(466726136, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(104057412, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals("oenpihtgigaeeqg", model.description()); + Assertions.assertEquals("ts", model.pipelines().get(0).pipelineReference().referenceName()); + Assertions.assertEquals("agvq", model.pipelines().get(0).pipelineReference().name()); + Assertions.assertEquals(RecurrenceFrequency.DAY, model.recurrence().frequency()); + Assertions.assertEquals(862096390, model.recurrence().interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-01-25T07:32:55Z"), model.recurrence().startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-10-08T06:46:47Z"), model.recurrence().endTime()); + Assertions.assertEquals("lbqdxvxdfkdwk", model.recurrence().timeZone()); + Assertions.assertEquals(248937577, model.recurrence().schedule().minutes().get(0)); + Assertions.assertEquals(239856664, model.recurrence().schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.THURSDAY, model.recurrence().schedule().weekDays().get(0)); + Assertions.assertEquals(333698935, model.recurrence().schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.SUNDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(1064820707, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java index fcfffe4dd1ff9..a0c045802f69f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java @@ -22,54 +22,60 @@ public final class ScheduleTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScheduleTriggerTypeProperties model = BinaryData.fromString( - "{\"recurrence\":{\"frequency\":\"NotSpecified\",\"interval\":1095256215,\"startTime\":\"2021-02-27T12:11:30Z\",\"endTime\":\"2021-02-18T05:00:01Z\",\"timeZone\":\"vlsspptxdraji\",\"schedule\":{\"minutes\":[1667593447,1271402426,1565030338,906533139],\"hours\":[1448334718],\"weekDays\":[\"Friday\",\"Friday\"],\"monthDays\":[605430402,547584072],\"monthlyOccurrences\":[{\"day\":\"Thursday\",\"occurrence\":1741967093,\"\":{\"yamijgquizvvwyhs\":\"databrent\",\"iwyt\":\"dataewhzaiuoibw\",\"r\":\"datakp\"}},{\"day\":\"Friday\",\"occurrence\":1418940268,\"\":{\"ttiqac\":\"datanxlhdind\",\"jli\":\"datay\",\"hcylvjzufznaed\":\"datagkxrevwvjwtf\",\"dphyxlxvo\":\"datayuxrufwdbimj\"}},{\"day\":\"Wednesday\",\"occurrence\":1510978008,\"\":{\"pcbn\":\"datayttkaufab\"}}],\"\":{\"hllw\":\"datankvsnsi\",\"xtzgxdxq\":\"dataedzodvz\"}},\"\":{\"tuimi\":\"datazub\",\"encgfz\":\"dataccnubynr\",\"hxtbcqjvyzotxkhy\":\"databtzuddqt\"}}}") + 
"{\"recurrence\":{\"frequency\":\"NotSpecified\",\"interval\":702696908,\"startTime\":\"2021-12-10T14:45:18Z\",\"endTime\":\"2021-02-14T10:58:07Z\",\"timeZone\":\"jlfnjmwbtoqhy\",\"schedule\":{\"minutes\":[686607889,35659933],\"hours\":[972859584],\"weekDays\":[\"Saturday\"],\"monthDays\":[240675793,94288841,1033539808,1637467805],\"monthlyOccurrences\":[{\"day\":\"Tuesday\",\"occurrence\":783846634,\"\":{\"ajqzigqfeaqnbkc\":\"datagitjn\",\"zhczyhtjqtzlf\":\"dataoyqmbupbf\",\"nceowv\":\"dataqp\",\"cvmruxebsl\":\"dataqzxluozmedaqpqu\"}},{\"day\":\"Wednesday\",\"occurrence\":2066615687,\"\":{\"fefyggbacmn\":\"datactiso\",\"dxfstmbb\":\"datazjjrh\"}},{\"day\":\"Saturday\",\"occurrence\":1852769685,\"\":{\"cvksvflur\":\"dataty\",\"rohyecblvpwu\":\"datafnlhlfv\"}},{\"day\":\"Wednesday\",\"occurrence\":1090628733,\"\":{\"mgvipzvvrfplkemv\":\"datahmftshgcrcx\",\"wplyv\":\"datalgezyishipl\",\"pemcf\":\"datao\",\"zopwud\":\"dataxkifjvil\"}}],\"\":{\"gephqdoc\":\"databxaufowhmdpggakt\",\"ibbkly\":\"datagnomqwfwtwjzzy\",\"sfjwty\":\"dataelvhxutctakkdjus\",\"twgdlfgmuojnikwz\":\"datavknbucjy\"}},\"\":{\"zft\":\"datausaddmj\",\"xayswvpavu\":\"datafmzlgjce\",\"yclehagb\":\"dataisz\"}}}") .toObject(ScheduleTriggerTypeProperties.class); Assertions.assertEquals(RecurrenceFrequency.NOT_SPECIFIED, model.recurrence().frequency()); - Assertions.assertEquals(1095256215, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-27T12:11:30Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-18T05:00:01Z"), model.recurrence().endTime()); - Assertions.assertEquals("vlsspptxdraji", model.recurrence().timeZone()); - Assertions.assertEquals(1667593447, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1448334718, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.FRIDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(605430402, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1741967093, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(702696908, model.recurrence().interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-12-10T14:45:18Z"), model.recurrence().startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-02-14T10:58:07Z"), model.recurrence().endTime()); + Assertions.assertEquals("jlfnjmwbtoqhy", model.recurrence().timeZone()); + Assertions.assertEquals(686607889, model.recurrence().schedule().minutes().get(0)); + Assertions.assertEquals(972859584, model.recurrence().schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.SATURDAY, model.recurrence().schedule().weekDays().get(0)); + Assertions.assertEquals(240675793, model.recurrence().schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.TUESDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(783846634, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScheduleTriggerTypeProperties model - = new ScheduleTriggerTypeProperties() - .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.NOT_SPECIFIED) - .withInterval(1095256215).withStartTime(OffsetDateTime.parse("2021-02-27T12:11:30Z")) - 
.withEndTime(OffsetDateTime.parse("2021-02-18T05:00:01Z")).withTimeZone("vlsspptxdraji") - .withSchedule(new RecurrenceSchedule() - .withMinutes(Arrays.asList(1667593447, 1271402426, 1565030338, 906533139)) - .withHours(Arrays.asList(1448334718)) - .withWeekDays(Arrays.asList(DaysOfWeek.FRIDAY, DaysOfWeek.FRIDAY)) - .withMonthDays(Arrays.asList(605430402, 547584072)) - .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY).withOccurrence(1741967093) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY).withOccurrence(1418940268) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY).withOccurrence(1510978008) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf())) - .withAdditionalProperties(mapOf())); + ScheduleTriggerTypeProperties model = new ScheduleTriggerTypeProperties() + .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.NOT_SPECIFIED) + .withInterval(702696908) + .withStartTime(OffsetDateTime.parse("2021-12-10T14:45:18Z")) + .withEndTime(OffsetDateTime.parse("2021-02-14T10:58:07Z")) + .withTimeZone("jlfnjmwbtoqhy") + .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(686607889, 35659933)) + .withHours(Arrays.asList(972859584)) + .withWeekDays(Arrays.asList(DaysOfWeek.SATURDAY)) + .withMonthDays(Arrays.asList(240675793, 94288841, 1033539808, 1637467805)) + .withMonthlyOccurrences(Arrays.asList( + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY) + .withOccurrence(783846634) + .withAdditionalProperties(mapOf()), + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) + .withOccurrence(2066615687) + .withAdditionalProperties(mapOf()), + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.SATURDAY) + .withOccurrence(1852769685) + .withAdditionalProperties(mapOf()), + new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) + .withOccurrence(1090628733) + .withAdditionalProperties(mapOf()))) + .withAdditionalProperties(mapOf())) + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(ScheduleTriggerTypeProperties.class); Assertions.assertEquals(RecurrenceFrequency.NOT_SPECIFIED, model.recurrence().frequency()); - Assertions.assertEquals(1095256215, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-27T12:11:30Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-18T05:00:01Z"), model.recurrence().endTime()); - Assertions.assertEquals("vlsspptxdraji", model.recurrence().timeZone()); - Assertions.assertEquals(1667593447, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1448334718, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.FRIDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(605430402, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1741967093, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); + Assertions.assertEquals(702696908, model.recurrence().interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-12-10T14:45:18Z"), model.recurrence().startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-02-14T10:58:07Z"), model.recurrence().endTime()); + 
Assertions.assertEquals("jlfnjmwbtoqhy", model.recurrence().timeZone()); + Assertions.assertEquals(686607889, model.recurrence().schedule().minutes().get(0)); + Assertions.assertEquals(972859584, model.recurrence().schedule().hours().get(0)); + Assertions.assertEquals(DaysOfWeek.SATURDAY, model.recurrence().schedule().weekDays().get(0)); + Assertions.assertEquals(240675793, model.recurrence().schedule().monthDays().get(0)); + Assertions.assertEquals(DayOfWeek.TUESDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); + Assertions.assertEquals(783846634, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java index daa28eb2d6548..cb7e7f1223296 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java @@ -12,21 +12,20 @@ public final class ScriptActionTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScriptAction model = BinaryData - .fromString( - "{\"name\":\"kgxcewzg\",\"uri\":\"xz\",\"roles\":\"dataktcr\",\"parameters\":\"ttedzyzbvsjuths\"}") + .fromString("{\"name\":\"ljeh\",\"uri\":\"hxxuofn\",\"roles\":\"dataa\",\"parameters\":\"hzkvna\"}") .toObject(ScriptAction.class); - Assertions.assertEquals("kgxcewzg", model.name()); - Assertions.assertEquals("xz", model.uri()); - Assertions.assertEquals("ttedzyzbvsjuths", model.parameters()); + Assertions.assertEquals("ljeh", model.name()); + Assertions.assertEquals("hxxuofn", model.uri()); + Assertions.assertEquals("hzkvna", model.parameters()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScriptAction model = new ScriptAction().withName("kgxcewzg").withUri("xz").withRoles("dataktcr") - .withParameters("ttedzyzbvsjuths"); + ScriptAction model + = new ScriptAction().withName("ljeh").withUri("hxxuofn").withRoles("dataa").withParameters("hzkvna"); model = BinaryData.fromObject(model).toObject(ScriptAction.class); - Assertions.assertEquals("kgxcewzg", model.name()); - Assertions.assertEquals("xz", model.uri()); - Assertions.assertEquals("ttedzyzbvsjuths", model.parameters()); + Assertions.assertEquals("ljeh", model.name()); + Assertions.assertEquals("hxxuofn", model.uri()); + Assertions.assertEquals("hzkvna", model.parameters()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java index 469434ba5af84..cf06981ccdb42 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java @@ -14,21 +14,23 @@ public final class ScriptActivityParameterTests { @org.junit.jupiter.api.Test public void testDeserialize() throws 
Exception { ScriptActivityParameter model = BinaryData.fromString( - "{\"name\":\"dataj\",\"type\":\"Int64\",\"value\":\"dataiyzjdrkcsheoxss\",\"direction\":\"Output\",\"size\":1317589595}") + "{\"name\":\"datattoplx\",\"type\":\"Timespan\",\"value\":\"dataliyikcnlb\",\"direction\":\"InputOutput\",\"size\":401356233}") .toObject(ScriptActivityParameter.class); - Assertions.assertEquals(ScriptActivityParameterType.INT64, model.type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.direction()); - Assertions.assertEquals(1317589595, model.size()); + Assertions.assertEquals(ScriptActivityParameterType.TIMESPAN, model.type()); + Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.direction()); + Assertions.assertEquals(401356233, model.size()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScriptActivityParameter model = new ScriptActivityParameter().withName("dataj") - .withType(ScriptActivityParameterType.INT64).withValue("dataiyzjdrkcsheoxss") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(1317589595); + ScriptActivityParameter model = new ScriptActivityParameter().withName("datattoplx") + .withType(ScriptActivityParameterType.TIMESPAN) + .withValue("dataliyikcnlb") + .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) + .withSize(401356233); model = BinaryData.fromObject(model).toObject(ScriptActivityParameter.class); - Assertions.assertEquals(ScriptActivityParameterType.INT64, model.type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.direction()); - Assertions.assertEquals(1317589595, model.size()); + Assertions.assertEquals(ScriptActivityParameterType.TIMESPAN, model.type()); + Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.direction()); + Assertions.assertEquals(401356233, model.size()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java index c1bb7241275b7..e20c4a3e8e49e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java @@ -9,7 +9,6 @@ import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection; import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; -import com.azure.resourcemanager.datafactory.models.ScriptType; import java.util.Arrays; import org.junit.jupiter.api.Assertions; @@ -17,35 +16,36 @@ public final class ScriptActivityScriptBlockTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScriptActivityScriptBlock model = BinaryData.fromString( - 
"{\"text\":\"datacqaejlebcyd\",\"type\":\"Query\",\"parameters\":[{\"name\":\"dataprpwsgazdkcike\",\"type\":\"Double\",\"value\":\"dataialebc\",\"direction\":\"Output\",\"size\":2102195500},{\"name\":\"dataepjsfhxh\",\"type\":\"Guid\",\"value\":\"datakroyjd\",\"direction\":\"Output\",\"size\":2121426093},{\"name\":\"datakgthydyz\",\"type\":\"Single\",\"value\":\"dataguesoivaoryefg\",\"direction\":\"Output\",\"size\":1602176624},{\"name\":\"dataksdatjtg\",\"type\":\"Timespan\",\"value\":\"dataqvindhixddcocs\",\"direction\":\"Output\",\"size\":497633824}]}") + "{\"text\":\"datadselbcudxq\",\"type\":\"datarwwmukxk\",\"parameters\":[{\"name\":\"datafr\",\"type\":\"Int32\",\"value\":\"datat\",\"direction\":\"InputOutput\",\"size\":1691603559},{\"name\":\"datavzqi\",\"type\":\"Int64\",\"value\":\"datajodvkn\",\"direction\":\"Output\",\"size\":471985111},{\"name\":\"datahmhqucasfqod\",\"type\":\"Single\",\"value\":\"dataqfkjghlcfoa\",\"direction\":\"Output\",\"size\":754533951}]}") .toObject(ScriptActivityScriptBlock.class); - Assertions.assertEquals(ScriptType.QUERY, model.type()); - Assertions.assertEquals(ScriptActivityParameterType.DOUBLE, model.parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.parameters().get(0).direction()); - Assertions.assertEquals(2102195500, model.parameters().get(0).size()); + Assertions.assertEquals(ScriptActivityParameterType.INT32, model.parameters().get(0).type()); + Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.parameters().get(0).direction()); + Assertions.assertEquals(1691603559, model.parameters().get(0).size()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScriptActivityScriptBlock model - = new ScriptActivityScriptBlock().withText("datacqaejlebcyd").withType(ScriptType.QUERY) - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("dataprpwsgazdkcike") - .withType(ScriptActivityParameterType.DOUBLE).withValue("dataialebc") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(2102195500), - new ScriptActivityParameter().withName("dataepjsfhxh").withType(ScriptActivityParameterType.GUID) - .withValue("datakroyjd").withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(2121426093), - new ScriptActivityParameter().withName("datakgthydyz").withType(ScriptActivityParameterType.SINGLE) - .withValue("dataguesoivaoryefg").withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(1602176624), - new ScriptActivityParameter().withName("dataksdatjtg") - .withType(ScriptActivityParameterType.TIMESPAN).withValue("dataqvindhixddcocs") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(497633824))); + ScriptActivityScriptBlock model = new ScriptActivityScriptBlock().withText("datadselbcudxq") + .withType("datarwwmukxk") + .withParameters(Arrays.asList( + new ScriptActivityParameter().withName("datafr") + .withType(ScriptActivityParameterType.INT32) + .withValue("datat") + .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) + .withSize(1691603559), + new ScriptActivityParameter().withName("datavzqi") + .withType(ScriptActivityParameterType.INT64) + .withValue("datajodvkn") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(471985111), + new ScriptActivityParameter().withName("datahmhqucasfqod") + .withType(ScriptActivityParameterType.SINGLE) + .withValue("dataqfkjghlcfoa") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(754533951))); model = 
BinaryData.fromObject(model).toObject(ScriptActivityScriptBlock.class); - Assertions.assertEquals(ScriptType.QUERY, model.type()); - Assertions.assertEquals(ScriptActivityParameterType.DOUBLE, model.parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.parameters().get(0).direction()); - Assertions.assertEquals(2102195500, model.parameters().get(0).size()); + Assertions.assertEquals(ScriptActivityParameterType.INT32, model.parameters().get(0).type()); + Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.parameters().get(0).direction()); + Assertions.assertEquals(1691603559, model.parameters().get(0).size()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java index 7b9bdd3059869..c40cbcbe83871 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java @@ -19,7 +19,6 @@ import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import com.azure.resourcemanager.datafactory.models.ScriptType; import com.azure.resourcemanager.datafactory.models.UserProperty; import java.util.Arrays; import java.util.HashMap; @@ -30,104 +29,78 @@ public final class ScriptActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScriptActivity model = BinaryData.fromString( - 
"{\"type\":\"Script\",\"typeProperties\":{\"scriptBlockExecutionTimeout\":\"dataikiogdtde\",\"scripts\":[{\"text\":\"datazsmya\",\"type\":\"NonQuery\",\"parameters\":[{\"name\":\"datamkhkuknccdbs\",\"type\":\"String\",\"value\":\"datadmbnqyswpnog\",\"direction\":\"Input\",\"size\":2077560759},{\"name\":\"datansduwttrvg\",\"type\":\"Timespan\",\"value\":\"datatprfqttz\",\"direction\":\"Output\",\"size\":1804249174}]},{\"text\":\"datazvtfkdzqtkxiyjq\",\"type\":\"Query\",\"parameters\":[{\"name\":\"dataebdhpizk\",\"type\":\"DateTimeOffset\",\"value\":\"datalmfydiodcgwbk\",\"direction\":\"Output\",\"size\":1540037039},{\"name\":\"datazduqthqgngrf\",\"type\":\"Double\",\"value\":\"dataewftq\",\"direction\":\"Output\",\"size\":563491415},{\"name\":\"dataaqahmeskd\",\"type\":\"Decimal\",\"value\":\"databdajc\",\"direction\":\"Output\",\"size\":130816507}]}],\"logSettings\":{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"ojzjryp\",\"parameters\":{\"zst\":\"datahklcczgflo\",\"gwwrclxhveso\":\"datafwbftafrbuvw\",\"xuiod\":\"datadxmmtyumejp\",\"aujhox\":\"datab\"}},\"path\":\"datavceuyw\"}}},\"linkedServiceName\":{\"referenceName\":\"mtzlcvokvo\",\"parameters\":{\"ccnlvyhietezn\":\"datajdy\",\"zulkk\":\"datalsqymvihhgpe\",\"rysanifcfrtijz\":\"datayfiuxdadc\",\"zebqbdewepoan\":\"dataum\"}},\"policy\":{\"timeout\":\"dataspde\",\"retry\":\"datajgjdqlazune\",\"retryIntervalInSeconds\":2015861080,\"secureInput\":false,\"secureOutput\":true,\"\":{\"utjjqzqixsvo\":\"datatiswxcvw\"}},\"name\":\"bjsrisfcc\",\"description\":\"u\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"kyvu\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"qk\":\"dataicjmvspydtladf\",\"ue\":\"dataci\"}},{\"activity\":\"yljw\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\"],\"\":{\"bacgrysjgzuzo\":\"datarvbuphbwais\",\"ydd\":\"dataupdcmpfwfdcpedu\",\"nkhgg\":\"datapfdhfp\"}}],\"userProperties\":[{\"name\":\"apsypgmmieheq\",\"value\":\"datatetnywgme\"},{\"name\":\"ihaboj\",\"value\":\"datargetnc\"},{\"name\":\"ljwjrpljkc\",\"value\":\"dataed\"}],\"\":{\"acc\":\"dataefzlwohob\",\"cnoeiqhbr\":\"datalvixf\",\"kpbrr\":\"datacgmyjmcw\",\"teblrnu\":\"databzvink\"}}") + 
"{\"type\":\"sb\",\"typeProperties\":{\"scriptBlockExecutionTimeout\":\"datacnwqeixyjlfobj\",\"scripts\":[{\"text\":\"datatsvnlod\",\"type\":\"datavcqowcglf\",\"parameters\":[{\"name\":\"datasyskivlz\",\"type\":\"Int32\",\"value\":\"datavlgcppns\",\"direction\":\"Output\",\"size\":1864282211}]}],\"logSettings\":{\"logDestination\":\"ExternalStore\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"urwgty\",\"parameters\":{\"d\":\"dataoxollc\"}},\"path\":\"datacjsktejcmh\"}}},\"linkedServiceName\":{\"referenceName\":\"eavbezzp\",\"parameters\":{\"whodfwvzxrfr\":\"datadvv\"}},\"policy\":{\"timeout\":\"datayktlofgpnswv\",\"retry\":\"dataekwpgdfpoqbekk\",\"retryIntervalInSeconds\":539669660,\"secureInput\":true,\"secureOutput\":true,\"\":{\"flgtq\":\"datawprbzfbdsnc\",\"jomnkeaiamh\":\"dataowtazqexwkkjx\",\"clzrot\":\"datajxud\",\"fkdf\":\"datairttlwuqgaaj\"}},\"name\":\"kmeeowdo\",\"description\":\"jaqfebtvnsk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"tsy\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"gomdqght\":\"datajcbzijnykf\",\"kkdb\":\"dataweltnev\",\"lieyyfqhndj\":\"datagurnxqiwgrj\",\"hsgpym\":\"datayovuyxccrajx\"}}],\"userProperties\":[{\"name\":\"sdj\",\"value\":\"datarlpsj\"},{\"name\":\"nnuqszy\",\"value\":\"dataoiufrqsmjgddbunx\"}],\"\":{\"whacurmmbun\":\"datataqsfphg\"}}") .toObject(ScriptActivity.class); - Assertions.assertEquals("bjsrisfcc", model.name()); - Assertions.assertEquals("u", model.description()); + Assertions.assertEquals("kmeeowdo", model.name()); + Assertions.assertEquals("jaqfebtvnsk", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("kyvu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("apsypgmmieheq", model.userProperties().get(0).name()); - Assertions.assertEquals("mtzlcvokvo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2015861080, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("tsy", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("sdj", model.userProperties().get(0).name()); + Assertions.assertEquals("eavbezzp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(539669660, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(ScriptType.NON_QUERY, model.scripts().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterType.STRING, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT, + Assertions.assertEquals(ScriptActivityParameterType.INT32, model.scripts().get(0).parameters().get(0).type()); + Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(2077560759, model.scripts().get(0).parameters().get(0).size()); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("ojzjryp", + 
Assertions.assertEquals(1864282211, model.scripts().get(0).parameters().get(0).size()); + Assertions.assertEquals(ScriptActivityLogDestination.EXTERNAL_STORE, model.logSettings().logDestination()); + Assertions.assertEquals("urwgty", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScriptActivity model = new ScriptActivity().withName("bjsrisfcc").withDescription("u") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("kyvu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yljw") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("apsypgmmieheq").withValue("datatetnywgme"), - new UserProperty().withName("ihaboj").withValue("datargetnc"), - new UserProperty().withName("ljwjrpljkc").withValue("dataed"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mtzlcvokvo") - .withParameters(mapOf("ccnlvyhietezn", "datajdy", "zulkk", "datalsqymvihhgpe", "rysanifcfrtijz", - "datayfiuxdadc", "zebqbdewepoan", "dataum"))) - .withPolicy( - new ActivityPolicy().withTimeout("dataspde").withRetry("datajgjdqlazune") - .withRetryIntervalInSeconds(2015861080).withSecureInput( - false) - .withSecureOutput(true).withAdditionalProperties(mapOf())) - .withScriptBlockExecutionTimeout( - "dataikiogdtde") - .withScripts( - Arrays - .asList(new ScriptActivityScriptBlock().withText("datazsmya").withType(ScriptType.NON_QUERY) - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datamkhkuknccdbs") - .withType(ScriptActivityParameterType.STRING).withValue("datadmbnqyswpnog") - .withDirection(ScriptActivityParameterDirection.INPUT).withSize(2077560759), - new ScriptActivityParameter().withName("datansduwttrvg") - .withType(ScriptActivityParameterType.TIMESPAN).withValue("datatprfqttz").withDirection( - ScriptActivityParameterDirection.OUTPUT) - .withSize(1804249174))), - new ScriptActivityScriptBlock().withText("datazvtfkdzqtkxiyjq").withType(ScriptType.QUERY) - .withParameters(Arrays.asList(new ScriptActivityParameter().withName("dataebdhpizk") - .withType(ScriptActivityParameterType.DATE_TIME_OFFSET).withValue("datalmfydiodcgwbk") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(1540037039), - new ScriptActivityParameter().withName("datazduqthqgngrf") - .withType(ScriptActivityParameterType.DOUBLE).withValue("dataewftq") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(563491415), - new ScriptActivityParameter().withName("dataaqahmeskd") - .withType(ScriptActivityParameterType.DECIMAL).withValue("databdajc") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(130816507))))) - .withLogSettings( - new ScriptActivityTypePropertiesLogSettings() - .withLogDestination( - ScriptActivityLogDestination.ACTIVITY_OUTPUT) - .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ojzjryp") - .withParameters(mapOf("zst", "datahklcczgflo", "gwwrclxhveso", "datafwbftafrbuvw", "xuiod", - "datadxmmtyumejp", 
"aujhox", "datab"))) - .withPath("datavceuyw"))); + ScriptActivity model = new ScriptActivity().withName("kmeeowdo") + .withDescription("jaqfebtvnsk") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("tsy") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("sdj").withValue("datarlpsj"), + new UserProperty().withName("nnuqszy").withValue("dataoiufrqsmjgddbunx"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("eavbezzp") + .withParameters(mapOf("whodfwvzxrfr", "datadvv"))) + .withPolicy(new ActivityPolicy().withTimeout("datayktlofgpnswv") + .withRetry("dataekwpgdfpoqbekk") + .withRetryIntervalInSeconds(539669660) + .withSecureInput(true) + .withSecureOutput(true) + .withAdditionalProperties(mapOf())) + .withScriptBlockExecutionTimeout("datacnwqeixyjlfobj") + .withScripts(Arrays.asList(new ScriptActivityScriptBlock().withText("datatsvnlod") + .withType("datavcqowcglf") + .withParameters(Arrays.asList(new ScriptActivityParameter().withName("datasyskivlz") + .withType(ScriptActivityParameterType.INT32) + .withValue("datavlgcppns") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(1864282211))))) + .withLogSettings(new ScriptActivityTypePropertiesLogSettings() + .withLogDestination(ScriptActivityLogDestination.EXTERNAL_STORE) + .withLogLocationSettings(new LogLocationSettings().withLinkedServiceName( + new LinkedServiceReference().withReferenceName("urwgty").withParameters(mapOf("d", "dataoxollc"))) + .withPath("datacjsktejcmh"))); model = BinaryData.fromObject(model).toObject(ScriptActivity.class); - Assertions.assertEquals("bjsrisfcc", model.name()); - Assertions.assertEquals("u", model.description()); + Assertions.assertEquals("kmeeowdo", model.name()); + Assertions.assertEquals("jaqfebtvnsk", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("kyvu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("apsypgmmieheq", model.userProperties().get(0).name()); - Assertions.assertEquals("mtzlcvokvo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2015861080, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); + Assertions.assertEquals("tsy", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("sdj", model.userProperties().get(0).name()); + Assertions.assertEquals("eavbezzp", model.linkedServiceName().referenceName()); + Assertions.assertEquals(539669660, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(ScriptType.NON_QUERY, model.scripts().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterType.STRING, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT, + Assertions.assertEquals(ScriptActivityParameterType.INT32, 
model.scripts().get(0).parameters().get(0).type()); + Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(2077560759, model.scripts().get(0).parameters().get(0).size()); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("ojzjryp", + Assertions.assertEquals(1864282211, model.scripts().get(0).parameters().get(0).size()); + Assertions.assertEquals(ScriptActivityLogDestination.EXTERNAL_STORE, model.logSettings().logDestination()); + Assertions.assertEquals("urwgty", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java index b222078c3fc67..6b0a7f253a6e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java @@ -17,23 +17,26 @@ public final class ScriptActivityTypePropertiesLogSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScriptActivityTypePropertiesLogSettings model = BinaryData.fromString( - "{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"yyxhzgxkwc\",\"parameters\":{\"fylbokbriy\":\"datarrmlkrroqsdvxdd\",\"buravswnnsb\":\"datarxae\"}},\"path\":\"dataum\"}}") + "{\"logDestination\":\"ExternalStore\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"jqysfe\",\"parameters\":{\"hfgafznzemisqun\":\"dataiogwckvoxl\",\"uvtbptdeumlfszx\":\"datawosanchr\",\"gp\":\"datarabknkeo\",\"wsnccmunh\":\"datadcrnubntowohtu\"}},\"path\":\"datacsgczv\"}}") .toObject(ScriptActivityTypePropertiesLogSettings.class); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logDestination()); - Assertions.assertEquals("yyxhzgxkwc", model.logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals(ScriptActivityLogDestination.EXTERNAL_STORE, model.logDestination()); + Assertions.assertEquals("jqysfe", model.logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ScriptActivityTypePropertiesLogSettings model = new ScriptActivityTypePropertiesLogSettings() - .withLogDestination(ScriptActivityLogDestination.ACTIVITY_OUTPUT) - .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yyxhzgxkwc") - .withParameters(mapOf("fylbokbriy", "datarrmlkrroqsdvxdd", "buravswnnsb", "datarxae"))) - .withPath("dataum")); + ScriptActivityTypePropertiesLogSettings model + = new ScriptActivityTypePropertiesLogSettings() + .withLogDestination(ScriptActivityLogDestination.EXTERNAL_STORE) + .withLogLocationSettings( + new LogLocationSettings() + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jqysfe") + .withParameters(mapOf("hfgafznzemisqun", "dataiogwckvoxl", "uvtbptdeumlfszx", + 
"datawosanchr", "gp", "datarabknkeo", "wsnccmunh", "datadcrnubntowohtu"))) + .withPath("datacsgczv")); model = BinaryData.fromObject(model).toObject(ScriptActivityTypePropertiesLogSettings.class); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logDestination()); - Assertions.assertEquals("yyxhzgxkwc", model.logLocationSettings().linkedServiceName().referenceName()); + Assertions.assertEquals(ScriptActivityLogDestination.EXTERNAL_STORE, model.logDestination()); + Assertions.assertEquals("jqysfe", model.logLocationSettings().linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java index e550e2a4a3b0d..7f81dc79594d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java @@ -14,7 +14,6 @@ import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import com.azure.resourcemanager.datafactory.models.ScriptType; import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -24,51 +23,86 @@ public final class ScriptActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ScriptActivityTypeProperties model = BinaryData.fromString( - "{\"scriptBlockExecutionTimeout\":\"dataandmusud\",\"scripts\":[{\"text\":\"datas\",\"type\":\"Query\",\"parameters\":[{\"name\":\"datatcpffmi\",\"type\":\"Timespan\",\"value\":\"datafbeott\",\"direction\":\"InputOutput\",\"size\":1855489224}]},{\"text\":\"dataejpjzqbdutvnlowv\",\"type\":\"Query\",\"parameters\":[{\"name\":\"datah\",\"type\":\"Boolean\",\"value\":\"databdqobngjbeihcaxk\",\"direction\":\"Output\",\"size\":1878476180},{\"name\":\"datacxnnirnfuvesmepq\",\"type\":\"Int64\",\"value\":\"dataptsvn\",\"direction\":\"InputOutput\",\"size\":473285132}]}],\"logSettings\":{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"fmwbte\",\"parameters\":{\"cyvmsduodpmtisg\":\"datamndt\"}},\"path\":\"datae\"}}}") + 
"{\"scriptBlockExecutionTimeout\":\"datalivvnyzc\",\"scripts\":[{\"text\":\"dataisuhareqyiadvv\",\"type\":\"datandfyelpnlpnyyux\",\"parameters\":[{\"name\":\"datafxsdntukoss\",\"type\":\"Decimal\",\"value\":\"datavuygeclyrtoshk\",\"direction\":\"Output\",\"size\":923591034},{\"name\":\"datazdnkgpvbvic\",\"type\":\"Decimal\",\"value\":\"databv\",\"direction\":\"Output\",\"size\":1502886890}]},{\"text\":\"datajghdfus\",\"type\":\"dataho\",\"parameters\":[{\"name\":\"dataynnm\",\"type\":\"Timespan\",\"value\":\"dataiighpxxwbetmqugo\",\"direction\":\"Output\",\"size\":1497199399}]},{\"text\":\"datalrbsfqrgjejabqv\",\"type\":\"dataohiqyazpxlyabj\",\"parameters\":[{\"name\":\"datasjfwurhkuxp\",\"type\":\"Guid\",\"value\":\"databgwgmygln\",\"direction\":\"Input\",\"size\":1386262224},{\"name\":\"datadsygdzzufr\",\"type\":\"Boolean\",\"value\":\"datawdglmfs\",\"direction\":\"InputOutput\",\"size\":45666387},{\"name\":\"datazlt\",\"type\":\"Boolean\",\"value\":\"datakfuovkgqtz\",\"direction\":\"InputOutput\",\"size\":988525427}]},{\"text\":\"datatz\",\"type\":\"datawoqmpgvx\",\"parameters\":[{\"name\":\"datasvykthxudowjwlte\",\"type\":\"Int64\",\"value\":\"datadcohsqufsyihsnz\",\"direction\":\"Input\",\"size\":1073338575},{\"name\":\"datag\",\"type\":\"DateTimeOffset\",\"value\":\"dataxjzalhun\",\"direction\":\"InputOutput\",\"size\":359226388}]}],\"logSettings\":{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"kshaulltvlylboq\",\"parameters\":{\"l\":\"dataxxgxncoaiy\",\"xakuq\":\"datavmdjzytao\"}},\"path\":\"dataiohanxlioj\"}}}") .toObject(ScriptActivityTypeProperties.class); - Assertions.assertEquals(ScriptType.QUERY, model.scripts().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterType.TIMESPAN, - model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, + Assertions.assertEquals(ScriptActivityParameterType.DECIMAL, model.scripts().get(0).parameters().get(0).type()); + Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(1855489224, model.scripts().get(0).parameters().get(0).size()); + Assertions.assertEquals(923591034, model.scripts().get(0).parameters().get(0).size()); Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("fmwbte", + Assertions.assertEquals("kshaulltvlylboq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { ScriptActivityTypeProperties model - = new ScriptActivityTypeProperties().withScriptBlockExecutionTimeout("dataandmusud") + = new ScriptActivityTypeProperties().withScriptBlockExecutionTimeout("datalivvnyzc") .withScripts(Arrays.asList( - new ScriptActivityScriptBlock().withText("datas").withType(ScriptType.QUERY) - .withParameters(Arrays.asList(new ScriptActivityParameter().withName("datatcpffmi") - .withType(ScriptActivityParameterType.TIMESPAN).withValue("datafbeott") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT).withSize(1855489224))), - new ScriptActivityScriptBlock().withText("dataejpjzqbdutvnlowv").withType(ScriptType.QUERY) + new ScriptActivityScriptBlock().withText("dataisuhareqyiadvv") + .withType("datandfyelpnlpnyyux") .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datah") - 
.withType(ScriptActivityParameterType.BOOLEAN).withValue("databdqobngjbeihcaxk") - .withDirection(ScriptActivityParameterDirection.OUTPUT).withSize(1878476180), - new ScriptActivityParameter().withName("datacxnnirnfuvesmepq") - .withType(ScriptActivityParameterType.INT64).withValue("dataptsvn") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT).withSize(473285132))))) + new ScriptActivityParameter().withName("datafxsdntukoss") + .withType(ScriptActivityParameterType.DECIMAL) + .withValue("datavuygeclyrtoshk") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(923591034), + new ScriptActivityParameter().withName("datazdnkgpvbvic") + .withType(ScriptActivityParameterType.DECIMAL) + .withValue("databv") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(1502886890))), + new ScriptActivityScriptBlock().withText("datajghdfus") + .withType("dataho") + .withParameters(Arrays.asList(new ScriptActivityParameter().withName("dataynnm") + .withType(ScriptActivityParameterType.TIMESPAN) + .withValue("dataiighpxxwbetmqugo") + .withDirection(ScriptActivityParameterDirection.OUTPUT) + .withSize(1497199399))), + new ScriptActivityScriptBlock().withText("datalrbsfqrgjejabqv") + .withType("dataohiqyazpxlyabj") + .withParameters(Arrays.asList( + new ScriptActivityParameter().withName("datasjfwurhkuxp") + .withType(ScriptActivityParameterType.GUID) + .withValue("databgwgmygln") + .withDirection(ScriptActivityParameterDirection.INPUT) + .withSize(1386262224), + new ScriptActivityParameter().withName("datadsygdzzufr") + .withType(ScriptActivityParameterType.BOOLEAN) + .withValue("datawdglmfs") + .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) + .withSize(45666387), + new ScriptActivityParameter().withName("datazlt") + .withType(ScriptActivityParameterType.BOOLEAN) + .withValue("datakfuovkgqtz") + .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) + .withSize(988525427))), + new ScriptActivityScriptBlock().withText("datatz") + .withType("datawoqmpgvx") + .withParameters(Arrays.asList( + new ScriptActivityParameter().withName("datasvykthxudowjwlte") + .withType(ScriptActivityParameterType.INT64) + .withValue("datadcohsqufsyihsnz") + .withDirection(ScriptActivityParameterDirection.INPUT) + .withSize(1073338575), + new ScriptActivityParameter().withName("datag") + .withType(ScriptActivityParameterType.DATE_TIME_OFFSET) + .withValue("dataxjzalhun") + .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) + .withSize(359226388))))) .withLogSettings(new ScriptActivityTypePropertiesLogSettings() .withLogDestination(ScriptActivityLogDestination.ACTIVITY_OUTPUT) .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fmwbte") - .withParameters(mapOf("cyvmsduodpmtisg", "datamndt"))) - .withPath("datae"))); + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kshaulltvlylboq") + .withParameters(mapOf("l", "dataxxgxncoaiy", "xakuq", "datavmdjzytao"))) + .withPath("dataiohanxlioj"))); model = BinaryData.fromObject(model).toObject(ScriptActivityTypeProperties.class); - Assertions.assertEquals(ScriptType.QUERY, model.scripts().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterType.TIMESPAN, - model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, + Assertions.assertEquals(ScriptActivityParameterType.DECIMAL, model.scripts().get(0).parameters().get(0).type()); + 
Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(1855489224, model.scripts().get(0).parameters().get(0).size()); + Assertions.assertEquals(923591034, model.scripts().get(0).parameters().get(0).size()); Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("fmwbte", + Assertions.assertEquals("kshaulltvlylboq", model.logSettings().logLocationSettings().linkedServiceName().referenceName()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java index e1268f0b1eae5..1f1f8a129b640 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java @@ -10,7 +10,7 @@ public final class SecretBaseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SecretBase model = BinaryData.fromString("{\"type\":\"SecretBase\"}").toObject(SecretBase.class); + SecretBase model = BinaryData.fromString("{\"type\":\"hydwbdbfgrlp\"}").toObject(SecretBase.class); } @org.junit.jupiter.api.Test diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java index f5c60df18322a..f48ba57c2b67f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java @@ -11,17 +11,17 @@ public final class SecureInputOutputPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SecureInputOutputPolicy model = BinaryData.fromString("{\"secureInput\":true,\"secureOutput\":false}") + SecureInputOutputPolicy model = BinaryData.fromString("{\"secureInput\":false,\"secureOutput\":false}") .toObject(SecureInputOutputPolicy.class); - Assertions.assertEquals(true, model.secureInput()); + Assertions.assertEquals(false, model.secureInput()); Assertions.assertEquals(false, model.secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SecureInputOutputPolicy model = new SecureInputOutputPolicy().withSecureInput(true).withSecureOutput(false); + SecureInputOutputPolicy model = new SecureInputOutputPolicy().withSecureInput(false).withSecureOutput(false); model = BinaryData.fromObject(model).toObject(SecureInputOutputPolicy.class); - Assertions.assertEquals(true, model.secureInput()); + Assertions.assertEquals(false, model.secureInput()); Assertions.assertEquals(false, model.secureOutput()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java index 9a00e7e1c9776..02cfbd9c7542a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java @@ -12,14 +12,14 @@ public final class SecureStringTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SecureString model - = BinaryData.fromString("{\"type\":\"SecureString\",\"value\":\"pylx\"}").toObject(SecureString.class); - Assertions.assertEquals("pylx", model.value()); + = BinaryData.fromString("{\"type\":\"qbeiv\",\"value\":\"xolriy\"}").toObject(SecureString.class); + Assertions.assertEquals("xolriy", model.value()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SecureString model = new SecureString().withValue("pylx"); + SecureString model = new SecureString().withValue("xolriy"); model = BinaryData.fromObject(model).toObject(SecureString.class); - Assertions.assertEquals("pylx", model.value()); + Assertions.assertEquals("xolriy", model.value()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java index 3effbe53e062d..7e1a9c9276316 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java @@ -11,19 +11,19 @@ public final class SelfDependencyTumblingWindowTriggerReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SelfDependencyTumblingWindowTriggerReference model = BinaryData.fromString( - "{\"type\":\"SelfDependencyTumblingWindowTriggerReference\",\"offset\":\"vpoyhvfcwed\",\"size\":\"eroezgibfisfmcx\"}") - .toObject(SelfDependencyTumblingWindowTriggerReference.class); - Assertions.assertEquals("vpoyhvfcwed", model.offset()); - Assertions.assertEquals("eroezgibfisfmcx", model.size()); + SelfDependencyTumblingWindowTriggerReference model + = BinaryData.fromString("{\"type\":\"q\",\"offset\":\"nvwaxmeyjimf\",\"size\":\"hcq\"}") + .toObject(SelfDependencyTumblingWindowTriggerReference.class); + Assertions.assertEquals("nvwaxmeyjimf", model.offset()); + Assertions.assertEquals("hcq", model.size()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SelfDependencyTumblingWindowTriggerReference model - = new SelfDependencyTumblingWindowTriggerReference().withOffset("vpoyhvfcwed").withSize("eroezgibfisfmcx"); + = new SelfDependencyTumblingWindowTriggerReference().withOffset("nvwaxmeyjimf").withSize("hcq"); model = BinaryData.fromObject(model).toObject(SelfDependencyTumblingWindowTriggerReference.class); - Assertions.assertEquals("vpoyhvfcwed", model.offset()); - Assertions.assertEquals("eroezgibfisfmcx", model.size()); + Assertions.assertEquals("nvwaxmeyjimf", model.offset()); + Assertions.assertEquals("hcq", 
model.size()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java index f7357746c5b6b..6eb3776cfd6c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java @@ -15,23 +15,22 @@ public final class SelfHostedIntegrationRuntimeNodeInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SelfHostedIntegrationRuntimeNodeInner model = BinaryData.fromString( - "{\"nodeName\":\"e\",\"machineName\":\"sgzvahapjyzhpv\",\"hostServiceUri\":\"zcjrvxdjzlmwlx\",\"status\":\"Online\",\"capabilities\":{\"nnprn\":\"hzovawjvzunlut\",\"eilpjzuaejxdu\":\"i\",\"pwo\":\"tskzbbtdzumveek\",\"fpbsjyofdxl\":\"uh\"},\"versionStatus\":\"sd\",\"version\":\"ouwaboekqvkeln\",\"registerTime\":\"2021-02-02T03:25:54Z\",\"lastConnectTime\":\"2020-12-28T23:28:21Z\",\"expiryTime\":\"2021-07-06T07:16:14Z\",\"lastStartTime\":\"2021-03-30T06:11:19Z\",\"lastStopTime\":\"2021-08-23T09:42:03Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-01-31T17:20:46Z\",\"lastEndUpdateTime\":\"2021-07-31T18:32:18Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1744629944,\"maxConcurrentJobs\":923639125,\"\":{\"iidzyexzne\":\"dataawjoyaqcslyjp\"}}") + "{\"nodeName\":\"gmtsavjcbpwxqpsr\",\"machineName\":\"ftguv\",\"hostServiceUri\":\"uhprwmdyvxqt\",\"status\":\"InitializeFailed\",\"capabilities\":{\"bycnojvkn\":\"wroyqbexrmcq\"},\"versionStatus\":\"fqsgzvahapjy\",\"version\":\"pvgqzcjrvxdjzlm\",\"registerTime\":\"2021-03-28T06:32:39Z\",\"lastConnectTime\":\"2021-07-11T22:18:52Z\",\"expiryTime\":\"2021-11-03T14:53:04Z\",\"lastStartTime\":\"2021-11-07T19:38:17Z\",\"lastStopTime\":\"2021-02-21T21:24:09Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-04-06T17:45:34Z\",\"lastEndUpdateTime\":\"2021-08-30T15:15:07Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":2116872834,\"maxConcurrentJobs\":1184758782,\"\":{\"jzuaejxdultskzbb\":\"dataprnxipeil\"}}") .toObject(SelfHostedIntegrationRuntimeNodeInner.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SelfHostedIntegrationRuntimeNodeInner model = new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "e", "lastStartUpdateTime", "2021-01-31T17:20:46Z", - "lastConnectTime", "2020-12-28T23:28:21Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"nnprn\":\"hzovawjvzunlut\",\"eilpjzuaejxdu\":\"i\",\"pwo\":\"tskzbbtdzumveek\",\"fpbsjyofdxl\":\"uh\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "zcjrvxdjzlmwlx", "registerTime", "2021-02-02T03:25:54Z", "maxConcurrentJobs", - 923639125, "lastStopTime", "2021-08-23T09:42:03Z", "version", "ouwaboekqvkeln", "machineName", - "sgzvahapjyzhpv", "versionStatus", "sd", "concurrentJobsLimit", 1744629944, "lastEndUpdateTime", - "2021-07-31T18:32:18Z", "expiryTime", "2021-07-06T07:16:14Z", "lastStartTime", "2021-03-30T06:11:19Z", - "lastUpdateResult", "Fail", "isActiveDispatcher", true, "status", "Online")); + 
.withAdditionalProperties(mapOf("nodeName", "gmtsavjcbpwxqpsr", "lastStartUpdateTime", + "2021-04-06T17:45:34Z", "lastConnectTime", "2021-07-11T22:18:52Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"bycnojvkn\":\"wroyqbexrmcq\"}", Object.class, SerializerEncoding.JSON), + "hostServiceUri", "uhprwmdyvxqt", "registerTime", "2021-03-28T06:32:39Z", "maxConcurrentJobs", + 1184758782, "lastStopTime", "2021-02-21T21:24:09Z", "version", "pvgqzcjrvxdjzlm", "machineName", + "ftguv", "versionStatus", "fqsgzvahapjy", "concurrentJobsLimit", 2116872834, "lastEndUpdateTime", + "2021-08-30T15:15:07Z", "expiryTime", "2021-11-03T14:53:04Z", "lastStartTime", "2021-11-07T19:38:17Z", + "lastUpdateResult", "None", "isActiveDispatcher", false, "status", "InitializeFailed")); model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeNodeInner.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java index ea1872e2dc90e..6215590704e70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java @@ -18,64 +18,59 @@ public final class SelfHostedIntegrationRuntimeStatusTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SelfHostedIntegrationRuntimeStatus model = BinaryData.fromString( - 
"{\"type\":\"SelfHosted\",\"typeProperties\":{\"createTime\":\"2021-07-01T23:20:22Z\",\"taskQueueId\":\"s\",\"internalChannelEncryption\":\"SslEncrypted\",\"version\":\"emkrhbsdgktluifi\",\"nodes\":[{\"nodeName\":\"cpenobqysbees\",\"machineName\":\"bvvaerszsufzsa\",\"hostServiceUri\":\"bric\",\"status\":\"Limited\",\"capabilities\":{\"ntzunhyyqxckdlx\":\"n\",\"fvijnu\":\"pisrdnowincbe\"},\"versionStatus\":\"fiiytqxewjsyu\",\"version\":\"zlghkvoxdp\",\"registerTime\":\"2021-05-27T22:13:23Z\",\"lastConnectTime\":\"2021-12-03T10:30:18Z\",\"expiryTime\":\"2021-08-14T12:28:51Z\",\"lastStartTime\":\"2021-09-05T12:26:50Z\",\"lastStopTime\":\"2021-10-25T10:44:43Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-10-18T15:20:22Z\",\"lastEndUpdateTime\":\"2021-02-18T11:20:26Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1098696529,\"maxConcurrentJobs\":1386786245,\"\":{\"pwxnblzrmi\":\"datanpwdwdmuvyakrb\"}},{\"nodeName\":\"othyfjbp\",\"machineName\":\"dhfrvsi\",\"hostServiceUri\":\"wgnpcjnia\",\"status\":\"NeedRegistration\",\"capabilities\":{\"uuogdkpnm\":\"jjioq\",\"xqucnbgibkls\":\"rfuqjdeb\",\"evbfvxmtsmgkret\":\"wdkouzyvi\",\"ceulbyz\":\"ny\"},\"versionStatus\":\"xsygaoymyckd\",\"version\":\"brxbmljrjyfjl\",\"registerTime\":\"2021-08-29T14:58:42Z\",\"lastConnectTime\":\"2021-10-18T07:17:24Z\",\"expiryTime\":\"2021-03-14T00:44:48Z\",\"lastStartTime\":\"2021-04-09T05:29:06Z\",\"lastStopTime\":\"2021-08-10T05:40:55Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-08-17T06:45Z\",\"lastEndUpdateTime\":\"2021-02-26T23:11:52Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1855197747,\"maxConcurrentJobs\":582722035,\"\":{\"csotwqtkpdcdefqo\":\"datai\",\"nddaxaogsk\":\"datarmgm\",\"fjcvmkkbpgdiwd\":\"datacmmmbipysehyybo\",\"cehzrtrgpd\":\"datayhdtiembrwqwvc\"}},{\"nodeName\":\"t\",\"machineName\":\"hyfwjfqktuzr\",\"hostServiceUri\":\"pecsdk\",\"status\":\"Online\",\"capabilities\":{\"hjlugcupcyfrhoo\":\"vttqjntvhnjp\",\"vuxyeeafdxsuwly\":\"v\",\"hj\":\"xzhgbspdx\"},\"versionStatus\":\"xkzxqomzdfa\",\"version\":\"qve\",\"registerTime\":\"2021-04-19T01:11:47Z\",\"lastConnectTime\":\"2021-01-05T03:49:30Z\",\"expiryTime\":\"2021-12-04T14:35:50Z\",\"lastStartTime\":\"2021-09-24T04:45:33Z\",\"lastStopTime\":\"2021-08-21T16:12:51Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-01-08T15:40:51Z\",\"lastEndUpdateTime\":\"2021-07-23T22:08:33Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1876720469,\"maxConcurrentJobs\":988799296,\"\":{\"wqlbxmynslcvyn\":\"dataxapew\",\"czroddc\":\"datavwttts\",\"k\":\"dataqimodnbjmj\",\"fja\":\"databucmzkqt\"}},{\"nodeName\":\"tavc\",\"machineName\":\"godjfyplavb\",\"hostServiceUri\":\"ecedsoqwexie\",\"status\":\"Online\",\"capabilities\":{\"fadgywylavetq\":\"eikffjq\",\"tdgj\":\"vohy\"},\"versionStatus\":\"btkogfggyl\",\"version\":\"olrvwsgseqjt\",\"registerTime\":\"2021-12-09T01:57:30Z\",\"lastConnectTime\":\"2021-08-03T22:33:15Z\",\"expiryTime\":\"2021-01-29T17:19:57Z\",\"lastStartTime\":\"2021-04-07T03:22:50Z\",\"lastStopTime\":\"2021-02-27T15:10:16Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-01-22T08:52:14Z\",\"lastEndUpdateTime\":\"2021-06-26T15:14:55Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":99010631,\"maxConcurrentJobs\":1381935048,\"\":{\"oflzuk\":\"datairdzdgv\",\"sxblmnxrxkuly\":\"datagougxpypbmz\",\"qon\":\"dataivvi\"}}],\"scheduledUpdateDate\":\"2021-11-20T22:58:39Z\",\"updateDelayOffset\":\"fozbgodywxj\",\"localTimeZoneOffset\":\"frxvlusedpnkz\",\"capabiliti
es\":{\"vvyjehy\":\"axtmvmycvjpaxjd\",\"v\":\"nfjngoqmr\",\"xunwenbphyl\":\"bgtuhw\"},\"serviceUrls\":[\"gzrxxdusebkcfet\"],\"autoUpdate\":\"Off\",\"versionStatus\":\"terma\",\"links\":[{\"name\":\"rsnmftubqwxvsxr\",\"subscriptionId\":\"yzjlgrwjbsycukb\",\"dataFactoryName\":\"sitsxhvs\",\"dataFactoryLocation\":\"pwqieyxjkctyqst\",\"createTime\":\"2021-02-25T12:41:44Z\"},{\"name\":\"qepeftmub\",\"subscriptionId\":\"zoepeqlhbtysyiz\",\"dataFactoryName\":\"lctpqnofkwh\",\"dataFactoryLocation\":\"zwfyfdbvoo\",\"createTime\":\"2020-12-20T02:18:07Z\"},{\"name\":\"kd\",\"subscriptionId\":\"qykgjjsmvsi\",\"dataFactoryName\":\"mlmwj\",\"dataFactoryLocation\":\"m\",\"createTime\":\"2021-08-14T08:35:22Z\"},{\"name\":\"acvemmriyz\",\"subscriptionId\":\"quesxplcsinbulo\",\"dataFactoryName\":\"xhcynnmv\",\"dataFactoryLocation\":\"zvkwqqpw\",\"createTime\":\"2021-02-12T08:54:34Z\"}],\"pushedVersion\":\"jqcqyzmrtfdlgpr\",\"latestVersion\":\"jl\",\"autoUpdateETA\":\"2021-02-22T05:22Z\",\"selfContainedInteractiveAuthoringEnabled\":true},\"dataFactoryName\":\"yuwa\",\"state\":\"Stopping\",\"\":{\"nuhgy\":\"datavaidzcephn\",\"mrwpe\":\"datazkhi\",\"rjbpertjpair\":\"datai\"}}") + "{\"type\":\"SelfHosted\",\"typeProperties\":{\"createTime\":\"2021-09-03T21:24:36Z\",\"taskQueueId\":\"vopvfgu\",\"internalChannelEncryption\":\"SslEncrypted\",\"version\":\"mwdqw\",\"nodes\":[{\"nodeName\":\"wzeleqpnkq\",\"machineName\":\"rumhaeqboig\",\"hostServiceUri\":\"xatwxiyarf\",\"status\":\"Offline\",\"capabilities\":{\"ruexhgdh\":\"tidzvvndpr\",\"pfopekyzdzvhyoid\":\"kplaseyltqx\",\"vzpqrxsifixo\":\"veqyje\",\"pwpuayxtvpiul\":\"vnylfutg\"},\"versionStatus\":\"b\",\"version\":\"mjrsmkjalor\",\"registerTime\":\"2021-10-31T11:34:50Z\",\"lastConnectTime\":\"2021-07-23T11:41:20Z\",\"expiryTime\":\"2021-11-24T03:52:34Z\",\"lastStartTime\":\"2021-11-03T15:28:53Z\",\"lastStopTime\":\"2021-04-25T12:51:57Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-10-12T04:43:20Z\",\"lastEndUpdateTime\":\"2021-09-26T14:51:21Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":674191876,\"maxConcurrentJobs\":553557202,\"\":{\"g\":\"datakkucwmj\"}},{\"nodeName\":\"twmskuqg\",\"machineName\":\"orqtr\",\"hostServiceUri\":\"eikqvaagnzbkqq\",\"status\":\"Initializing\",\"capabilities\":{\"omzvgjynuwcqu\":\"uc\"},\"versionStatus\":\"anqdmmrodpgtblm\",\"version\":\"lcbwvs\",\"registerTime\":\"2021-10-23T18:25:33Z\",\"lastConnectTime\":\"2021-09-09T10:01Z\",\"expiryTime\":\"2021-09-23T07:22:31Z\",\"lastStartTime\":\"2021-07-30T11:30:39Z\",\"lastStopTime\":\"2021-10-24T02:18:38Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-06-10T03:00:04Z\",\"lastEndUpdateTime\":\"2021-10-09T07:22:10Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":372484908,\"maxConcurrentJobs\":1239610602,\"\":{\"burbxovgjipbnngh\":\"datazcnxjvipehxl\",\"brsaeuzsojblyygf\":\"datajxwjnwgoxenhpyix\",\"teuyqtzo\":\"dataicuydrswq\",\"oqoafdvxrlrsrfrx\":\"dataaotrbnvoepht\"}},{\"nodeName\":\"mdjpspipkpcvhqa\",\"machineName\":\"casncgqui\",\"hostServiceUri\":\"qvatst\",\"status\":\"Offline\",\"capabilities\":{\"r\":\"cktjscjkexybx\",\"mbf\":\"jgptxvzhyun\"},\"versionStatus\":\"selblwmfudpntpsw\",\"version\":\"fktytrivfleqv\",\"registerTime\":\"2021-07-12T03:07:59Z\",\"lastConnectTime\":\"2021-07-09T08:56:23Z\",\"expiryTime\":\"2021-05-08T20:41:53Z\",\"lastStartTime\":\"2021-01-16T11:21:54Z\",\"lastStopTime\":\"2021-08-06T15:03:51Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-09-06T14:18:49Z\",\"lastEndUpdateTime\":\"2021-04-02
T19:34:10Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":777618824,\"maxConcurrentJobs\":130802880,\"\":{\"bdhmxbyzfjgr\":\"datailhvtroxlybddx\"}},{\"nodeName\":\"brbmcrtvxjwpvqre\",\"machineName\":\"ui\",\"hostServiceUri\":\"lqwbrfiovpdl\",\"status\":\"Limited\",\"capabilities\":{\"sd\":\"iwerlbcetrvird\",\"yzegchtabhac\":\"wpvnloop\"},\"versionStatus\":\"fqzcklvvwvpfqt\",\"version\":\"zhmlnfvyhdhf\",\"registerTime\":\"2021-08-12T19:39:03Z\",\"lastConnectTime\":\"2021-02-09T04:53:23Z\",\"expiryTime\":\"2021-03-20T04:17:59Z\",\"lastStartTime\":\"2021-07-22T21:24:26Z\",\"lastStopTime\":\"2021-12-03T23:38:17Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-06-22T05:42:16Z\",\"lastEndUpdateTime\":\"2021-02-11T16:56:09Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1715409221,\"maxConcurrentJobs\":815203127,\"\":{\"yqtmdhb\":\"dataayehfzzi\",\"bejhxopehel\":\"datatkgmwxzlphzujkqa\"}}],\"scheduledUpdateDate\":\"2021-04-21T19:03:52Z\",\"updateDelayOffset\":\"hdapxdiibjjwue\",\"localTimeZoneOffset\":\"hqyhxhuhhnr\",\"capabilities\":{\"v\":\"zfkhfnzjffajh\"},\"serviceUrls\":[\"gciedlqvpdtrls\",\"srwlfgbuu\"],\"autoUpdate\":\"Off\",\"versionStatus\":\"tps\",\"links\":[{\"name\":\"fupnypluiv\",\"subscriptionId\":\"sel\",\"dataFactoryName\":\"zrpespeb\",\"dataFactoryLocation\":\"gzx\",\"createTime\":\"2021-11-26T17:46:18Z\"},{\"name\":\"fvflyqauu\",\"subscriptionId\":\"skimntxdvlqmnwhd\",\"dataFactoryName\":\"yvpduma\",\"dataFactoryLocation\":\"p\",\"createTime\":\"2021-04-01T14:53:53Z\"}],\"pushedVersion\":\"ettnmiufpbfqml\",\"latestVersion\":\"yqyekuw\",\"autoUpdateETA\":\"2021-11-04T23:26:01Z\",\"selfContainedInteractiveAuthoringEnabled\":true},\"dataFactoryName\":\"mydogrtfwzecgbxr\",\"state\":\"Stopping\",\"\":{\"wqbvanvgpxn\":\"datajdxii\",\"ruywrxnk\":\"dataaiitn\",\"xeovbgrqwfuxn\":\"datamaebdrxjsmrsea\",\"yl\":\"dataue\"}}") .toObject(SelfHostedIntegrationRuntimeStatus.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SelfHostedIntegrationRuntimeStatus model = new SelfHostedIntegrationRuntimeStatus() - .withNodes(Arrays.asList( - new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "cpenobqysbees", "lastStartUpdateTime", - "2021-10-18T15:20:22Z", "lastConnectTime", "2021-12-03T10:30:18Z", "capabilities", - JacksonAdapter - .createDefaultSerializerAdapter() - .deserialize("{\"ntzunhyyqxckdlx\":\"n\",\"fvijnu\":\"pisrdnowincbe\"}", Object.class, - SerializerEncoding.JSON), - "hostServiceUri", "bric", "registerTime", "2021-05-27T22:13:23Z", "maxConcurrentJobs", - 1386786245, "lastStopTime", "2021-10-25T10:44:43Z", "version", "zlghkvoxdp", "machineName", - "bvvaerszsufzsa", "versionStatus", "fiiytqxewjsyu", "concurrentJobsLimit", 1098696529, - "lastEndUpdateTime", "2021-02-18T11:20:26Z", "expiryTime", "2021-08-14T12:28:51Z", - "lastStartTime", "2021-09-05T12:26:50Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", - true, "status", "Limited")), - new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "othyfjbp", - "lastStartUpdateTime", "2021-08-17T06:45Z", "lastConnectTime", "2021-10-18T07:17:24Z", - "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"uuogdkpnm\":\"jjioq\",\"xqucnbgibkls\":\"rfuqjdeb\",\"evbfvxmtsmgkret\":\"wdkouzyvi\",\"ceulbyz\":\"ny\"}", + SelfHostedIntegrationRuntimeStatus model = new SelfHostedIntegrationRuntimeStatus().withNodes(Arrays.asList( + new 
SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "wzeleqpnkq", + "lastStartUpdateTime", "2021-10-12T04:43:20Z", "lastConnectTime", "2021-07-23T11:41:20Z", + "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"ruexhgdh\":\"tidzvvndpr\",\"pfopekyzdzvhyoid\":\"kplaseyltqx\",\"vzpqrxsifixo\":\"veqyje\",\"pwpuayxtvpiul\":\"vnylfutg\"}", Object.class, SerializerEncoding.JSON), - "hostServiceUri", "wgnpcjnia", "registerTime", "2021-08-29T14:58:42Z", "maxConcurrentJobs", - 582722035, "lastStopTime", "2021-08-10T05:40:55Z", "version", "brxbmljrjyfjl", "machineName", - "dhfrvsi", "versionStatus", "xsygaoymyckd", "concurrentJobsLimit", 1855197747, "lastEndUpdateTime", - "2021-02-26T23:11:52Z", "expiryTime", "2021-03-14T00:44:48Z", "lastStartTime", - "2021-04-09T05:29:06Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", true, "status", - "NeedRegistration")), - new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "t", - "lastStartUpdateTime", "2021-01-08T15:40:51Z", "lastConnectTime", "2021-01-05T03:49:30Z", - "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"hjlugcupcyfrhoo\":\"vttqjntvhnjp\",\"vuxyeeafdxsuwly\":\"v\",\"hj\":\"xzhgbspdx\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "pecsdk", "registerTime", "2021-04-19T01:11:47Z", "maxConcurrentJobs", 988799296, - "lastStopTime", "2021-08-21T16:12:51Z", "version", "qve", "machineName", "hyfwjfqktuzr", - "versionStatus", "xkzxqomzdfa", "concurrentJobsLimit", 1876720469, "lastEndUpdateTime", - "2021-07-23T22:08:33Z", "expiryTime", "2021-12-04T14:35:50Z", "lastStartTime", - "2021-09-24T04:45:33Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", true, "status", - "Online")), - new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "tavc", "lastStartUpdateTime", "2021-01-22T08:52:14Z", - "lastConnectTime", "2021-08-03T22:33:15Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"fadgywylavetq\":\"eikffjq\",\"tdgj\":\"vohy\"}", Object.class, SerializerEncoding.JSON), - "hostServiceUri", "ecedsoqwexie", "registerTime", "2021-12-09T01:57:30Z", "maxConcurrentJobs", - 1381935048, "lastStopTime", "2021-02-27T15:10:16Z", "version", "olrvwsgseqjt", "machineName", - "godjfyplavb", "versionStatus", "btkogfggyl", "concurrentJobsLimit", 99010631, - "lastEndUpdateTime", "2021-06-26T15:14:55Z", "expiryTime", "2021-01-29T17:19:57Z", - "lastStartTime", "2021-04-07T03:22:50Z", "lastUpdateResult", "None", "isActiveDispatcher", true, - "status", "Online")))) - .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime(), - new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime())); + "hostServiceUri", "xatwxiyarf", "registerTime", "2021-10-31T11:34:50Z", "maxConcurrentJobs", 553557202, + "lastStopTime", "2021-04-25T12:51:57Z", "version", "mjrsmkjalor", "machineName", "rumhaeqboig", + "versionStatus", "b", "concurrentJobsLimit", 674191876, "lastEndUpdateTime", "2021-09-26T14:51:21Z", + "expiryTime", "2021-11-24T03:52:34Z", "lastStartTime", "2021-11-03T15:28:53Z", "lastUpdateResult", + "Fail", "isActiveDispatcher", false, "status", "Offline")), + new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "twmskuqg", + "lastStartUpdateTime", "2021-06-10T03:00:04Z", "lastConnectTime", "2021-09-09T10:01Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + 
.deserialize("{\"omzvgjynuwcqu\":\"uc\"}", Object.class, SerializerEncoding.JSON), + "hostServiceUri", "eikqvaagnzbkqq", "registerTime", "2021-10-23T18:25:33Z", "maxConcurrentJobs", + 1239610602, "lastStopTime", "2021-10-24T02:18:38Z", "version", "lcbwvs", "machineName", "orqtr", + "versionStatus", "anqdmmrodpgtblm", "concurrentJobsLimit", 372484908, "lastEndUpdateTime", + "2021-10-09T07:22:10Z", "expiryTime", "2021-09-23T07:22:31Z", "lastStartTime", "2021-07-30T11:30:39Z", + "lastUpdateResult", "Succeed", "isActiveDispatcher", true, "status", "Initializing")), + new SelfHostedIntegrationRuntimeNodeInner() + .withAdditionalProperties(mapOf("nodeName", "mdjpspipkpcvhqa", "lastStartUpdateTime", + "2021-09-06T14:18:49Z", "lastConnectTime", "2021-07-09T08:56:23Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"r\":\"cktjscjkexybx\",\"mbf\":\"jgptxvzhyun\"}", Object.class, + SerializerEncoding.JSON), + "hostServiceUri", "qvatst", "registerTime", "2021-07-12T03:07:59Z", "maxConcurrentJobs", 130802880, + "lastStopTime", "2021-08-06T15:03:51Z", "version", "fktytrivfleqv", "machineName", "casncgqui", + "versionStatus", "selblwmfudpntpsw", "concurrentJobsLimit", 777618824, "lastEndUpdateTime", + "2021-04-02T19:34:10Z", "expiryTime", "2021-05-08T20:41:53Z", "lastStartTime", + "2021-01-16T11:21:54Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", false, "status", + "Offline")), + new SelfHostedIntegrationRuntimeNodeInner() + .withAdditionalProperties(mapOf("nodeName", "brbmcrtvxjwpvqre", "lastStartUpdateTime", + "2021-06-22T05:42:16Z", "lastConnectTime", "2021-02-09T04:53:23Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"sd\":\"iwerlbcetrvird\",\"yzegchtabhac\":\"wpvnloop\"}", Object.class, + SerializerEncoding.JSON), + "hostServiceUri", "lqwbrfiovpdl", "registerTime", "2021-08-12T19:39:03Z", "maxConcurrentJobs", + 815203127, "lastStopTime", "2021-12-03T23:38:17Z", "version", "zhmlnfvyhdhf", "machineName", "ui", + "versionStatus", "fqzcklvvwvpfqt", "concurrentJobsLimit", 1715409221, "lastEndUpdateTime", + "2021-02-11T16:56:09Z", "expiryTime", "2021-03-20T04:17:59Z", "lastStartTime", + "2021-07-22T21:24:26Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", false, "status", + "Limited")))) + .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime())); model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeStatus.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java index 0fb0cdcc02f42..2ae7b9ed5c1fd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java @@ -18,49 +18,56 @@ public final class SelfHostedIntegrationRuntimeStatusTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SelfHostedIntegrationRuntimeStatusTypeProperties model = BinaryData.fromString( - 
"{\"createTime\":\"2021-08-23T11:38:34Z\",\"taskQueueId\":\"cgiwsywpejtvqop\",\"internalChannelEncryption\":\"NotSet\",\"version\":\"er\",\"nodes\":[{\"nodeName\":\"tqe\",\"machineName\":\"ypul\",\"hostServiceUri\":\"mcbcen\",\"status\":\"NeedRegistration\",\"capabilities\":{\"wc\":\"epi\",\"kqmki\":\"oasg\"},\"versionStatus\":\"bfvkiwmqnwm\",\"version\":\"cct\",\"registerTime\":\"2021-06-23T08:30:31Z\",\"lastConnectTime\":\"2021-12-02T13:41:46Z\",\"expiryTime\":\"2021-11-23T20:55:31Z\",\"lastStartTime\":\"2021-07-18T21:27:37Z\",\"lastStopTime\":\"2021-04-24T22:04:41Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-11-19T09:35:38Z\",\"lastEndUpdateTime\":\"2021-01-27T13:52:03Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":601104812,\"maxConcurrentJobs\":298117911,\"\":{\"hqnzcbjfpxoyg\":\"datahkoqcudnwmoyhdpj\",\"q\":\"datamjn\"}},{\"nodeName\":\"qxyxfknjqss\",\"machineName\":\"wvravn\",\"hostServiceUri\":\"klkwqisnlpa\",\"status\":\"NeedRegistration\",\"capabilities\":{\"mpgqqdhtct\":\"otktdmewwlkry\",\"zjohdhczh\":\"xregykjmpad\"},\"versionStatus\":\"itydljgrpq\",\"version\":\"fxg\",\"registerTime\":\"2021-03-01T06:28:30Z\",\"lastConnectTime\":\"2021-01-05T20:00:31Z\",\"expiryTime\":\"2021-05-06T21:01:56Z\",\"lastStartTime\":\"2021-09-13T21:29:57Z\",\"lastStopTime\":\"2021-08-09T19:17:55Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-05-19T23:25:50Z\",\"lastEndUpdateTime\":\"2021-10-05T17:19:03Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1187362021,\"maxConcurrentJobs\":264432221,\"\":{\"qihvbkufqiqddjy\":\"datagljopiz\",\"mkjpajlfp\":\"datapgom\"}},{\"nodeName\":\"rljlhejcccpxbnwi\",\"machineName\":\"fjjevptv\",\"hostServiceUri\":\"ghzqwvkparskpl\",\"status\":\"InitializeFailed\",\"capabilities\":{\"cykgulellfwrmsux\":\"xqmbsu\"},\"versionStatus\":\"b\",\"version\":\"oubxlpkdsnbqoyms\",\"registerTime\":\"2021-07-23T03:20:47Z\",\"lastConnectTime\":\"2021-11-28T19:08:48Z\",\"expiryTime\":\"2021-03-05T01:15:54Z\",\"lastStartTime\":\"2021-11-08T10:38:08Z\",\"lastStopTime\":\"2021-09-14T21:07:48Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-11-05T01:02:23Z\",\"lastEndUpdateTime\":\"2021-06-29T18:24:37Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1884274645,\"maxConcurrentJobs\":1000136471,\"\":{\"ksribmbtmorik\":\"dataskkop\"}}],\"scheduledUpdateDate\":\"2021-05-31T21:34:47Z\",\"updateDelayOffset\":\"qtiqxgwsfrlyvzl\",\"localTimeZoneOffset\":\"ydgmdoqgruc\",\"capabilities\":{\"nrruwsqp\":\"yjudhgwjqegly\",\"mgexqcilfiulgnn\":\"yxkm\",\"wqvvzxgbamqrbb\":\"xwdpmc\",\"mvdpavcjkb\":\"npt\"},\"serviceUrls\":[\"w\",\"izcbqjxmhwdtunpc\",\"kd\",\"rfouwkoltjdauujj\"],\"autoUpdate\":\"Off\",\"versionStatus\":\"d\",\"links\":[{\"name\":\"kudjzr\",\"subscriptionId\":\"ye\",\"dataFactoryName\":\"lm\",\"dataFactoryLocation\":\"oduk\",\"createTime\":\"2021-08-19T15:28:18Z\"},{\"name\":\"nqqkqxkd\",\"subscriptionId\":\"seuqkrbyyakrjgb\",\"dataFactoryName\":\"gokrllbecannvxo\",\"dataFactoryLocation\":\"dcftptfc\",\"createTime\":\"2021-05-09T13:46:34Z\"},{\"name\":\"fwxmcxhwkgihif\",\"subscriptionId\":\"ocmgadhmqyufp\",\"dataFactoryName\":\"wweylszrt\",\"dataFactoryLocation\":\"iqcypmonfcorcn\",\"createTime\":\"2021-04-07T10:45:05Z\"}],\"pushedVersion\":\"igc\",\"latestVersion\":\"ayhdt\",\"autoUpdateETA\":\"2021-02-21T20:09:01Z\",\"selfContainedInteractiveAuthoringEnabled\":true}") + 
"{\"createTime\":\"2021-11-07T15:55:44Z\",\"taskQueueId\":\"tkviptqhatfgkhhb\",\"internalChannelEncryption\":\"SslEncrypted\",\"version\":\"gkd\",\"nodes\":[{\"nodeName\":\"hhbupqo\",\"machineName\":\"whvqih\",\"hostServiceUri\":\"hqjdcbgcemjti\",\"status\":\"Limited\",\"capabilities\":{\"raxil\":\"calyqz\",\"rhiehxjou\":\"dxcmpdmdsypks\"},\"versionStatus\":\"ehs\",\"version\":\"c\",\"registerTime\":\"2021-06-14T23:46:34Z\",\"lastConnectTime\":\"2021-07-02T12:32:20Z\",\"expiryTime\":\"2021-07-05T04:08:34Z\",\"lastStartTime\":\"2021-09-18T17:50:18Z\",\"lastStopTime\":\"2021-01-07T23:07:29Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-06-29T12:45:21Z\",\"lastEndUpdateTime\":\"2021-07-20T14:11:23Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1062834096,\"maxConcurrentJobs\":93393411,\"\":{\"fzwiztvucyackgk\":\"datapuku\",\"pcohfksixvwkp\":\"datavpmg\"}},{\"nodeName\":\"smurjab\",\"machineName\":\"xohagcojgmyzjd\",\"hostServiceUri\":\"qehrqtgdipb\",\"status\":\"Upgrading\",\"capabilities\":{\"g\":\"yu\",\"jyxyunypf\":\"zsebkbfykgmwurcx\",\"jxr\":\"vjgovbbn\",\"nk\":\"aqgmztlru\"},\"versionStatus\":\"upmqffgjsq\",\"version\":\"airoog\",\"registerTime\":\"2021-11-18T20:03:20Z\",\"lastConnectTime\":\"2021-07-02T09:43:33Z\",\"expiryTime\":\"2021-07-21T15:52:53Z\",\"lastStartTime\":\"2021-03-14T06:17:38Z\",\"lastStopTime\":\"2021-01-15T04:09:58Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-07-27T22:50:59Z\",\"lastEndUpdateTime\":\"2021-07-09T19:19:44Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1576744134,\"maxConcurrentJobs\":479771092,\"\":{\"ibtrwgljfa\":\"datarvtf\",\"phgimyomje\":\"dataezdxqhj\",\"uxkepga\":\"datalgnieeqjdfxxxnn\"}},{\"nodeName\":\"ijbiterqfuterrca\",\"machineName\":\"pfqddwwpg\",\"hostServiceUri\":\"awupqkv\",\"status\":\"Online\",\"capabilities\":{\"agw\":\"efrxzwvcvtjdq\",\"rpxj\":\"jafbdvpcdzdbjz\",\"ngvgjgcwwuusjjht\":\"raupirvdonka\"},\"versionStatus\":\"znrhig\",\"version\":\"xhgmfrnktsvwqda\",\"registerTime\":\"2021-12-05T12:31:27Z\",\"lastConnectTime\":\"2021-06-02T09:55:30Z\",\"expiryTime\":\"2021-11-11T22:05:39Z\",\"lastStartTime\":\"2021-03-05T22:36:17Z\",\"lastStopTime\":\"2021-08-24T07:44Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-07-14T19:29:42Z\",\"lastEndUpdateTime\":\"2021-12-03T04:46:12Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":1620576978,\"maxConcurrentJobs\":2087234500,\"\":{\"ecweo\":\"dataasflg\",\"wvzapybdeea\":\"datalerpt\",\"eirjrkak\":\"dataigbpabacp\",\"bmazlxlbd\":\"datavnd\"}}],\"scheduledUpdateDate\":\"2021-05-27T01:49Z\",\"updateDelayOffset\":\"gqeyqrlg\",\"localTimeZoneOffset\":\"ypbekpardoad\",\"capabilities\":{\"bzmgzsytmp\":\"xzgy\",\"vsqsbqr\":\"slr\",\"jzijxefydck\":\"bpiqfstkdl\"},\"serviceUrls\":[\"z\"],\"autoUpdate\":\"On\",\"versionStatus\":\"kiwpadnh\",\"links\":[{\"name\":\"nit\",\"subscriptionId\":\"gotfrdlfewexbr\",\"dataFactoryName\":\"dyriy\",\"dataFactoryLocation\":\"ybigksnijclfdpr\",\"createTime\":\"2021-07-31T05:09:25Z\"},{\"name\":\"zxeigydd\",\"subscriptionId\":\"n\",\"dataFactoryName\":\"xerzypcral\",\"dataFactoryLocation\":\"cnmrbizxqldmhzb\",\"createTime\":\"2021-09-06T08:51:46Z\"},{\"name\":\"zlgyerm\",\"subscriptionId\":\"mnyuh\",\"dataFactoryName\":\"pbf\",\"dataFactoryLocation\":\"cbweab\",\"createTime\":\"2021-05-12T10:51:07Z\"}],\"pushedVersion\":\"kuquvcgvnomlcldo\",\"latestVersion\":\"huafu\",\"autoUpdateETA\":\"2020-12-26T05:11:47Z\",\"selfContainedInteractiveAuthoringEnabled\":false}") 
.toObject(SelfHostedIntegrationRuntimeStatusTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SelfHostedIntegrationRuntimeStatusTypeProperties model = new SelfHostedIntegrationRuntimeStatusTypeProperties() - .withNodes(Arrays.asList( - new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "tqe", - "lastStartUpdateTime", "2021-11-19T09:35:38Z", "lastConnectTime", "2021-12-02T13:41:46Z", - "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize("{\"wc\":\"epi\",\"kqmki\":\"oasg\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "mcbcen", "registerTime", "2021-06-23T08:30:31Z", "maxConcurrentJobs", 298117911, - "lastStopTime", "2021-04-24T22:04:41Z", "version", "cct", "machineName", "ypul", "versionStatus", - "bfvkiwmqnwm", "concurrentJobsLimit", 601104812, "lastEndUpdateTime", "2021-01-27T13:52:03Z", - "expiryTime", "2021-11-23T20:55:31Z", "lastStartTime", "2021-07-18T21:27:37Z", "lastUpdateResult", - "Succeed", "isActiveDispatcher", true, "status", "NeedRegistration")), - new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "qxyxfknjqss", "lastStartUpdateTime", - "2021-05-19T23:25:50Z", "lastConnectTime", "2021-01-05T20:00:31Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"mpgqqdhtct\":\"otktdmewwlkry\",\"zjohdhczh\":\"xregykjmpad\"}", Object.class, - SerializerEncoding.JSON), - "hostServiceUri", "klkwqisnlpa", "registerTime", "2021-03-01T06:28:30Z", "maxConcurrentJobs", - 264432221, "lastStopTime", "2021-08-09T19:17:55Z", "version", "fxg", "machineName", "wvravn", - "versionStatus", "itydljgrpq", "concurrentJobsLimit", 1187362021, "lastEndUpdateTime", - "2021-10-05T17:19:03Z", "expiryTime", "2021-05-06T21:01:56Z", "lastStartTime", - "2021-09-13T21:29:57Z", "lastUpdateResult", "None", "isActiveDispatcher", true, "status", - "NeedRegistration")), - new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "rljlhejcccpxbnwi", "lastStartUpdateTime", - "2021-11-05T01:02:23Z", "lastConnectTime", "2021-11-28T19:08:48Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter().deserialize("{\"cykgulellfwrmsux\":\"xqmbsu\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "ghzqwvkparskpl", "registerTime", "2021-07-23T03:20:47Z", "maxConcurrentJobs", - 1000136471, "lastStopTime", "2021-09-14T21:07:48Z", "version", "oubxlpkdsnbqoyms", - "machineName", "fjjevptv", "versionStatus", "b", "concurrentJobsLimit", 1884274645, - "lastEndUpdateTime", "2021-06-29T18:24:37Z", "expiryTime", "2021-03-05T01:15:54Z", - "lastStartTime", "2021-11-08T10:38:08Z", "lastUpdateResult", "None", "isActiveDispatcher", - false, "status", "InitializeFailed")))) - .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime(), - new LinkedIntegrationRuntime())); + SelfHostedIntegrationRuntimeStatusTypeProperties model + = new SelfHostedIntegrationRuntimeStatusTypeProperties() + .withNodes(Arrays.asList( + new SelfHostedIntegrationRuntimeNodeInner() + .withAdditionalProperties(mapOf("nodeName", "hhbupqo", "lastStartUpdateTime", + "2021-06-29T12:45:21Z", "lastConnectTime", "2021-07-02T12:32:20Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"raxil\":\"calyqz\",\"rhiehxjou\":\"dxcmpdmdsypks\"}", Object.class, + SerializerEncoding.JSON), + "hostServiceUri", "hqjdcbgcemjti", "registerTime", "2021-06-14T23:46:34Z", + 
"maxConcurrentJobs", 93393411, "lastStopTime", "2021-01-07T23:07:29Z", "version", "c", + "machineName", "whvqih", "versionStatus", "ehs", "concurrentJobsLimit", 1062834096, + "lastEndUpdateTime", "2021-07-20T14:11:23Z", "expiryTime", "2021-07-05T04:08:34Z", + "lastStartTime", "2021-09-18T17:50:18Z", "lastUpdateResult", "None", "isActiveDispatcher", + false, "status", "Limited")), + new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", "smurjab", + "lastStartUpdateTime", "2021-07-27T22:50:59Z", "lastConnectTime", "2021-07-02T09:43:33Z", + "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"g\":\"yu\",\"jyxyunypf\":\"zsebkbfykgmwurcx\",\"jxr\":\"vjgovbbn\",\"nk\":\"aqgmztlru\"}", + Object.class, SerializerEncoding.JSON), + "hostServiceUri", "qehrqtgdipb", "registerTime", "2021-11-18T20:03:20Z", "maxConcurrentJobs", + 479771092, "lastStopTime", "2021-01-15T04:09:58Z", "version", "airoog", "machineName", + "xohagcojgmyzjd", "versionStatus", "upmqffgjsq", "concurrentJobsLimit", 1576744134, + "lastEndUpdateTime", "2021-07-09T19:19:44Z", "expiryTime", "2021-07-21T15:52:53Z", + "lastStartTime", "2021-03-14T06:17:38Z", "lastUpdateResult", "Fail", "isActiveDispatcher", + false, "status", "Upgrading")), + new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf("nodeName", + "ijbiterqfuterrca", "lastStartUpdateTime", "2021-07-14T19:29:42Z", "lastConnectTime", + "2021-06-02T09:55:30Z", "capabilities", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"agw\":\"efrxzwvcvtjdq\",\"rpxj\":\"jafbdvpcdzdbjz\",\"ngvgjgcwwuusjjht\":\"raupirvdonka\"}", + Object.class, SerializerEncoding.JSON), + "hostServiceUri", "awupqkv", "registerTime", "2021-12-05T12:31:27Z", "maxConcurrentJobs", + 2087234500, "lastStopTime", "2021-08-24T07:44Z", "version", "xhgmfrnktsvwqda", "machineName", + "pfqddwwpg", "versionStatus", "znrhig", "concurrentJobsLimit", 1620576978, "lastEndUpdateTime", + "2021-12-03T04:46:12Z", "expiryTime", "2021-11-11T22:05:39Z", "lastStartTime", + "2021-03-05T22:36:17Z", "lastUpdateResult", "Succeed", "isActiveDispatcher", false, "status", + "Online")))) + .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime(), + new LinkedIntegrationRuntime())); model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeStatusTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java index 27ed528922618..43d8db718adee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java @@ -19,29 +19,36 @@ public final class ServiceNowObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ServiceNowObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"ServiceNowObject\",\"typeProperties\":{\"tableName\":\"datazd\"},\"description\":\"bj\",\"structure\":\"datadsysx\",\"schema\":\"datauhvhnlse\",\"linkedServiceName\":{\"referenceName\":\"zcrrwnkkgdwqym\",\"parameters\":{\"eluvmsa\":\"dataqeaxd\",\"hvvzfznfgpbc\":\"datahviawgqrw\"}},\"parameters\":{\"djieask\":{\"type\":\"Object\",\"defaultValue\":\"datam\"}},\"annotations\":[\"dataclnfusrgnos\",\"datakhb\",\"datajphlyyuahvy\",\"dataikbvqzrurgbqaucp\"],\"folder\":{\"name\":\"jnohafwm\"},\"\":{\"tugpeametsdwxfa\":\"datajly\",\"fegs\":\"datatxc\",\"hooimazkmqfwbgd\":\"datavbghoucvkan\"}}") + "{\"type\":\"yciwbnqinr\",\"typeProperties\":{\"tableName\":\"datamjogxgr\"},\"description\":\"vvmrn\",\"structure\":\"datadijoxqswsychdc\",\"schema\":\"datagcmpnc\",\"linkedServiceName\":{\"referenceName\":\"qbgbnoqnow\",\"parameters\":{\"wjekyq\":\"dataeytz\",\"polnvgpppdilbdvx\":\"datarv\",\"x\":\"datafhlzzgaps\"}},\"parameters\":{\"roy\":{\"type\":\"Float\",\"defaultValue\":\"datacrmzquuzywkgoux\"},\"scifrzcwuejmxlfz\":{\"type\":\"String\",\"defaultValue\":\"dataesywywnvgy\"},\"hchraunjovlx\":{\"type\":\"Int\",\"defaultValue\":\"datayrgr\"},\"tlhzkrazk\":{\"type\":\"String\",\"defaultValue\":\"datamvzpniqwxmrgmnk\"}},\"annotations\":[\"datayecznvzmsvznghe\"],\"folder\":{\"name\":\"ehgvmmnoyz\"},\"\":{\"fu\":\"datanypluzypkfc\"}}") .toObject(ServiceNowObjectDataset.class); - Assertions.assertEquals("bj", model.description()); - Assertions.assertEquals("zcrrwnkkgdwqym", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("djieask").type()); - Assertions.assertEquals("jnohafwm", model.folder().name()); + Assertions.assertEquals("vvmrn", model.description()); + Assertions.assertEquals("qbgbnoqnow", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("roy").type()); + Assertions.assertEquals("ehgvmmnoyz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ServiceNowObjectDataset model = new ServiceNowObjectDataset().withDescription("bj").withStructure("datadsysx") - .withSchema("datauhvhnlse") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zcrrwnkkgdwqym") - .withParameters(mapOf("eluvmsa", "dataqeaxd", "hvvzfznfgpbc", "datahviawgqrw"))) - .withParameters( - mapOf("djieask", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datam"))) - .withAnnotations(Arrays.asList("dataclnfusrgnos", "datakhb", "datajphlyyuahvy", "dataikbvqzrurgbqaucp")) - .withFolder(new DatasetFolder().withName("jnohafwm")).withTableName("datazd"); + ServiceNowObjectDataset model = new ServiceNowObjectDataset().withDescription("vvmrn") + .withStructure("datadijoxqswsychdc") + .withSchema("datagcmpnc") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qbgbnoqnow") + .withParameters(mapOf("wjekyq", "dataeytz", "polnvgpppdilbdvx", "datarv", "x", "datafhlzzgaps"))) + .withParameters(mapOf("roy", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datacrmzquuzywkgoux"), + "scifrzcwuejmxlfz", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataesywywnvgy"), + "hchraunjovlx", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayrgr"), + "tlhzkrazk", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datamvzpniqwxmrgmnk"))) + 
.withAnnotations(Arrays.asList("datayecznvzmsvznghe")) + .withFolder(new DatasetFolder().withName("ehgvmmnoyz")) + .withTableName("datamjogxgr"); model = BinaryData.fromObject(model).toObject(ServiceNowObjectDataset.class); - Assertions.assertEquals("bj", model.description()); - Assertions.assertEquals("zcrrwnkkgdwqym", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("djieask").type()); - Assertions.assertEquals("jnohafwm", model.folder().name()); + Assertions.assertEquals("vvmrn", model.description()); + Assertions.assertEquals("qbgbnoqnow", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("roy").type()); + Assertions.assertEquals("ehgvmmnoyz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java index 284e370128593..539f77fd9ff6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java @@ -11,16 +11,19 @@ public final class ServiceNowSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ServiceNowSource model = BinaryData.fromString( - "{\"type\":\"ServiceNowSource\",\"query\":\"datadtq\",\"queryTimeout\":\"datajbxol\",\"additionalColumns\":\"datahquqihgibog\",\"sourceRetryCount\":\"datajupenoupcolxc\",\"sourceRetryWait\":\"dataszwadesisd\",\"maxConcurrentConnections\":\"datauhqts\",\"disableMetricsCollection\":\"datab\",\"\":{\"bymrgelgoduexx\":\"dataeeucvv\",\"fr\":\"datad\",\"wqzvqtnozwphka\":\"dataenvkqtvtq\",\"bzbbjxkami\":\"dataracvcbrtltpo\"}}") + "{\"type\":\"ovwizjraks\",\"query\":\"dataljgaykaqwnk\",\"queryTimeout\":\"dataewshhqgjvchl\",\"additionalColumns\":\"datazfbtczzjf\",\"sourceRetryCount\":\"datawqpukltfk\",\"sourceRetryWait\":\"dataoxmisnbzczpfvqt\",\"maxConcurrentConnections\":\"datakcfe\",\"disableMetricsCollection\":\"datazknhcvxf\",\"\":{\"ysdmovbvnjyq\":\"datagrnh\",\"kczolndwrggyt\":\"dataofdgzl\",\"ukfwmhzarrfttx\":\"datavoxnjbyjgobzj\",\"vvab\":\"dataifrjgvhone\"}}") .toObject(ServiceNowSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ServiceNowSource model - = new ServiceNowSource().withSourceRetryCount("datajupenoupcolxc").withSourceRetryWait("dataszwadesisd") - .withMaxConcurrentConnections("datauhqts").withDisableMetricsCollection("datab") - .withQueryTimeout("datajbxol").withAdditionalColumns("datahquqihgibog").withQuery("datadtq"); + ServiceNowSource model = new ServiceNowSource().withSourceRetryCount("datawqpukltfk") + .withSourceRetryWait("dataoxmisnbzczpfvqt") + .withMaxConcurrentConnections("datakcfe") + .withDisableMetricsCollection("datazknhcvxf") + .withQueryTimeout("dataewshhqgjvchl") + .withAdditionalColumns("datazfbtczzjf") + .withQuery("dataljgaykaqwnk"); model = BinaryData.fromObject(model).toObject(ServiceNowSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java index 27defae2a3ca9..370ea218025c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java @@ -19,33 +19,36 @@ public final class ServiceNowV2ObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ServiceNowV2ObjectDataset model = BinaryData.fromString( - "{\"type\":\"ServiceNowV2Object\",\"typeProperties\":{\"tableName\":\"dataadzglm\"},\"description\":\"zpsuhsypxmul\",\"structure\":\"datafrerkqp\",\"schema\":\"datajxkbywsbu\",\"linkedServiceName\":{\"referenceName\":\"fmxbdjkm\",\"parameters\":{\"vghbtycvl\":\"dataggnowxhyvdbrdv\",\"xshmrdisc\":\"datausgiikhrcthype\"}},\"parameters\":{\"laf\":{\"type\":\"Int\",\"defaultValue\":\"datamktcwmivoxgzegn\"},\"aghddcozwxuxorru\":{\"type\":\"String\",\"defaultValue\":\"dataa\"},\"gxlssolqypvw\":{\"type\":\"SecureString\",\"defaultValue\":\"datave\"},\"fcm\":{\"type\":\"String\",\"defaultValue\":\"dataohvrkqvrvgdojcv\"}},\"annotations\":[\"datapjakjdtuodoc\"],\"folder\":{\"name\":\"qxnyxpmqdsqnii\"},\"\":{\"oih\":\"dataqikdipkxsqkuzabr\"}}") + "{\"type\":\"vyjehyvnfjngo\",\"typeProperties\":{\"tableName\":\"dataaxtmvmycvjpaxjd\"},\"description\":\"rdvhbgtuhwh\",\"structure\":\"datanwenbphyl\",\"schema\":\"datazgz\",\"linkedServiceName\":{\"referenceName\":\"x\",\"parameters\":{\"etxpun\":\"datasebkc\",\"uiqr\":\"dataerma\"}},\"parameters\":{\"rwjbsycukbocsits\":{\"type\":\"String\",\"defaultValue\":\"dataubqwxvsxrbiyzjl\"},\"yqs\":{\"type\":\"Object\",\"defaultValue\":\"datasgzpwqieyxjkc\"},\"qepeftmub\":{\"type\":\"Object\",\"defaultValue\":\"datam\"},\"zeq\":{\"type\":\"Object\",\"defaultValue\":\"dataoepeqlhbtysy\"}},\"annotations\":[\"datapqnofkwhgy\"],\"folder\":{\"name\":\"yfdbvooelmik\"},\"\":{\"qmlmwjwsmnwb\":\"dataqykgjjsmvsi\",\"vque\":\"dataacvemmriyz\",\"lolxxhcyn\":\"dataxplcsinb\"}}") .toObject(ServiceNowV2ObjectDataset.class); - Assertions.assertEquals("zpsuhsypxmul", model.description()); - Assertions.assertEquals("fmxbdjkm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("laf").type()); - Assertions.assertEquals("qxnyxpmqdsqnii", model.folder().name()); + Assertions.assertEquals("rdvhbgtuhwh", model.description()); + Assertions.assertEquals("x", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("rwjbsycukbocsits").type()); + Assertions.assertEquals("yfdbvooelmik", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ServiceNowV2ObjectDataset model = new ServiceNowV2ObjectDataset().withDescription("zpsuhsypxmul") - .withStructure("datafrerkqp").withSchema("datajxkbywsbu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fmxbdjkm") - .withParameters(mapOf("vghbtycvl", "dataggnowxhyvdbrdv", "xshmrdisc", "datausgiikhrcthype"))) - .withParameters(mapOf("laf", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datamktcwmivoxgzegn"), - "aghddcozwxuxorru", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataa"), "gxlssolqypvw", - new 
ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datave"), "fcm", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataohvrkqvrvgdojcv"))) - .withAnnotations(Arrays.asList("datapjakjdtuodoc")) - .withFolder(new DatasetFolder().withName("qxnyxpmqdsqnii")).withTableName("dataadzglm"); + ServiceNowV2ObjectDataset model = new ServiceNowV2ObjectDataset().withDescription("rdvhbgtuhwh") + .withStructure("datanwenbphyl") + .withSchema("datazgz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("x") + .withParameters(mapOf("etxpun", "datasebkc", "uiqr", "dataerma"))) + .withParameters(mapOf("rwjbsycukbocsits", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataubqwxvsxrbiyzjl"), + "yqs", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datasgzpwqieyxjkc"), + "qepeftmub", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datam"), + "zeq", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataoepeqlhbtysy"))) + .withAnnotations(Arrays.asList("datapqnofkwhgy")) + .withFolder(new DatasetFolder().withName("yfdbvooelmik")) + .withTableName("dataaxtmvmycvjpaxjd"); model = BinaryData.fromObject(model).toObject(ServiceNowV2ObjectDataset.class); - Assertions.assertEquals("zpsuhsypxmul", model.description()); - Assertions.assertEquals("fmxbdjkm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("laf").type()); - Assertions.assertEquals("qxnyxpmqdsqnii", model.folder().name()); + Assertions.assertEquals("rdvhbgtuhwh", model.description()); + Assertions.assertEquals("x", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("rwjbsycukbocsits").type()); + Assertions.assertEquals("yfdbvooelmik", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java index 63f3837be9b72..51aaab73ae6ae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java @@ -15,55 +15,62 @@ public final class ServiceNowV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ServiceNowV2Source model = BinaryData.fromString( - 
"{\"type\":\"ServiceNowV2Source\",\"expression\":{\"type\":\"Constant\",\"value\":\"avbteaegyojy\",\"operator\":\"epcdhqjczh\",\"operands\":[{\"type\":\"Constant\",\"value\":\"jbhwoszrhfwcihk\",\"operator\":\"jbitmuriizyrgzxp\",\"operands\":[{\"type\":\"Unary\",\"value\":\"sod\",\"operator\":\"jn\",\"operands\":[{},{},{},{}]},{\"type\":\"Constant\",\"value\":\"hr\",\"operator\":\"i\",\"operands\":[{},{}]},{\"type\":\"Constant\",\"value\":\"huwcqnaspjdahi\",\"operator\":\"kliyfgkzwkyqa\",\"operands\":[{}]}]},{\"type\":\"Unary\",\"value\":\"oyykhidia\",\"operator\":\"ktwijoxkkynppqt\",\"operands\":[{\"type\":\"Unary\",\"value\":\"jat\",\"operator\":\"kqq\",\"operands\":[{},{},{}]}]}]},\"queryTimeout\":\"dataoinqzv\",\"additionalColumns\":\"datafpu\",\"sourceRetryCount\":\"datayhs\",\"sourceRetryWait\":\"dataa\",\"maxConcurrentConnections\":\"dataofqntt\",\"disableMetricsCollection\":\"datajcgupxnuvsh\",\"\":{\"ogvafbd\":\"databpmvppgui\",\"deqntbl\":\"dataokplolcalyvcxvcp\",\"afqr\":\"datadqsqbyubsw\"}}") + "{\"type\":\"jqkkacw\",\"expression\":{\"type\":\"Constant\",\"value\":\"feslxwlm\",\"operator\":\"o\",\"operands\":[{\"type\":\"Field\",\"value\":\"blomidvic\",\"operator\":\"ufjahu\",\"operands\":[{\"type\":\"Field\",\"value\":\"cklthsuasnxdhlov\",\"operator\":\"rfdipsshxx\",\"operands\":[{},{},{}]},{\"type\":\"Field\",\"value\":\"yysvexpzsxbfnkj\",\"operator\":\"tynpbi\",\"operands\":[{},{},{}]},{\"type\":\"Constant\",\"value\":\"ahnlxcd\",\"operator\":\"ssdtysnlx\",\"operands\":[{},{},{},{}]}]}]},\"queryTimeout\":\"datafjjjzcxtzkoloos\",\"additionalColumns\":\"dataukqioqh\",\"sourceRetryCount\":\"dataqmxkxfmwbrvsl\",\"sourceRetryWait\":\"datarlaudemzrp\",\"maxConcurrentConnections\":\"datausujbibbgcloknh\",\"disableMetricsCollection\":\"dataqfynrdagmih\",\"\":{\"mqbmcmg\":\"dataflzpuibczlrewf\",\"wezplnzvrh\":\"datazapuunuyokftd\"}}") .toObject(ServiceNowV2Source.class); Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.expression().type()); - Assertions.assertEquals("avbteaegyojy", model.expression().value()); - Assertions.assertEquals("epcdhqjczh", model.expression().operator()); - Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.expression().operands().get(0).type()); - Assertions.assertEquals("jbhwoszrhfwcihk", model.expression().operands().get(0).value()); - Assertions.assertEquals("jbitmuriizyrgzxp", model.expression().operands().get(0).operator()); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.expression().operands().get(0).operands().get(0).type()); - Assertions.assertEquals("sod", model.expression().operands().get(0).operands().get(0).value()); - Assertions.assertEquals("jn", model.expression().operands().get(0).operands().get(0).operator()); + Assertions.assertEquals("feslxwlm", model.expression().value()); + Assertions.assertEquals("o", model.expression().operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.expression().operands().get(0).type()); + Assertions.assertEquals("blomidvic", model.expression().operands().get(0).value()); + Assertions.assertEquals("ufjahu", model.expression().operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.expression().operands().get(0).operands().get(0).type()); + Assertions.assertEquals("cklthsuasnxdhlov", model.expression().operands().get(0).operands().get(0).value()); + Assertions.assertEquals("rfdipsshxx", model.expression().operands().get(0).operands().get(0).operator()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { 
ServiceNowV2Source model - = new ServiceNowV2Source().withSourceRetryCount("datayhs").withSourceRetryWait("dataa") - .withMaxConcurrentConnections( - "dataofqntt") - .withDisableMetricsCollection( - "datajcgupxnuvsh") - .withQueryTimeout("dataoinqzv").withAdditionalColumns("datafpu") - .withExpression(new ExpressionV2().withType(ExpressionV2Type.CONSTANT).withValue("avbteaegyojy") - .withOperator("epcdhqjczh") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.CONSTANT).withValue("jbhwoszrhfwcihk") - .withOperator("jbitmuriizyrgzxp") - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("sod").withOperator("jn") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), - new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.CONSTANT).withValue("hr").withOperator("i") - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.CONSTANT).withValue("huwcqnaspjdahi") - .withOperator("kliyfgkzwkyqa").withOperands(Arrays.asList(new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.UNARY).withValue("oyykhidia") - .withOperator("ktwijoxkkynppqt") - .withOperands(Arrays.asList(new ExpressionV2().withType(ExpressionV2Type.UNARY) - .withValue("jat").withOperator("kqq").withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2()))))))); + = new ServiceNowV2Source().withSourceRetryCount("dataqmxkxfmwbrvsl") + .withSourceRetryWait("datarlaudemzrp") + .withMaxConcurrentConnections("datausujbibbgcloknh") + .withDisableMetricsCollection("dataqfynrdagmih") + .withQueryTimeout("datafjjjzcxtzkoloos") + .withAdditionalColumns("dataukqioqh") + .withExpression( + new ExpressionV2().withType(ExpressionV2Type.CONSTANT) + .withValue("feslxwlm") + .withOperator("o") + .withOperands( + Arrays + .asList(new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("blomidvic") + .withOperator("ufjahu") + .withOperands(Arrays.asList( + new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("cklthsuasnxdhlov") + .withOperator("rfdipsshxx") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.FIELD) + .withValue("yysvexpzsxbfnkj") + .withOperator("tynpbi") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2())), + new ExpressionV2().withType(ExpressionV2Type.CONSTANT) + .withValue("ahnlxcd") + .withOperator("ssdtysnlx") + .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), + new ExpressionV2(), new ExpressionV2()))))))); model = BinaryData.fromObject(model).toObject(ServiceNowV2Source.class); Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.expression().type()); - Assertions.assertEquals("avbteaegyojy", model.expression().value()); - Assertions.assertEquals("epcdhqjczh", model.expression().operator()); - Assertions.assertEquals(ExpressionV2Type.CONSTANT, model.expression().operands().get(0).type()); - Assertions.assertEquals("jbhwoszrhfwcihk", model.expression().operands().get(0).value()); - Assertions.assertEquals("jbitmuriizyrgzxp", model.expression().operands().get(0).operator()); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.expression().operands().get(0).operands().get(0).type()); - Assertions.assertEquals("sod", model.expression().operands().get(0).operands().get(0).value()); - Assertions.assertEquals("jn", 
model.expression().operands().get(0).operands().get(0).operator()); + Assertions.assertEquals("feslxwlm", model.expression().value()); + Assertions.assertEquals("o", model.expression().operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.expression().operands().get(0).type()); + Assertions.assertEquals("blomidvic", model.expression().operands().get(0).value()); + Assertions.assertEquals("ufjahu", model.expression().operands().get(0).operator()); + Assertions.assertEquals(ExpressionV2Type.FIELD, model.expression().operands().get(0).operands().get(0).type()); + Assertions.assertEquals("cklthsuasnxdhlov", model.expression().operands().get(0).operands().get(0).value()); + Assertions.assertEquals("rfdipsshxx", model.expression().operands().get(0).operands().get(0).operator()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java index dc867df8301e5..168680014c091 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java @@ -21,56 +21,60 @@ public final class SetVariableActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SetVariableActivity model = BinaryData.fromString( - "{\"type\":\"SetVariable\",\"typeProperties\":{\"variableName\":\"cjawqhpijurqoi\",\"value\":\"dataibjivmvjtkvf\",\"setSystemVariable\":false},\"policy\":{\"secureInput\":true,\"secureOutput\":false},\"name\":\"jmsaacnblu\",\"description\":\"mzgqj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xfkxnevciwuiopgy\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Succeeded\"],\"\":{\"hmpmfakinode\":\"dataycgdkikpqmdi\"}},{\"activity\":\"ppcpwcxfn\",\"dependencyConditions\":[\"Failed\"],\"\":{\"xldorqprj\":\"dataxay\",\"ssog\":\"datavueyzg\",\"vir\":\"dataeqv\"}},{\"activity\":\"bguewtcq\",\"dependencyConditions\":[\"Completed\"],\"\":{\"sgauwepojmxbyv\":\"databn\",\"r\":\"dataykwrffx\",\"qlcvtdy\":\"dataxhcxpzjewo\",\"quyatvsn\":\"dataozmtsjgnp\"}},{\"activity\":\"rxhv\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\",\"Succeeded\"],\"\":{\"wzpauwhfh\":\"datarzlrnuyhldtjzid\",\"lojca\":\"datanh\",\"idqlghrc\":\"databukjywgs\"}}],\"userProperties\":[{\"name\":\"ln\",\"value\":\"datakvdr\"},{\"name\":\"ekxvlejh\",\"value\":\"databqzxqid\"},{\"name\":\"u\",\"value\":\"datawrwjbanteeu\"}],\"\":{\"fvjktfpo\":\"datacai\",\"q\":\"dataalrrqjioltdlppyk\"}}") + 
"{\"type\":\"zd\",\"typeProperties\":{\"variableName\":\"rkvhgn\",\"value\":\"dataxmmpuksvoimdg\",\"setSystemVariable\":false},\"policy\":{\"secureInput\":false,\"secureOutput\":false},\"name\":\"ggghwxpgftshcss\",\"description\":\"eplpphfezzgnwx\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"zgvtjrobowhcvssm\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"o\":\"datalxrgqmuth\"}},{\"activity\":\"dmhypptfppmu\",\"dependencyConditions\":[\"Skipped\",\"Skipped\"],\"\":{\"z\":\"datahgwqqtbubkyi\"}},{\"activity\":\"hitdqmbo\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"drkolpnebn\":\"datasitgpzablkcvkm\",\"jdbdjxvcxepjfxcm\":\"dataafvks\"}}],\"userProperties\":[{\"name\":\"vwcmt\",\"value\":\"dataetflirbvqkbxg\"},{\"name\":\"epinyursqfhrzpy\",\"value\":\"datamfipvgmlf\"},{\"name\":\"bwfxssxarxvftlls\",\"value\":\"dataqa\"},{\"name\":\"kd\",\"value\":\"datagr\"}],\"\":{\"y\":\"datatfky\"}}") .toObject(SetVariableActivity.class); - Assertions.assertEquals("jmsaacnblu", model.name()); - Assertions.assertEquals("mzgqj", model.description()); + Assertions.assertEquals("ggghwxpgftshcss", model.name()); + Assertions.assertEquals("eplpphfezzgnwx", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("xfkxnevciwuiopgy", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ln", model.userProperties().get(0).name()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("zgvtjrobowhcvssm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vwcmt", model.userProperties().get(0).name()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("cjawqhpijurqoi", model.variableName()); + Assertions.assertEquals("rkvhgn", model.variableName()); Assertions.assertEquals(false, model.setSystemVariable()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SetVariableActivity model = new SetVariableActivity().withName("jmsaacnblu").withDescription("mzgqj") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xfkxnevciwuiopgy") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ppcpwcxfn") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bguewtcq") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rxhv") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ln").withValue("datakvdr"), - new 
UserProperty().withName("ekxvlejh").withValue("databqzxqid"), - new UserProperty().withName("u").withValue("datawrwjbanteeu"))) - .withPolicy(new SecureInputOutputPolicy().withSecureInput(true).withSecureOutput(false)) - .withVariableName("cjawqhpijurqoi").withValue("dataibjivmvjtkvf").withSetSystemVariable(false); + SetVariableActivity model + = new SetVariableActivity().withName("ggghwxpgftshcss") + .withDescription("eplpphfezzgnwx") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn( + Arrays.asList( + new ActivityDependency().withActivity("zgvtjrobowhcvssm") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dmhypptfppmu") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("hitdqmbo") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("vwcmt").withValue("dataetflirbvqkbxg"), + new UserProperty().withName("epinyursqfhrzpy").withValue("datamfipvgmlf"), + new UserProperty().withName("bwfxssxarxvftlls").withValue("dataqa"), + new UserProperty().withName("kd").withValue("datagr"))) + .withPolicy(new SecureInputOutputPolicy().withSecureInput(false).withSecureOutput(false)) + .withVariableName("rkvhgn") + .withValue("dataxmmpuksvoimdg") + .withSetSystemVariable(false); model = BinaryData.fromObject(model).toObject(SetVariableActivity.class); - Assertions.assertEquals("jmsaacnblu", model.name()); - Assertions.assertEquals("mzgqj", model.description()); + Assertions.assertEquals("ggghwxpgftshcss", model.name()); + Assertions.assertEquals("eplpphfezzgnwx", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("xfkxnevciwuiopgy", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ln", model.userProperties().get(0).name()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("zgvtjrobowhcvssm", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("vwcmt", model.userProperties().get(0).name()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("cjawqhpijurqoi", model.variableName()); + Assertions.assertEquals("rkvhgn", model.variableName()); Assertions.assertEquals(false, model.setSystemVariable()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java index 698811f251086..6436a4eddf8f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java @@ -11,19 +11,20 @@ public final class SetVariableActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SetVariableActivityTypeProperties model = BinaryData - .fromString("{\"variableName\":\"rvghvfodrqmcgeqy\",\"value\":\"datard\",\"setSystemVariable\":true}") - .toObject(SetVariableActivityTypeProperties.class); - Assertions.assertEquals("rvghvfodrqmcgeqy", model.variableName()); - Assertions.assertEquals(true, model.setSystemVariable()); + SetVariableActivityTypeProperties model + = BinaryData.fromString("{\"variableName\":\"ujukcd\",\"value\":\"datapt\",\"setSystemVariable\":false}") + .toObject(SetVariableActivityTypeProperties.class); + Assertions.assertEquals("ujukcd", model.variableName()); + Assertions.assertEquals(false, model.setSystemVariable()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SetVariableActivityTypeProperties model = new SetVariableActivityTypeProperties() - .withVariableName("rvghvfodrqmcgeqy").withValue("datard").withSetSystemVariable(true); + SetVariableActivityTypeProperties model = new SetVariableActivityTypeProperties().withVariableName("ujukcd") + .withValue("datapt") + .withSetSystemVariable(false); model = BinaryData.fromObject(model).toObject(SetVariableActivityTypeProperties.class); - Assertions.assertEquals("rvghvfodrqmcgeqy", model.variableName()); - Assertions.assertEquals(true, model.setSystemVariable()); + Assertions.assertEquals("ujukcd", model.variableName()); + Assertions.assertEquals(false, model.setSystemVariable()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java index c9e510746a0e3..a1f68eb1bc883 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java @@ -11,13 +11,13 @@ public final class SftpLocationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SftpLocation model = BinaryData.fromString( - "{\"type\":\"SftpLocation\",\"folderPath\":\"dataw\",\"fileName\":\"datavxakglh\",\"\":{\"qiy\":\"datasrfga\",\"yqyxyjrcbqpbis\":\"datavxcgdhyhgoqgs\",\"yjz\":\"dataglqjoxtdahneaoov\",\"fsr\":\"dataivfwjlofze\"}}") + "{\"type\":\"fdk\",\"folderPath\":\"datayvndjokgwesym\",\"fileName\":\"datahrqpfzl\",\"\":{\"aujqbbgsimwejlwb\":\"datatznxlu\"}}") .toObject(SftpLocation.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SftpLocation model = new SftpLocation().withFolderPath("dataw").withFileName("datavxakglh"); + SftpLocation model = new SftpLocation().withFolderPath("datayvndjokgwesym").withFileName("datahrqpfzl"); model = BinaryData.fromObject(model).toObject(SftpLocation.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java 
index afae147a92626..a707993f7520b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java @@ -11,18 +11,24 @@ public final class SftpReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SftpReadSettings model = BinaryData.fromString( - "{\"type\":\"SftpReadSettings\",\"recursive\":\"datapbukou\",\"wildcardFolderPath\":\"dataviyfksegwezgfqou\",\"wildcardFileName\":\"datacpdtdzlyog\",\"enablePartitionDiscovery\":\"dataasjckakikkkajm\",\"partitionRootPath\":\"databigmnkrqdyc\",\"fileListPath\":\"databgkxxpklq\",\"deleteFilesAfterCompletion\":\"databcgsa\",\"modifiedDatetimeStart\":\"dataay\",\"modifiedDatetimeEnd\":\"dataeqedft\",\"disableChunking\":\"datagm\",\"maxConcurrentConnections\":\"datakt\",\"disableMetricsCollection\":\"datazyvzixmusiidivbb\",\"\":{\"pucl\":\"datafliqntnoegxo\",\"ygi\":\"datadytwdaiexisa\",\"ivfiypfvwyzjsi\":\"dataoukaffzzf\"}}") + "{\"type\":\"kletlwa\",\"recursive\":\"datahxphxokdbv\",\"wildcardFolderPath\":\"dataqttusuxxb\",\"wildcardFileName\":\"datapvue\",\"enablePartitionDiscovery\":\"datarnnwgrxzcn\",\"partitionRootPath\":\"datauezxluimkwbwmg\",\"fileListPath\":\"dataqlsn\",\"deleteFilesAfterCompletion\":\"datachpcjztziu\",\"modifiedDatetimeStart\":\"datayvpcfvi\",\"modifiedDatetimeEnd\":\"dataxciunet\",\"disableChunking\":\"datagd\",\"maxConcurrentConnections\":\"datazbp\",\"disableMetricsCollection\":\"dataxvftjdrqgionmoky\",\"\":{\"ymbeeyskbq\":\"datad\",\"jgxdhgezyhph\":\"datacakle\"}}") .toObject(SftpReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SftpReadSettings model = new SftpReadSettings().withMaxConcurrentConnections("datakt") - .withDisableMetricsCollection("datazyvzixmusiidivbb").withRecursive("datapbukou") - .withWildcardFolderPath("dataviyfksegwezgfqou").withWildcardFileName("datacpdtdzlyog") - .withEnablePartitionDiscovery("dataasjckakikkkajm").withPartitionRootPath("databigmnkrqdyc") - .withFileListPath("databgkxxpklq").withDeleteFilesAfterCompletion("databcgsa") - .withModifiedDatetimeStart("dataay").withModifiedDatetimeEnd("dataeqedft").withDisableChunking("datagm"); + SftpReadSettings model = new SftpReadSettings().withMaxConcurrentConnections("datazbp") + .withDisableMetricsCollection("dataxvftjdrqgionmoky") + .withRecursive("datahxphxokdbv") + .withWildcardFolderPath("dataqttusuxxb") + .withWildcardFileName("datapvue") + .withEnablePartitionDiscovery("datarnnwgrxzcn") + .withPartitionRootPath("datauezxluimkwbwmg") + .withFileListPath("dataqlsn") + .withDeleteFilesAfterCompletion("datachpcjztziu") + .withModifiedDatetimeStart("datayvpcfvi") + .withModifiedDatetimeEnd("dataxciunet") + .withDisableChunking("datagd"); model = BinaryData.fromObject(model).toObject(SftpReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java index e287adf3f738a..269ee58fd7745 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java @@ -13,19 +13,19 @@ public final class SftpWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SftpWriteSettings model = BinaryData.fromString( - "{\"type\":\"SftpWriteSettings\",\"operationTimeout\":\"dataj\",\"useTempFileRename\":\"datarorjbyyxkcvahvb\",\"maxConcurrentConnections\":\"dataxtjivwveng\",\"disableMetricsCollection\":\"datayct\",\"copyBehavior\":\"datahgjyholsmahbjc\",\"metadata\":[{\"name\":\"dataqxgbigozrvlkla\",\"value\":\"datalysse\"},{\"name\":\"datappgsfj\",\"value\":\"datagmogmcjn\"},{\"name\":\"dataukbwypcvqfz\",\"value\":\"dataoxgerizflwl\"},{\"name\":\"datavopqpfcdpy\",\"value\":\"datamwpm\"}],\"\":{\"sxfptv\":\"datajj\",\"rdwfhh\":\"datamvrejkvcimq\"}}") + "{\"type\":\"rtoxsthjyyiryb\",\"operationTimeout\":\"dataawfwws\",\"useTempFileRename\":\"datakbdozsspfwmf\",\"maxConcurrentConnections\":\"datamkmwdok\",\"disableMetricsCollection\":\"datayilho\",\"copyBehavior\":\"datatdioxgsrhxoyrgv\",\"metadata\":[{\"name\":\"datatmzglbplqhbrar\",\"value\":\"datadepsxufyqcqf\"}],\"\":{\"xgxbgochpxps\":\"dataye\"}}") .toObject(SftpWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SftpWriteSettings model = new SftpWriteSettings().withMaxConcurrentConnections("dataxtjivwveng") - .withDisableMetricsCollection("datayct").withCopyBehavior("datahgjyholsmahbjc") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataqxgbigozrvlkla").withValue("datalysse"), - new MetadataItem().withName("datappgsfj").withValue("datagmogmcjn"), - new MetadataItem().withName("dataukbwypcvqfz").withValue("dataoxgerizflwl"), - new MetadataItem().withName("datavopqpfcdpy").withValue("datamwpm"))) - .withOperationTimeout("dataj").withUseTempFileRename("datarorjbyyxkcvahvb"); + SftpWriteSettings model = new SftpWriteSettings().withMaxConcurrentConnections("datamkmwdok") + .withDisableMetricsCollection("datayilho") + .withCopyBehavior("datatdioxgsrhxoyrgv") + .withMetadata( + Arrays.asList(new MetadataItem().withName("datatmzglbplqhbrar").withValue("datadepsxufyqcqf"))) + .withOperationTimeout("dataawfwws") + .withUseTempFileRename("datakbdozsspfwmf"); model = BinaryData.fromObject(model).toObject(SftpWriteSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java index eee5f9847eac1..290cd7df6b966 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java @@ -10,14 +10,14 @@ public final class SharePointOnlineListDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SharePointOnlineListDatasetTypeProperties model = BinaryData.fromString("{\"listName\":\"datagba\"}") + SharePointOnlineListDatasetTypeProperties model = BinaryData.fromString("{\"listName\":\"dataouoq\"}") .toObject(SharePointOnlineListDatasetTypeProperties.class); } 
@org.junit.jupiter.api.Test public void testSerialize() throws Exception { SharePointOnlineListDatasetTypeProperties model - = new SharePointOnlineListDatasetTypeProperties().withListName("datagba"); + = new SharePointOnlineListDatasetTypeProperties().withListName("dataouoq"); model = BinaryData.fromObject(model).toObject(SharePointOnlineListDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java index cb2baceccde7a..ab0ce956d1ba4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java @@ -19,34 +19,33 @@ public final class SharePointOnlineListResourceDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SharePointOnlineListResourceDataset model = BinaryData.fromString( - "{\"type\":\"SharePointOnlineListResource\",\"typeProperties\":{\"listName\":\"datad\"},\"description\":\"mipvlxtyw\",\"structure\":\"datahj\",\"schema\":\"datapllitx\",\"linkedServiceName\":{\"referenceName\":\"rgkwiyoy\",\"parameters\":{\"sizfuewlf\":\"dataivxcodwkwoytcach\",\"gne\":\"datafiikqcdnzsfiu\",\"mcrxlyzoaho\":\"datao\",\"rnlsyiaanistc\":\"dataufakrxjjwnbrmdwt\"}},\"parameters\":{\"fa\":{\"type\":\"Float\",\"defaultValue\":\"datalpphcstmrycpana\"},\"pxngzzxqbgq\":{\"type\":\"Object\",\"defaultValue\":\"datarin\"},\"vbdozwbskueaf\":{\"type\":\"SecureString\",\"defaultValue\":\"datazctbxzjkpifpu\"}},\"annotations\":[\"datambinpxmiwt\",\"dataqi\",\"datapvrd\"],\"folder\":{\"name\":\"d\"},\"\":{\"abux\":\"datax\"}}") + "{\"type\":\"yiyjxzxbt\",\"typeProperties\":{\"listName\":\"datavjgvjzykn\"},\"description\":\"nyzpu\",\"structure\":\"datarmdlkoabfcv\",\"schema\":\"databwxpmgyqshsasmr\",\"linkedServiceName\":{\"referenceName\":\"uy\",\"parameters\":{\"uuihkybgkyncyzj\":\"databxrgrztkyqgu\",\"vypmwdz\":\"datadfeemxiurpf\"}},\"parameters\":{\"llyd\":{\"type\":\"SecureString\",\"defaultValue\":\"dataoxppoqimyhxnpdg\"},\"c\":{\"type\":\"Float\",\"defaultValue\":\"datafdmz\"}},\"annotations\":[\"datavwlxd\",\"datanst\",\"dataeurd\",\"dataxklr\"],\"folder\":{\"name\":\"siznymwz\"},\"\":{\"ezxevltfe\":\"datakihqhnfubevwadx\",\"mftwrphmriipzgfo\":\"dataqoqiaklqakpsti\",\"dcjqneaqkgttbarg\":\"datau\",\"jjpfyxhsppvjs\":\"dataeynqlsnrgaxoyv\"}}") .toObject(SharePointOnlineListResourceDataset.class); - Assertions.assertEquals("mipvlxtyw", model.description()); - Assertions.assertEquals("rgkwiyoy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fa").type()); - Assertions.assertEquals("d", model.folder().name()); + Assertions.assertEquals("nyzpu", model.description()); + Assertions.assertEquals("uy", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("llyd").type()); + Assertions.assertEquals("siznymwz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SharePointOnlineListResourceDataset model = new 
SharePointOnlineListResourceDataset() - .withDescription("mipvlxtyw").withStructure("datahj").withSchema("datapllitx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rgkwiyoy") - .withParameters(mapOf("sizfuewlf", "dataivxcodwkwoytcach", "gne", "datafiikqcdnzsfiu", "mcrxlyzoaho", - "datao", "rnlsyiaanistc", "dataufakrxjjwnbrmdwt"))) - .withParameters(mapOf("fa", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datalpphcstmrycpana"), - "pxngzzxqbgq", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datarin"), - "vbdozwbskueaf", + SharePointOnlineListResourceDataset model = new SharePointOnlineListResourceDataset().withDescription("nyzpu") + .withStructure("datarmdlkoabfcv") + .withSchema("databwxpmgyqshsasmr") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uy") + .withParameters(mapOf("uuihkybgkyncyzj", "databxrgrztkyqgu", "vypmwdz", "datadfeemxiurpf"))) + .withParameters(mapOf("llyd", new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datazctbxzjkpifpu"))) - .withAnnotations(Arrays.asList("datambinpxmiwt", "dataqi", "datapvrd")) - .withFolder(new DatasetFolder().withName("d")).withListName("datad"); + .withDefaultValue("dataoxppoqimyhxnpdg"), + "c", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafdmz"))) + .withAnnotations(Arrays.asList("datavwlxd", "datanst", "dataeurd", "dataxklr")) + .withFolder(new DatasetFolder().withName("siznymwz")) + .withListName("datavjgvjzykn"); model = BinaryData.fromObject(model).toObject(SharePointOnlineListResourceDataset.class); - Assertions.assertEquals("mipvlxtyw", model.description()); - Assertions.assertEquals("rgkwiyoy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fa").type()); - Assertions.assertEquals("d", model.folder().name()); + Assertions.assertEquals("nyzpu", model.description()); + Assertions.assertEquals("uy", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("llyd").type()); + Assertions.assertEquals("siznymwz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java index 8a94072cd83b2..3372a25c42675 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java @@ -11,16 +11,18 @@ public final class SharePointOnlineListSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SharePointOnlineListSource model = BinaryData.fromString( - "{\"type\":\"SharePointOnlineListSource\",\"query\":\"datacnkojy\",\"httpRequestTimeout\":\"datahbtycfj\",\"sourceRetryCount\":\"dataxiapts\",\"sourceRetryWait\":\"datadoybpwzniekedx\",\"maxConcurrentConnections\":\"dataevip\",\"disableMetricsCollection\":\"datazcxqdrqsuve\",\"\":{\"oxqwcusls\":\"datayb\",\"zwybbewjvyrd\":\"datatzq\",\"bwr\":\"dataw\"}}") + 
"{\"type\":\"bprecgeregfthgj\",\"query\":\"dataxzdayzfuv\",\"httpRequestTimeout\":\"dataelmimmcc\",\"sourceRetryCount\":\"datanp\",\"sourceRetryWait\":\"datavafczgisegdei\",\"maxConcurrentConnections\":\"datalcdqxownbjkwgkgo\",\"disableMetricsCollection\":\"dataazmw\",\"\":{\"fjnozityqqoswksh\":\"datafejkrcg\",\"vaufxxvs\":\"datagfewflxby\",\"ismonwhazalftta\":\"dataldtmeendocqaptwk\",\"sve\":\"dataz\"}}") .toObject(SharePointOnlineListSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SharePointOnlineListSource model = new SharePointOnlineListSource().withSourceRetryCount("dataxiapts") - .withSourceRetryWait("datadoybpwzniekedx").withMaxConcurrentConnections("dataevip") - .withDisableMetricsCollection("datazcxqdrqsuve").withQuery("datacnkojy") - .withHttpRequestTimeout("datahbtycfj"); + SharePointOnlineListSource model = new SharePointOnlineListSource().withSourceRetryCount("datanp") + .withSourceRetryWait("datavafczgisegdei") + .withMaxConcurrentConnections("datalcdqxownbjkwgkgo") + .withDisableMetricsCollection("dataazmw") + .withQuery("dataxzdayzfuv") + .withHttpRequestTimeout("dataelmimmcc"); model = BinaryData.fromObject(model).toObject(SharePointOnlineListSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java index c14c402c1c129..f3b1251e592d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java @@ -19,29 +19,34 @@ public final class ShopifyObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ShopifyObjectDataset model = BinaryData.fromString( - "{\"type\":\"ShopifyObject\",\"typeProperties\":{\"tableName\":\"dataybfmpotal\"},\"description\":\"figrxxtrco\",\"structure\":\"dataqe\",\"schema\":\"dataldmxxbjh\",\"linkedServiceName\":{\"referenceName\":\"pvamsxrwqlwdf\",\"parameters\":{\"bboffgxtae\":\"datarplzeqzv\",\"fcyatbxdwr\":\"dataxt\",\"fbpeigkflvovriq\":\"datayvtkmxvztshnu\"}},\"parameters\":{\"txur\":{\"type\":\"Float\",\"defaultValue\":\"datakqcgzygtdjhtbar\"}},\"annotations\":[\"datayyumhzpst\",\"datacqacvttyh\",\"databilnszyjbuw\"],\"folder\":{\"name\":\"sydsci\"},\"\":{\"l\":\"dataayioxpqgqs\",\"akqsjymcfv\":\"datalefeombodvdgf\",\"nbpkfnxrlncmlzvv\":\"datazceuyuqktck\",\"cjqzrevfwcba\":\"datamesfhqs\"}}") + "{\"type\":\"qmbwcacwaaqa\",\"typeProperties\":{\"tableName\":\"dataefqhxy\"},\"description\":\"okyaxxratqlr\",\"structure\":\"databrcmmdtshzu\",\"schema\":\"dataucznbabowrcyrnmj\",\"linkedServiceName\":{\"referenceName\":\"ko\",\"parameters\":{\"mnaphrskmpeajzzy\":\"datazkkagvej\",\"r\":\"dataavamzmzfntte\",\"pjdr\":\"datatxytja\"}},\"parameters\":{\"vqmdmrac\":{\"type\":\"Array\",\"defaultValue\":\"datakvbfaehjjirvjqx\"},\"yzhcvlavy\":{\"type\":\"Float\",\"defaultValue\":\"datafdralihhsscy\"}},\"annotations\":[\"datandxrmyzvtiojtpd\",\"datau\",\"dataxaxoyj\"],\"folder\":{\"name\":\"twedi\"},\"\":{\"tmca\":\"datakteccxfnat\",\"cnbdq\":\"databqpmfhjik\"}}") .toObject(ShopifyObjectDataset.class); - Assertions.assertEquals("figrxxtrco", model.description()); - 
Assertions.assertEquals("pvamsxrwqlwdf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); - Assertions.assertEquals("sydsci", model.folder().name()); + Assertions.assertEquals("okyaxxratqlr", model.description()); + Assertions.assertEquals("ko", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vqmdmrac").type()); + Assertions.assertEquals("twedi", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ShopifyObjectDataset model = new ShopifyObjectDataset().withDescription("figrxxtrco").withStructure("dataqe") - .withSchema("dataldmxxbjh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pvamsxrwqlwdf").withParameters( - mapOf("bboffgxtae", "datarplzeqzv", "fcyatbxdwr", "dataxt", "fbpeigkflvovriq", "datayvtkmxvztshnu"))) - .withParameters(mapOf("txur", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakqcgzygtdjhtbar"))) - .withAnnotations(Arrays.asList("datayyumhzpst", "datacqacvttyh", "databilnszyjbuw")) - .withFolder(new DatasetFolder().withName("sydsci")).withTableName("dataybfmpotal"); + ShopifyObjectDataset model = new ShopifyObjectDataset().withDescription("okyaxxratqlr") + .withStructure("databrcmmdtshzu") + .withSchema("dataucznbabowrcyrnmj") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ko") + .withParameters( + mapOf("mnaphrskmpeajzzy", "datazkkagvej", "r", "dataavamzmzfntte", "pjdr", "datatxytja"))) + .withParameters(mapOf("vqmdmrac", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakvbfaehjjirvjqx"), + "yzhcvlavy", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafdralihhsscy"))) + .withAnnotations(Arrays.asList("datandxrmyzvtiojtpd", "datau", "dataxaxoyj")) + .withFolder(new DatasetFolder().withName("twedi")) + .withTableName("dataefqhxy"); model = BinaryData.fromObject(model).toObject(ShopifyObjectDataset.class); - Assertions.assertEquals("figrxxtrco", model.description()); - Assertions.assertEquals("pvamsxrwqlwdf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); - Assertions.assertEquals("sydsci", model.folder().name()); + Assertions.assertEquals("okyaxxratqlr", model.description()); + Assertions.assertEquals("ko", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vqmdmrac").type()); + Assertions.assertEquals("twedi", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java index e199d1365bc3f..afbd0cbe0d936 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java @@ -11,15 +11,19 @@ public final class ShopifySourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ShopifySource model = BinaryData.fromString( - 
"{\"type\":\"ShopifySource\",\"query\":\"datav\",\"queryTimeout\":\"datapdv\",\"additionalColumns\":\"datayelrteunkwypu\",\"sourceRetryCount\":\"datafmsygt\",\"sourceRetryWait\":\"dataqlfdml\",\"maxConcurrentConnections\":\"datazdbrw\",\"disableMetricsCollection\":\"datawft\",\"\":{\"jsfgkwrcbgxypr\":\"dataxwi\",\"izabjb\":\"databpywecz\"}}") + "{\"type\":\"tpriicte\",\"query\":\"datae\",\"queryTimeout\":\"datayguothnucqktu\",\"additionalColumns\":\"datar\",\"sourceRetryCount\":\"datalbahmivtuph\",\"sourceRetryWait\":\"datayi\",\"maxConcurrentConnections\":\"datarfxft\",\"disableMetricsCollection\":\"datamnrziwkcpxg\",\"\":{\"qap\":\"dataou\"}}") .toObject(ShopifySource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ShopifySource model = new ShopifySource().withSourceRetryCount("datafmsygt").withSourceRetryWait("dataqlfdml") - .withMaxConcurrentConnections("datazdbrw").withDisableMetricsCollection("datawft") - .withQueryTimeout("datapdv").withAdditionalColumns("datayelrteunkwypu").withQuery("datav"); + ShopifySource model = new ShopifySource().withSourceRetryCount("datalbahmivtuph") + .withSourceRetryWait("datayi") + .withMaxConcurrentConnections("datarfxft") + .withDisableMetricsCollection("datamnrziwkcpxg") + .withQueryTimeout("datayguothnucqktu") + .withAdditionalColumns("datar") + .withQuery("datae"); model = BinaryData.fromObject(model).toObject(ShopifySource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java index 9b8b5a1f25628..dafc8dbde16b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java @@ -10,15 +10,15 @@ public final class SkipErrorFileTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SkipErrorFile model - = BinaryData.fromString("{\"fileMissing\":\"dataalh\",\"dataInconsistency\":\"dataneclphmjsqcubyjr\"}") - .toObject(SkipErrorFile.class); + SkipErrorFile model = BinaryData + .fromString("{\"fileMissing\":\"datakrynziudmhed\",\"dataInconsistency\":\"dataygwagvuioxjwztr\"}") + .toObject(SkipErrorFile.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SkipErrorFile model - = new SkipErrorFile().withFileMissing("dataalh").withDataInconsistency("dataneclphmjsqcubyjr"); + = new SkipErrorFile().withFileMissing("datakrynziudmhed").withDataInconsistency("dataygwagvuioxjwztr"); model = BinaryData.fromObject(model).toObject(SkipErrorFile.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java index 9e0bd390ea4df..3777b5d283ef3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java @@ -19,31 +19,34 @@ public final 
class SnowflakeDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeDataset model = BinaryData.fromString( - "{\"type\":\"SnowflakeTable\",\"typeProperties\":{\"schema\":\"dataqjfskjva\",\"table\":\"dataxrwkns\"},\"description\":\"hypbrzwiypz\",\"structure\":\"datahkecebtpgvutb\",\"schema\":\"datasfd\",\"linkedServiceName\":{\"referenceName\":\"wq\",\"parameters\":{\"dgrcifflxqqn\":\"dataowke\",\"ujticwmlf\":\"datagtcuyuwgnyjd\"}},\"parameters\":{\"ufpvvdgnmeiomn\":{\"type\":\"Float\",\"defaultValue\":\"datafmcoxbktuaj\"},\"i\":{\"type\":\"Float\",\"defaultValue\":\"dataaibcfbfyqz\"}},\"annotations\":[\"datafgvmrkmgifmy\",\"databuhdnhhcmtslptbd\",\"dataonhbl\"],\"folder\":{\"name\":\"cnuqfpzjz\"},\"\":{\"mruawqesqsqmiekx\":\"datacwtwtrchk\",\"qchf\":\"datap\",\"cu\":\"datatykkvjjlba\"}}") + "{\"type\":\"azburskuff\",\"typeProperties\":{\"schema\":\"datapncdocloepjz\",\"table\":\"databod\"},\"description\":\"bkvunn\",\"structure\":\"dataoasnzlawow\",\"schema\":\"datanixxwdqzur\",\"linkedServiceName\":{\"referenceName\":\"ukszk\",\"parameters\":{\"vvaeght\":\"datahh\",\"uvwa\":\"dataqonxvnmcyze\",\"shkmqfvbks\":\"dataesswbrnbox\",\"wowrmhmifhfutjy\":\"datasmq\"}},\"parameters\":{\"moygcax\":{\"type\":\"Bool\",\"defaultValue\":\"datatguqa\"},\"vgicxt\":{\"type\":\"SecureString\",\"defaultValue\":\"datadqr\"}},\"annotations\":[\"dataqgnsydksvjfgxgdp\",\"datavjk\",\"datacosc\",\"dataljjhcsgzooefzsdt\"],\"folder\":{\"name\":\"kaqdvwo\"},\"\":{\"tingzic\":\"datavfvhrqxrqgh\"}}") .toObject(SnowflakeDataset.class); - Assertions.assertEquals("hypbrzwiypz", model.description()); - Assertions.assertEquals("wq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ufpvvdgnmeiomn").type()); - Assertions.assertEquals("cnuqfpzjz", model.folder().name()); + Assertions.assertEquals("bkvunn", model.description()); + Assertions.assertEquals("ukszk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("moygcax").type()); + Assertions.assertEquals("kaqdvwo", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeDataset model = new SnowflakeDataset().withDescription("hypbrzwiypz") - .withStructure("datahkecebtpgvutb").withSchema("datasfd") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wq") - .withParameters(mapOf("dgrcifflxqqn", "dataowke", "ujticwmlf", "datagtcuyuwgnyjd"))) - .withParameters(mapOf("ufpvvdgnmeiomn", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafmcoxbktuaj"), "i", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataaibcfbfyqz"))) - .withAnnotations(Arrays.asList("datafgvmrkmgifmy", "databuhdnhhcmtslptbd", "dataonhbl")) - .withFolder(new DatasetFolder().withName("cnuqfpzjz")).withSchemaTypePropertiesSchema("dataqjfskjva") - .withTable("dataxrwkns"); + SnowflakeDataset model = new SnowflakeDataset().withDescription("bkvunn") + .withStructure("dataoasnzlawow") + .withSchema("datanixxwdqzur") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ukszk") + .withParameters(mapOf("vvaeght", "datahh", "uvwa", "dataqonxvnmcyze", "shkmqfvbks", "dataesswbrnbox", + "wowrmhmifhfutjy", "datasmq"))) + .withParameters(mapOf("moygcax", + new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datatguqa"), "vgicxt", + new 
ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datadqr"))) + .withAnnotations(Arrays.asList("dataqgnsydksvjfgxgdp", "datavjk", "datacosc", "dataljjhcsgzooefzsdt")) + .withFolder(new DatasetFolder().withName("kaqdvwo")) + .withSchemaTypePropertiesSchema("datapncdocloepjz") + .withTable("databod"); model = BinaryData.fromObject(model).toObject(SnowflakeDataset.class); - Assertions.assertEquals("hypbrzwiypz", model.description()); - Assertions.assertEquals("wq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ufpvvdgnmeiomn").type()); - Assertions.assertEquals("cnuqfpzjz", model.folder().name()); + Assertions.assertEquals("bkvunn", model.description()); + Assertions.assertEquals("ukszk", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("moygcax").type()); + Assertions.assertEquals("kaqdvwo", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java index 975bfad630c43..c048b72b61e34 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class SnowflakeDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datayqokbgumuejxxpx\",\"table\":\"datazch\"}") + = BinaryData.fromString("{\"schema\":\"datayga\",\"table\":\"datahpwmdkyfgye\"}") .toObject(SnowflakeDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SnowflakeDatasetTypeProperties model - = new SnowflakeDatasetTypeProperties().withSchema("datayqokbgumuejxxpx").withTable("datazch"); + = new SnowflakeDatasetTypeProperties().withSchema("datayga").withTable("datahpwmdkyfgye"); model = BinaryData.fromObject(model).toObject(SnowflakeDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java index 9037e93320e6c..37faa27fba00b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java @@ -13,15 +13,16 @@ public final class SnowflakeExportCopyCommandTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeExportCopyCommand model = BinaryData.fromString( - 
"{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"eas\":\"datakmgp\",\"wdosfgbvsoz\":\"datadqpwhp\",\"tlb\":\"datafnpwxcjci\",\"oyl\":\"datauemqetmotuvhhed\"},\"additionalFormatOptions\":{\"wcca\":\"datal\"},\"\":{\"lxqlzzkbx\":\"databdvsorvhbygw\",\"lnkkghlexvqhbn\":\"datancggamxbtqizydai\",\"ltbpqjfoujeiagny\":\"datamokzx\",\"jssay\":\"datae\"}}") + "{\"type\":\"mfom\",\"additionalCopyOptions\":{\"tabgejopvegmtgoe\":\"datayphdkxw\",\"bdjzcfdpxbwqgkfx\":\"datayhojdgwezcrss\",\"yevvuddnwj\":\"datadtj\",\"flemxbmaiiv\":\"databc\"},\"additionalFormatOptions\":{\"o\":\"datatzbkevh\",\"sis\":\"datajpumpqlugzydylf\"},\"\":{\"htkprbmcazrfh\":\"datavfnxxkmrf\",\"akjxduhqilv\":\"datajwik\",\"lfrfardjqwd\":\"datajctp\"}}") .toObject(SnowflakeExportCopyCommand.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeExportCopyCommand model = new SnowflakeExportCopyCommand().withAdditionalCopyOptions( - mapOf("eas", "datakmgp", "wdosfgbvsoz", "datadqpwhp", "tlb", "datafnpwxcjci", "oyl", "datauemqetmotuvhhed")) - .withAdditionalFormatOptions(mapOf("wcca", "datal")); + SnowflakeExportCopyCommand model = new SnowflakeExportCopyCommand() + .withAdditionalCopyOptions(mapOf("tabgejopvegmtgoe", "datayphdkxw", "bdjzcfdpxbwqgkfx", "datayhojdgwezcrss", + "yevvuddnwj", "datadtj", "flemxbmaiiv", "databc")) + .withAdditionalFormatOptions(mapOf("o", "datatzbkevh", "sis", "datajpumpqlugzydylf")); model = BinaryData.fromObject(model).toObject(SnowflakeExportCopyCommand.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java index 7be88e45574e7..067238d48f7fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java @@ -13,16 +13,17 @@ public final class SnowflakeImportCopyCommandTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeImportCopyCommand model = BinaryData.fromString( - "{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"lvktjbmce\":\"databe\",\"ukdawgz\":\"datal\",\"vygx\":\"databwhjvonu\"},\"additionalFormatOptions\":{\"jcjvdajxebm\":\"datatjoxocothsg\",\"nd\":\"dataiyrctfaabkukra\",\"dhjdwfnbiyxqr\":\"datahwdicntqsrhacjsb\"},\"\":{\"yovljtrmlxr\":\"dataffk\",\"wbqukjithxlpvp\":\"datallugnxmbwdkzi\",\"shhcktbfmtbprt\":\"datavceiwc\"}}") + "{\"type\":\"lb\",\"additionalCopyOptions\":{\"ezgbqi\":\"datanfqqlg\",\"aceokrarzkza\":\"datawe\",\"zomtzpukm\":\"dataznvyeuxd\"},\"additionalFormatOptions\":{\"nlfzq\":\"datalzb\",\"wzesejdcpcpeu\":\"datamxuo\",\"jrptltytbqhejhn\":\"databofzmvtwyjc\"},\"\":{\"vygquiwcfqzo\":\"datafmeeuuurxgslxz\",\"irqkskyyam\":\"datagwwdevqmtejhvggy\",\"lmibvczdj\":\"datamimsyiwcdwqjbrrx\"}}") .toObject(SnowflakeImportCopyCommand.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SnowflakeImportCopyCommand model = new SnowflakeImportCopyCommand() - .withAdditionalCopyOptions(mapOf("lvktjbmce", "databe", "ukdawgz", "datal", "vygx", "databwhjvonu")) - .withAdditionalFormatOptions(mapOf("jcjvdajxebm", 
"datatjoxocothsg", "nd", "dataiyrctfaabkukra", - "dhjdwfnbiyxqr", "datahwdicntqsrhacjsb")); + .withAdditionalCopyOptions( + mapOf("ezgbqi", "datanfqqlg", "aceokrarzkza", "datawe", "zomtzpukm", "dataznvyeuxd")) + .withAdditionalFormatOptions( + mapOf("nlfzq", "datalzb", "wzesejdcpcpeu", "datamxuo", "jrptltytbqhejhn", "databofzmvtwyjc")); model = BinaryData.fromObject(model).toObject(SnowflakeImportCopyCommand.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java index 3ee66d7cf7ed2..4b8f13e7eca78 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java @@ -14,21 +14,24 @@ public final class SnowflakeSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeSink model = BinaryData.fromString( - "{\"type\":\"SnowflakeSink\",\"preCopyScript\":\"datastofdedlmfwabf\",\"importSettings\":{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"lwh\":\"databiwxmcsxidaz\",\"fperheiplzms\":\"datayikhdcilinbuok\",\"u\":\"datahqrdvqvalo\",\"fjgklmyomav\":\"datawoigofumbpmzed\"},\"additionalFormatOptions\":{\"tqvjn\":\"dataaoaixipcwyinfy\",\"yaeyyiwraowpdm\":\"dataemqwut\",\"dmwcxvcron\":\"dataxugd\",\"z\":\"dataidhwoyznjddsh\"},\"\":{\"puotniqzqmpgvyy\":\"datavxv\",\"w\":\"datajwwbrhjhcwcfftsz\"}},\"writeBatchSize\":\"dataighjgvhgyeoikxjp\",\"writeBatchTimeout\":\"dataggpsaqfn\",\"sinkRetryCount\":\"datauwawmbusleg\",\"sinkRetryWait\":\"dataqzkzworuhhvdeodc\",\"maxConcurrentConnections\":\"datahfxb\",\"disableMetricsCollection\":\"datapn\",\"\":{\"lhwutyuvuzqtrf\":\"dataylgpznb\"}}") + "{\"type\":\"kbr\",\"preCopyScript\":\"datau\",\"importSettings\":{\"type\":\"ooytilsmise\",\"additionalCopyOptions\":{\"lvwtslzblgvezhim\":\"dataskynkkezkv\",\"dzkovt\":\"dataiyqwlxkyoysyutnr\"},\"additionalFormatOptions\":{\"xmyblway\":\"datacaprxh\",\"wyfy\":\"datapaggkrumpu\",\"jdrvjktvpy\":\"datahcboipxhghicwv\"},\"\":{\"qu\":\"datajmm\",\"zh\":\"datacrzgluqacebcn\",\"wkgcpfz\":\"dataaumjuruspflvgl\"}},\"writeBatchSize\":\"datawvxwqqmvsrbmf\",\"writeBatchTimeout\":\"datatm\",\"sinkRetryCount\":\"dataob\",\"sinkRetryWait\":\"dataoyownygbralc\",\"maxConcurrentConnections\":\"dataebyczwegt\",\"disableMetricsCollection\":\"dataprazqxxvksqi\",\"\":{\"cmegoll\":\"datamidvturdgl\",\"pryhz\":\"datal\"}}") .toObject(SnowflakeSink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeSink model = new SnowflakeSink().withWriteBatchSize("dataighjgvhgyeoikxjp") - .withWriteBatchTimeout("dataggpsaqfn").withSinkRetryCount("datauwawmbusleg") - .withSinkRetryWait("dataqzkzworuhhvdeodc").withMaxConcurrentConnections("datahfxb") - .withDisableMetricsCollection("datapn").withPreCopyScript("datastofdedlmfwabf") + SnowflakeSink model = new SnowflakeSink().withWriteBatchSize("datawvxwqqmvsrbmf") + .withWriteBatchTimeout("datatm") + .withSinkRetryCount("dataob") + .withSinkRetryWait("dataoyownygbralc") + .withMaxConcurrentConnections("dataebyczwegt") + .withDisableMetricsCollection("dataprazqxxvksqi") + .withPreCopyScript("datau") .withImportSettings(new 
SnowflakeImportCopyCommand() - .withAdditionalCopyOptions(mapOf("lwh", "databiwxmcsxidaz", "fperheiplzms", "datayikhdcilinbuok", "u", - "datahqrdvqvalo", "fjgklmyomav", "datawoigofumbpmzed")) - .withAdditionalFormatOptions(mapOf("tqvjn", "dataaoaixipcwyinfy", "yaeyyiwraowpdm", "dataemqwut", - "dmwcxvcron", "dataxugd", "z", "dataidhwoyznjddsh"))); + .withAdditionalCopyOptions( + mapOf("lvwtslzblgvezhim", "dataskynkkezkv", "dzkovt", "dataiyqwlxkyoysyutnr")) + .withAdditionalFormatOptions( + mapOf("xmyblway", "datacaprxh", "wyfy", "datapaggkrumpu", "jdrvjktvpy", "datahcboipxhghicwv"))); model = BinaryData.fromObject(model).toObject(SnowflakeSink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java index 04ba1162faa29..bcfaac92e47be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java @@ -14,21 +14,21 @@ public final class SnowflakeSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeSource model = BinaryData.fromString( - "{\"type\":\"SnowflakeSource\",\"query\":\"datagpcrvv\",\"exportSettings\":{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"bwpai\":\"datalckpznove\",\"mrpdxnr\":\"dataikzysdharswh\",\"dfplk\":\"datavtvtyqlthn\",\"ltsnqjcmkpxb\":\"datadiehrajbatgmxk\"},\"additionalFormatOptions\":{\"qmyowddhtwaxob\":\"datarfkwc\",\"vsclwpsteuvjdnh\":\"datazatqocvrdj\"},\"\":{\"bfomo\":\"datavymvnlaehitxo\",\"rhhbvbqxtktkeuap\":\"datacyn\",\"bhptraljcq\":\"datamoofb\"}},\"sourceRetryCount\":\"datapmathiydmkyvsx\",\"sourceRetryWait\":\"dataivghajpddgfozn\",\"maxConcurrentConnections\":\"datamkpjoesozcuhunm\",\"disableMetricsCollection\":\"databmwptdrrruy\",\"\":{\"ckxhmxzsmpoiuta\":\"dataiumuxnabivg\",\"vo\":\"datatvpbiojncgjog\"}}") + "{\"type\":\"tjjewezcknp\",\"query\":\"datartldijgrbi\",\"exportSettings\":{\"type\":\"natbecuh\",\"additionalCopyOptions\":{\"wgbajmgctwamj\":\"datajtw\",\"pedmkxtdy\":\"datawvmugistnyzmhh\",\"m\":\"datazgfqtgf\",\"jsrcqpxaajt\":\"datacvblrvkqxhk\"},\"additionalFormatOptions\":{\"azmxssbv\":\"dataqtuztzmubxngs\"},\"\":{\"aousj\":\"datakkewg\"}},\"sourceRetryCount\":\"datavahqjdi\",\"sourceRetryWait\":\"dataoldwahe\",\"maxConcurrentConnections\":\"datadicxolmm\",\"disableMetricsCollection\":\"datad\",\"\":{\"vogrripopzyde\":\"dataoiwbmjqvmpzc\",\"rgwy\":\"datapwwkdmsnezdumjqd\",\"sveaerg\":\"databqmelmqkbepie\",\"ksknst\":\"datajbrnlbfnuppw\"}}") .toObject(SnowflakeSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeSource model - = new SnowflakeSource().withSourceRetryCount("datapmathiydmkyvsx").withSourceRetryWait("dataivghajpddgfozn") - .withMaxConcurrentConnections("datamkpjoesozcuhunm").withDisableMetricsCollection("databmwptdrrruy") - .withQuery("datagpcrvv").withExportSettings( - new SnowflakeExportCopyCommand() - .withAdditionalCopyOptions(mapOf("bwpai", "datalckpznove", "mrpdxnr", "dataikzysdharswh", - "dfplk", "datavtvtyqlthn", "ltsnqjcmkpxb", "datadiehrajbatgmxk")) - .withAdditionalFormatOptions( - mapOf("qmyowddhtwaxob", "datarfkwc", "vsclwpsteuvjdnh", 
"datazatqocvrdj"))); + SnowflakeSource model = new SnowflakeSource().withSourceRetryCount("datavahqjdi") + .withSourceRetryWait("dataoldwahe") + .withMaxConcurrentConnections("datadicxolmm") + .withDisableMetricsCollection("datad") + .withQuery("datartldijgrbi") + .withExportSettings(new SnowflakeExportCopyCommand() + .withAdditionalCopyOptions(mapOf("wgbajmgctwamj", "datajtw", "pedmkxtdy", "datawvmugistnyzmhh", "m", + "datazgfqtgf", "jsrcqpxaajt", "datacvblrvkqxhk")) + .withAdditionalFormatOptions(mapOf("azmxssbv", "dataqtuztzmubxngs"))); model = BinaryData.fromObject(model).toObject(SnowflakeSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java index 37e7d601d0993..8328e5a2b521d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java @@ -19,35 +19,35 @@ public final class SnowflakeV2DatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeV2Dataset model = BinaryData.fromString( - "{\"type\":\"SnowflakeV2Table\",\"typeProperties\":{\"schema\":\"datauzginrkjkn\",\"table\":\"datafsu\"},\"description\":\"ybhozlsbufnhb\",\"structure\":\"datantpoe\",\"schema\":\"datatrsljzmzuic\",\"linkedServiceName\":{\"referenceName\":\"ggsxznb\",\"parameters\":{\"njl\":\"datakqbylb\",\"nitvkyahfoyfzo\":\"dataicqomanefwl\",\"cjukfalwc\":\"datadyaepre\",\"cayvqbeqpu\":\"dataec\"}},\"parameters\":{\"hicrjriy\":{\"type\":\"Int\",\"defaultValue\":\"datayl\"},\"nqocybrhxgiknrlu\":{\"type\":\"Array\",\"defaultValue\":\"dataydrlqllbof\"},\"xdgpksgo\":{\"type\":\"SecureString\",\"defaultValue\":\"dataiqbroqjfeamzk\"}},\"annotations\":[\"datanvnj\",\"dataldrqqv\",\"dataugusqlx\"],\"folder\":{\"name\":\"dthfwlnvqa\"},\"\":{\"ggpaiheaeyzwloq\":\"datafisblnlmpuyyp\"}}") + "{\"type\":\"ixfzau\",\"typeProperties\":{\"schema\":\"datapuqyrpubbkhcidc\",\"table\":\"dataoolkugzowg\"},\"description\":\"blnagjnpahzhpqsc\",\"structure\":\"dataileqjzrijebmuio\",\"schema\":\"datamndwohoeashuxfvb\",\"linkedServiceName\":{\"referenceName\":\"imzwynsmmphvkye\",\"parameters\":{\"ofzrk\":\"dataeyuoyjmjwqdslqr\",\"symnrtvqmima\":\"dataztpyaymhxlnmwa\",\"vzxx\":\"datayotpcvpahhpkvyq\",\"tominrufq\":\"datand\"}},\"parameters\":{\"ewk\":{\"type\":\"Float\",\"defaultValue\":\"dataasfmhbx\"},\"vuzcc\":{\"type\":\"Array\",\"defaultValue\":\"datat\"},\"hlc\":{\"type\":\"Float\",\"defaultValue\":\"datarybytcaqpj\"}},\"annotations\":[\"datavxyyhhsisz\",\"dataqfrpanteqiw\"],\"folder\":{\"name\":\"kaamim\"},\"\":{\"rjgyindexijov\":\"datacxysjdfxvksi\"}}") .toObject(SnowflakeV2Dataset.class); - Assertions.assertEquals("ybhozlsbufnhb", model.description()); - Assertions.assertEquals("ggsxznb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hicrjriy").type()); - Assertions.assertEquals("dthfwlnvqa", model.folder().name()); + Assertions.assertEquals("blnagjnpahzhpqsc", model.description()); + Assertions.assertEquals("imzwynsmmphvkye", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, 
model.parameters().get("ewk").type()); + Assertions.assertEquals("kaamim", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeV2Dataset model = new SnowflakeV2Dataset().withDescription("ybhozlsbufnhb").withStructure("datantpoe") - .withSchema("datatrsljzmzuic") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ggsxznb") - .withParameters(mapOf("njl", "datakqbylb", "nitvkyahfoyfzo", "dataicqomanefwl", "cjukfalwc", - "datadyaepre", "cayvqbeqpu", "dataec"))) - .withParameters(mapOf("hicrjriy", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayl"), "nqocybrhxgiknrlu", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataydrlqllbof"), - "xdgpksgo", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataiqbroqjfeamzk"))) - .withAnnotations(Arrays.asList("datanvnj", "dataldrqqv", "dataugusqlx")) - .withFolder(new DatasetFolder().withName("dthfwlnvqa")).withSchemaTypePropertiesSchema("datauzginrkjkn") - .withTable("datafsu"); + SnowflakeV2Dataset model = new SnowflakeV2Dataset().withDescription("blnagjnpahzhpqsc") + .withStructure("dataileqjzrijebmuio") + .withSchema("datamndwohoeashuxfvb") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("imzwynsmmphvkye") + .withParameters(mapOf("ofzrk", "dataeyuoyjmjwqdslqr", "symnrtvqmima", "dataztpyaymhxlnmwa", "vzxx", + "datayotpcvpahhpkvyq", "tominrufq", "datand"))) + .withParameters(mapOf("ewk", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataasfmhbx"), "vuzcc", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datat"), "hlc", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarybytcaqpj"))) + .withAnnotations(Arrays.asList("datavxyyhhsisz", "dataqfrpanteqiw")) + .withFolder(new DatasetFolder().withName("kaamim")) + .withSchemaTypePropertiesSchema("datapuqyrpubbkhcidc") + .withTable("dataoolkugzowg"); model = BinaryData.fromObject(model).toObject(SnowflakeV2Dataset.class); - Assertions.assertEquals("ybhozlsbufnhb", model.description()); - Assertions.assertEquals("ggsxznb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hicrjriy").type()); - Assertions.assertEquals("dthfwlnvqa", model.folder().name()); + Assertions.assertEquals("blnagjnpahzhpqsc", model.description()); + Assertions.assertEquals("imzwynsmmphvkye", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("ewk").type()); + Assertions.assertEquals("kaamim", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java index 56d3797d5d127..e0e7e745b100f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java @@ -14,18 +14,23 @@ public final class SnowflakeV2SinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeV2Sink 
model = BinaryData.fromString( - "{\"type\":\"SnowflakeV2Sink\",\"preCopyScript\":\"datauxwuepjcug\",\"importSettings\":{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"hqkie\":\"dataqbwgmznvlwcn\"},\"additionalFormatOptions\":{\"xetxzcjrb\":\"dataubwz\",\"kbeoofox\":\"dataqcwnbx\"},\"\":{\"prjcfyvkkgxil\":\"dataunewkssxpnhhl\",\"kgwdng\":\"datalonzifdfs\",\"xdvrajoghgxgzb\":\"datadnrtydhqkariatxh\"}},\"writeBatchSize\":\"dataasgam\",\"writeBatchTimeout\":\"dataetxnsgcwad\",\"sinkRetryCount\":\"dataqbageltffqalmcq\",\"sinkRetryWait\":\"datapzwwtcwbgmx\",\"maxConcurrentConnections\":\"dataynslcty\",\"disableMetricsCollection\":\"datai\",\"\":{\"tarxtdgpmhzll\":\"datamhhqw\",\"zsdnfp\":\"datah\",\"ycyfubgnml\":\"datakyks\",\"qvcutwngfd\":\"datajdaxiohulvp\"}}") + "{\"type\":\"vg\",\"preCopyScript\":\"dataefezjyfaqdwf\",\"importSettings\":{\"type\":\"jncaqgt\",\"additionalCopyOptions\":{\"qziljrsycujnszn\":\"dataalxxgela\",\"pixuyyqsonfxsf\":\"dataskwjjupukh\",\"ytedspkduhz\":\"dataedjnxicufxt\",\"ttesqpjcuuy\":\"datavbgcf\"},\"additionalFormatOptions\":{\"m\":\"dataind\"},\"\":{\"agagmgullnash\":\"datahtlopbnsbjzrnj\",\"betzcdbqzw\":\"dataoxrma\",\"wpllojdccd\":\"datatakbvaqguaubm\",\"jrcrbkw\":\"dataewtddigmmjve\"}},\"writeBatchSize\":\"datau\",\"writeBatchTimeout\":\"datamjcemkcwc\",\"sinkRetryCount\":\"datahqjpiafzwhrgm\",\"sinkRetryWait\":\"datatroafzrqmog\",\"maxConcurrentConnections\":\"datajrryzbqpksoa\",\"disableMetricsCollection\":\"datazuhuojjbkyddsdg\",\"\":{\"shnksupchzspgby\":\"datacqkbetemam\",\"vuhgchtaea\":\"dataumxyqhctr\",\"zuk\":\"databqkx\"}}") .toObject(SnowflakeV2Sink.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeV2Sink model = new SnowflakeV2Sink().withWriteBatchSize("dataasgam") - .withWriteBatchTimeout("dataetxnsgcwad").withSinkRetryCount("dataqbageltffqalmcq") - .withSinkRetryWait("datapzwwtcwbgmx").withMaxConcurrentConnections("dataynslcty") - .withDisableMetricsCollection("datai").withPreCopyScript("datauxwuepjcug").withImportSettings( - new SnowflakeImportCopyCommand().withAdditionalCopyOptions(mapOf("hqkie", "dataqbwgmznvlwcn")) - .withAdditionalFormatOptions(mapOf("xetxzcjrb", "dataubwz", "kbeoofox", "dataqcwnbx"))); + SnowflakeV2Sink model = new SnowflakeV2Sink().withWriteBatchSize("datau") + .withWriteBatchTimeout("datamjcemkcwc") + .withSinkRetryCount("datahqjpiafzwhrgm") + .withSinkRetryWait("datatroafzrqmog") + .withMaxConcurrentConnections("datajrryzbqpksoa") + .withDisableMetricsCollection("datazuhuojjbkyddsdg") + .withPreCopyScript("dataefezjyfaqdwf") + .withImportSettings(new SnowflakeImportCopyCommand() + .withAdditionalCopyOptions(mapOf("qziljrsycujnszn", "dataalxxgela", "pixuyyqsonfxsf", "dataskwjjupukh", + "ytedspkduhz", "dataedjnxicufxt", "ttesqpjcuuy", "datavbgcf")) + .withAdditionalFormatOptions(mapOf("m", "dataind"))); model = BinaryData.fromObject(model).toObject(SnowflakeV2Sink.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java index 42ff8f322dc71..6569676d430bc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java @@ -14,19 +14,22 @@ public final class SnowflakeV2SourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SnowflakeV2Source model = BinaryData.fromString( - "{\"type\":\"SnowflakeV2Source\",\"query\":\"datav\",\"exportSettings\":{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"qlshg\":\"dataexwhoscinpmvcvnm\",\"elazvcfhiayro\":\"dataym\"},\"additionalFormatOptions\":{\"wzttkhyrwdsnpuoa\":\"dataezlqwbgly\",\"pizybpjypqatkzgh\":\"datar\",\"wrbmxw\":\"datac\"},\"\":{\"wffjiezbmhsqyj\":\"datakf\",\"ntghjmmjmmjnxh\":\"datawbzhafcoayuq\"}},\"sourceRetryCount\":\"datajtsemnidbaykvlrs\",\"sourceRetryWait\":\"dataniocyo\",\"maxConcurrentConnections\":\"dataimbchi\",\"disableMetricsCollection\":\"datawaffsjqn\",\"\":{\"vaagazryyjjwggp\":\"dataybugojzcargsxmaw\",\"csjw\":\"datadugwddob\",\"hzugwkqnmh\":\"dataxedzmmcgqifh\"}}") + "{\"type\":\"hvupbiouuqo\",\"query\":\"dataw\",\"exportSettings\":{\"type\":\"cenjctcxamuplxks\",\"additionalCopyOptions\":{\"cxctnijxyczzwh\":\"datadcawazqldakbi\"},\"additionalFormatOptions\":{\"lnuhocbbeoxoewp\":\"datadnqiavoyhno\",\"yuasstokzhmyayb\":\"datatzofcurnhujcu\"},\"\":{\"s\":\"datayubytslfmajswrf\",\"losl\":\"datagvkqz\",\"uxcffbsnlvissyzw\":\"dataufnhejualug\",\"mesoxjkpwbgfhj\":\"datazdyzufgnnsoj\"}},\"sourceRetryCount\":\"dataxtwsreadghlokvis\",\"sourceRetryWait\":\"datamheimsioonn\",\"maxConcurrentConnections\":\"datagrtkeg\",\"disableMetricsCollection\":\"datavwff\",\"\":{\"kksdjkanizdcjx\":\"datauxpyveavstzbz\",\"hivfosbrqeywhlq\":\"datazpmwxvfrmvtwwb\"}}") .toObject(SnowflakeV2Source.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SnowflakeV2Source model = new SnowflakeV2Source().withSourceRetryCount("datajtsemnidbaykvlrs") - .withSourceRetryWait("dataniocyo").withMaxConcurrentConnections("dataimbchi") - .withDisableMetricsCollection("datawaffsjqn").withQuery("datav") - .withExportSettings(new SnowflakeExportCopyCommand() - .withAdditionalCopyOptions(mapOf("qlshg", "dataexwhoscinpmvcvnm", "elazvcfhiayro", "dataym")) - .withAdditionalFormatOptions( - mapOf("wzttkhyrwdsnpuoa", "dataezlqwbgly", "pizybpjypqatkzgh", "datar", "wrbmxw", "datac"))); + SnowflakeV2Source model + = new SnowflakeV2Source().withSourceRetryCount("dataxtwsreadghlokvis") + .withSourceRetryWait("datamheimsioonn") + .withMaxConcurrentConnections("datagrtkeg") + .withDisableMetricsCollection("datavwff") + .withQuery("dataw") + .withExportSettings(new SnowflakeExportCopyCommand() + .withAdditionalCopyOptions(mapOf("cxctnijxyczzwh", "datadcawazqldakbi")) + .withAdditionalFormatOptions( + mapOf("lnuhocbbeoxoewp", "datadnqiavoyhno", "yuasstokzhmyayb", "datatzofcurnhujcu"))); model = BinaryData.fromObject(model).toObject(SnowflakeV2Source.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java index 9703d45fa93ed..a1388c4e2c4b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java @@ -12,16 +12,17 @@ public final class SparkConfigurationParametrizationReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SparkConfigurationParametrizationReference model - = BinaryData.fromString("{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"datacry\"}") - .toObject(SparkConfigurationParametrizationReference.class); + SparkConfigurationParametrizationReference model = BinaryData + .fromString("{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"datansqtrtcacdomz\"}") + .toObject(SparkConfigurationParametrizationReference.class); Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SparkConfigurationParametrizationReference model = new SparkConfigurationParametrizationReference() - .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE).withReferenceName("datacry"); + .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) + .withReferenceName("datansqtrtcacdomz"); model = BinaryData.fromObject(model).toObject(SparkConfigurationParametrizationReference.class); Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.type()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java index e1c30dfb381f9..277660af4f239 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java @@ -11,15 +11,15 @@ public final class SparkDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SparkDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"datau\",\"table\":\"dataodincfbaoboiahk\",\"schema\":\"datasvaxmksaxyeedvp\"}") + .fromString("{\"tableName\":\"dataxwhxrztdacrqcwkk\",\"table\":\"dataysyajmm\",\"schema\":\"dataupdc\"}") .toObject(SparkDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SparkDatasetTypeProperties model = new SparkDatasetTypeProperties().withTableName("datau") - .withTable("dataodincfbaoboiahk").withSchema("datasvaxmksaxyeedvp"); + SparkDatasetTypeProperties model = new SparkDatasetTypeProperties().withTableName("dataxwhxrztdacrqcwkk") + .withTable("dataysyajmm") + .withSchema("dataupdc"); model = BinaryData.fromObject(model).toObject(SparkDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java index 1da32014da4ec..2a5e4c6db7821 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java @@ -19,33 +19,37 @@ public final class SparkObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SparkObjectDataset model = BinaryData.fromString( - "{\"type\":\"SparkObject\",\"typeProperties\":{\"tableName\":\"datandmtqvmkmzvag\",\"table\":\"datafblsxyfqgtodg\",\"schema\":\"datalefmizdcsr\"},\"description\":\"bnasgfyxhsxcg\",\"structure\":\"datam\",\"schema\":\"datapqcnxs\",\"linkedServiceName\":{\"referenceName\":\"ehojvmazu\",\"parameters\":{\"hpdnc\":\"datapiuu\",\"h\":\"datakqrgiv\"}},\"parameters\":{\"uyrgcaygumqeo\":{\"type\":\"Bool\",\"defaultValue\":\"datalyhbjfnmmibgwc\"},\"sawha\":{\"type\":\"Float\",\"defaultValue\":\"datareud\"},\"zmfk\":{\"type\":\"Float\",\"defaultValue\":\"datas\"},\"k\":{\"type\":\"Int\",\"defaultValue\":\"datag\"}},\"annotations\":[\"datakcge\",\"datanubr\"],\"folder\":{\"name\":\"fkxnwt\"},\"\":{\"iwap\":\"dataoeqcrjvcjskqsfn\",\"nh\":\"dataunhdikatzmtuv\",\"ibxl\":\"datatjk\",\"u\":\"datazlvkcm\"}}") + "{\"type\":\"brhhvipgt\",\"typeProperties\":{\"tableName\":\"datarchmyuc\",\"table\":\"datamwyvhdiyp\",\"schema\":\"dataqqq\"},\"description\":\"aaoylwhfmkbwe\",\"structure\":\"datagypjixdmobadydw\",\"schema\":\"datauwdvclsxdqdc\",\"linkedServiceName\":{\"referenceName\":\"nzi\",\"parameters\":{\"gd\":\"datargsrwxxqkwargc\"}},\"parameters\":{\"gflhdhoxur\":{\"type\":\"Int\",\"defaultValue\":\"dataiqexqwqykmv\"},\"vmbgyvxhfmu\":{\"type\":\"Float\",\"defaultValue\":\"datankvthwta\"},\"ucqico\":{\"type\":\"Object\",\"defaultValue\":\"dataz\"},\"utpdwneec\":{\"type\":\"Int\",\"defaultValue\":\"datavbeqzjdwx\"}},\"annotations\":[\"datalxug\",\"datarwvn\",\"datafaofkvfruxz\"],\"folder\":{\"name\":\"vhgykz\"},\"\":{\"gubqkfnoxhvoyj\":\"datatvymdqaymqmyrn\",\"xwejuguvnx\":\"datagfkrqsjrvpakxrde\",\"yfenrozoijoxcbpk\":\"dataohpzurnzoytkbea\",\"pan\":\"datawseacbtaxd\"}}") .toObject(SparkObjectDataset.class); - Assertions.assertEquals("bnasgfyxhsxcg", model.description()); - Assertions.assertEquals("ehojvmazu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("uyrgcaygumqeo").type()); - Assertions.assertEquals("fkxnwt", model.folder().name()); + Assertions.assertEquals("aaoylwhfmkbwe", model.description()); + Assertions.assertEquals("nzi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("gflhdhoxur").type()); + Assertions.assertEquals("vhgykz", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SparkObjectDataset model - = new SparkObjectDataset().withDescription("bnasgfyxhsxcg").withStructure("datam").withSchema("datapqcnxs") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ehojvmazu") - .withParameters(mapOf("hpdnc", "datapiuu", "h", "datakqrgiv"))) - .withParameters(mapOf("uyrgcaygumqeo", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalyhbjfnmmibgwc"), - "sawha", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datareud"), - "zmfk", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datas"), "k", - new 
ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datag"))) - .withAnnotations(Arrays.asList("datakcge", "datanubr")) - .withFolder(new DatasetFolder().withName("fkxnwt")).withTableName("datandmtqvmkmzvag") - .withTable("datafblsxyfqgtodg").withSchemaTypePropertiesSchema("datalefmizdcsr"); + SparkObjectDataset model = new SparkObjectDataset().withDescription("aaoylwhfmkbwe") + .withStructure("datagypjixdmobadydw") + .withSchema("datauwdvclsxdqdc") + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("nzi").withParameters(mapOf("gd", "datargsrwxxqkwargc"))) + .withParameters(mapOf("gflhdhoxur", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataiqexqwqykmv"), + "vmbgyvxhfmu", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datankvthwta"), "ucqico", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataz"), "utpdwneec", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datavbeqzjdwx"))) + .withAnnotations(Arrays.asList("datalxug", "datarwvn", "datafaofkvfruxz")) + .withFolder(new DatasetFolder().withName("vhgykz")) + .withTableName("datarchmyuc") + .withTable("datamwyvhdiyp") + .withSchemaTypePropertiesSchema("dataqqq"); model = BinaryData.fromObject(model).toObject(SparkObjectDataset.class); - Assertions.assertEquals("bnasgfyxhsxcg", model.description()); - Assertions.assertEquals("ehojvmazu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("uyrgcaygumqeo").type()); - Assertions.assertEquals("fkxnwt", model.folder().name()); + Assertions.assertEquals("aaoylwhfmkbwe", model.description()); + Assertions.assertEquals("nzi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("gflhdhoxur").type()); + Assertions.assertEquals("vhgykz", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java index 9f466b1ce7e23..09d7b5c791762 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java @@ -11,15 +11,19 @@ public final class SparkSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SparkSource model = BinaryData.fromString( - "{\"type\":\"SparkSource\",\"query\":\"datayddijfkktigisee\",\"queryTimeout\":\"datazrerxyds\",\"additionalColumns\":\"datapn\",\"sourceRetryCount\":\"dataarkjt\",\"sourceRetryWait\":\"dataaczkjkfakgrwt\",\"maxConcurrentConnections\":\"datasfanmjmpce\",\"disableMetricsCollection\":\"datamfdylvpyhhgqysz\",\"\":{\"jekolnylpyk\":\"datajzhvej\",\"aouyaanfxai\":\"datapa\"}}") + "{\"type\":\"d\",\"query\":\"datartxggmp\",\"queryTimeout\":\"dataukn\",\"additionalColumns\":\"datanjhywgziqcwn\",\"sourceRetryCount\":\"dataptln\",\"sourceRetryWait\":\"datal\",\"maxConcurrentConnections\":\"datawbneszxnmgeuoih\",\"disableMetricsCollection\":\"datakfiwpgxyavcbdsu\",\"\":{\"p\":\"datavbhcjfgxtljyrey\",\"ivhk\":\"datatwzr\"}}") .toObject(SparkSource.class); } 
@org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SparkSource model = new SparkSource().withSourceRetryCount("dataarkjt").withSourceRetryWait("dataaczkjkfakgrwt") - .withMaxConcurrentConnections("datasfanmjmpce").withDisableMetricsCollection("datamfdylvpyhhgqysz") - .withQueryTimeout("datazrerxyds").withAdditionalColumns("datapn").withQuery("datayddijfkktigisee"); + SparkSource model = new SparkSource().withSourceRetryCount("dataptln") + .withSourceRetryWait("datal") + .withMaxConcurrentConnections("datawbneszxnmgeuoih") + .withDisableMetricsCollection("datakfiwpgxyavcbdsu") + .withQueryTimeout("dataukn") + .withAdditionalColumns("datanjhywgziqcwn") + .withQuery("datartxggmp"); model = BinaryData.fromObject(model).toObject(SparkSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java index 1900a67d55043..c4346dbba86fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java @@ -12,19 +12,26 @@ public final class SqlDWSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlDWSource model = BinaryData.fromString( - "{\"type\":\"SqlDWSource\",\"sqlReaderQuery\":\"dataahijbjjlx\",\"sqlReaderStoredProcedureName\":\"datajzbggsnanojty\",\"storedProcedureParameters\":\"datahzxzazofr\",\"isolationLevel\":\"datasxjdgaimk\",\"partitionOption\":\"datasowszb\",\"partitionSettings\":{\"partitionColumnName\":\"datahxikrgokyngarwz\",\"partitionUpperBound\":\"datazjxgassmna\",\"partitionLowerBound\":\"datapolueylqysgmiix\"},\"queryTimeout\":\"dataekcwec\",\"additionalColumns\":\"datatkdginm\",\"sourceRetryCount\":\"datagp\",\"sourceRetryWait\":\"dataqqcceyowrwvbqv\",\"maxConcurrentConnections\":\"dataqgqrsopqgiqf\",\"disableMetricsCollection\":\"datatl\",\"\":{\"hfaabibvslo\":\"datazcgugslpvyktf\",\"jzashhiztfmibwzu\":\"datadkpvvkqlkhdxn\",\"wtbfxxsfjn\":\"dataydajck\"}}") + "{\"type\":\"imbchi\",\"sqlReaderQuery\":\"datawzttkhyrwdsnpuoa\",\"sqlReaderStoredProcedureName\":\"datafpizybpjypqat\",\"storedProcedureParameters\":\"dataghw\",\"isolationLevel\":\"datawrbmxw\",\"partitionOption\":\"datakkffwf\",\"partitionSettings\":{\"partitionColumnName\":\"datae\",\"partitionUpperBound\":\"datamhsqyjsw\",\"partitionLowerBound\":\"datahafcoayuq\"},\"queryTimeout\":\"datajtsemnidbaykvlrs\",\"additionalColumns\":\"dataniocyo\",\"sourceRetryCount\":\"datawaffsjqn\",\"sourceRetryWait\":\"datacybugojzcarg\",\"maxConcurrentConnections\":\"datamaw\",\"disableMetricsCollection\":\"dataaagazryyjjwggpc\",\"\":{\"sj\":\"datawddobp\",\"gwkqnmhfml\":\"dataexedzmmcgqifhyhz\",\"qkdmpf\":\"dataorvhthxcrwe\",\"veibntwikm\":\"datacrcelsnjftnfdcj\"}}") .toObject(SqlDWSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlDWSource model = new SqlDWSource().withSourceRetryCount("datagp").withSourceRetryWait("dataqqcceyowrwvbqv") - .withMaxConcurrentConnections("dataqgqrsopqgiqf").withDisableMetricsCollection("datatl") - .withQueryTimeout("dataekcwec").withAdditionalColumns("datatkdginm").withSqlReaderQuery("dataahijbjjlx") - 
.withSqlReaderStoredProcedureName("datajzbggsnanojty").withStoredProcedureParameters("datahzxzazofr") - .withIsolationLevel("datasxjdgaimk").withPartitionOption("datasowszb") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datahxikrgokyngarwz") - .withPartitionUpperBound("datazjxgassmna").withPartitionLowerBound("datapolueylqysgmiix")); + SqlDWSource model = new SqlDWSource().withSourceRetryCount("datawaffsjqn") + .withSourceRetryWait("datacybugojzcarg") + .withMaxConcurrentConnections("datamaw") + .withDisableMetricsCollection("dataaagazryyjjwggpc") + .withQueryTimeout("datajtsemnidbaykvlrs") + .withAdditionalColumns("dataniocyo") + .withSqlReaderQuery("datawzttkhyrwdsnpuoa") + .withSqlReaderStoredProcedureName("datafpizybpjypqat") + .withStoredProcedureParameters("dataghw") + .withIsolationLevel("datawrbmxw") + .withPartitionOption("datakkffwf") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datae") + .withPartitionUpperBound("datamhsqyjsw") + .withPartitionLowerBound("datahafcoayuq")); model = BinaryData.fromObject(model).toObject(SqlDWSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java index 048758c511a80..4d17179cbb9fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java @@ -12,20 +12,27 @@ public final class SqlMISourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlMISource model = BinaryData.fromString( - "{\"type\":\"SqlMISource\",\"sqlReaderQuery\":\"dataxhfttm\",\"sqlReaderStoredProcedureName\":\"datamuwl\",\"storedProcedureParameters\":\"datajwkpznsfbi\",\"isolationLevel\":\"datafzgpvdlx\",\"produceAdditionalTypes\":\"dataotclcuxzllnwmgqc\",\"partitionOption\":\"datagjequox\",\"partitionSettings\":{\"partitionColumnName\":\"datafspwhfhdguuvga\",\"partitionUpperBound\":\"datazvd\",\"partitionLowerBound\":\"datatqzxemqnwpwr\"},\"queryTimeout\":\"datat\",\"additionalColumns\":\"datag\",\"sourceRetryCount\":\"dataeufhkoernrjm\",\"sourceRetryWait\":\"dataha\",\"maxConcurrentConnections\":\"dataen\",\"disableMetricsCollection\":\"dataqjvdde\",\"\":{\"wk\":\"datarjhtpxydiuviu\",\"suhozihd\":\"datansyrrybdyqivk\",\"hwcpijgas\":\"dataqdjw\",\"vzbdhrcepanhy\":\"dataafdjinwgirnjgs\"}}") + "{\"type\":\"ltbpqjfoujeiagny\",\"sqlReaderQuery\":\"dataekmgpseassdqpwhp\",\"sqlReaderStoredProcedureName\":\"datadosfgbvsozjf\",\"storedProcedureParameters\":\"datawxcjciotlbpuemqe\",\"isolationLevel\":\"datao\",\"produceAdditionalTypes\":\"datavhhedc\",\"partitionOption\":\"datalycrldwccas\",\"partitionSettings\":{\"partitionColumnName\":\"datadvsorvhbyg\",\"partitionUpperBound\":\"datalxqlzzkbx\",\"partitionLowerBound\":\"datacgg\"},\"queryTimeout\":\"datakghlexvq\",\"additionalColumns\":\"datanwmokz\",\"sourceRetryCount\":\"datafjssayrwyf\",\"sourceRetryWait\":\"datatezxr\",\"maxConcurrentConnections\":\"datadhzwdyva\",\"disableMetricsCollection\":\"datav\",\"\":{\"qlshg\":\"dataexwhoscinpmvcvnm\",\"elazvcfhiayro\":\"dataym\"}}") .toObject(SqlMISource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws 
Exception { - SqlMISource model = new SqlMISource().withSourceRetryCount("dataeufhkoernrjm").withSourceRetryWait("dataha") - .withMaxConcurrentConnections("dataen").withDisableMetricsCollection("dataqjvdde").withQueryTimeout("datat") - .withAdditionalColumns("datag").withSqlReaderQuery("dataxhfttm") - .withSqlReaderStoredProcedureName("datamuwl").withStoredProcedureParameters("datajwkpznsfbi") - .withIsolationLevel("datafzgpvdlx").withProduceAdditionalTypes("dataotclcuxzllnwmgqc") - .withPartitionOption("datagjequox") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datafspwhfhdguuvga") - .withPartitionUpperBound("datazvd").withPartitionLowerBound("datatqzxemqnwpwr")); + SqlMISource model = new SqlMISource().withSourceRetryCount("datafjssayrwyf") + .withSourceRetryWait("datatezxr") + .withMaxConcurrentConnections("datadhzwdyva") + .withDisableMetricsCollection("datav") + .withQueryTimeout("datakghlexvq") + .withAdditionalColumns("datanwmokz") + .withSqlReaderQuery("dataekmgpseassdqpwhp") + .withSqlReaderStoredProcedureName("datadosfgbvsozjf") + .withStoredProcedureParameters("datawxcjciotlbpuemqe") + .withIsolationLevel("datao") + .withProduceAdditionalTypes("datavhhedc") + .withPartitionOption("datalycrldwccas") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datadvsorvhbyg") + .withPartitionUpperBound("datalxqlzzkbx") + .withPartitionLowerBound("datacgg")); model = BinaryData.fromObject(model).toObject(SqlMISource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java index 1c0d4ec7a2473..47399b368ef04 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java @@ -11,14 +11,15 @@ public final class SqlPartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"dataokqeuzslny\",\"partitionUpperBound\":\"datauywijnlpeczq\",\"partitionLowerBound\":\"datamzkqydthf\"}") + "{\"partitionColumnName\":\"datazrrapimtuojq\",\"partitionUpperBound\":\"datayymyy\",\"partitionLowerBound\":\"datafdkjykvezso\"}") .toObject(SqlPartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlPartitionSettings model = new SqlPartitionSettings().withPartitionColumnName("dataokqeuzslny") - .withPartitionUpperBound("datauywijnlpeczq").withPartitionLowerBound("datamzkqydthf"); + SqlPartitionSettings model = new SqlPartitionSettings().withPartitionColumnName("datazrrapimtuojq") + .withPartitionUpperBound("datayymyy") + .withPartitionLowerBound("datafdkjykvezso"); model = BinaryData.fromObject(model).toObject(SqlPartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java index e2e5e955bfadc..26becd0c10721 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java @@ -12,21 +12,27 @@ public final class SqlServerSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlServerSource model = BinaryData.fromString( - "{\"type\":\"SqlServerSource\",\"sqlReaderQuery\":\"dataycmwvphrwuf\",\"sqlReaderStoredProcedureName\":\"dataov\",\"storedProcedureParameters\":\"dataisqlekc\",\"isolationLevel\":\"datadhlskeifwqtcownx\",\"produceAdditionalTypes\":\"datapptv\",\"partitionOption\":\"datadbnu\",\"partitionSettings\":{\"partitionColumnName\":\"datallyjelnhmuzhxk\",\"partitionUpperBound\":\"datazxk\",\"partitionLowerBound\":\"datal\"},\"queryTimeout\":\"datakbamahnwgccgblep\",\"additionalColumns\":\"datavl\",\"sourceRetryCount\":\"dataxdaoj\",\"sourceRetryWait\":\"datalqoxwqlnxvnmrl\",\"maxConcurrentConnections\":\"datajzya\",\"disableMetricsCollection\":\"datafecwnufldzjc\",\"\":{\"hfejgpef\":\"datajbzp\",\"shtujaqpkupnr\":\"databoxvwtlnv\"}}") + "{\"type\":\"oyyfysn\",\"sqlReaderQuery\":\"datazldplamcc\",\"sqlReaderStoredProcedureName\":\"datajrnveqleozqqwia\",\"storedProcedureParameters\":\"datawzyvbu\",\"isolationLevel\":\"datahysatoplqc\",\"produceAdditionalTypes\":\"datasrlzwuqkprf\",\"partitionOption\":\"datacowtoqfwbsbkob\",\"partitionSettings\":{\"partitionColumnName\":\"datasjhwhfcxwrj\",\"partitionUpperBound\":\"dataxmrsetttulswaj\",\"partitionLowerBound\":\"dataespfgmosiskih\"},\"queryTimeout\":\"datazgmxqa\",\"additionalColumns\":\"dataypxgoypo\",\"sourceRetryCount\":\"datajnl\",\"sourceRetryWait\":\"datacmhonojese\",\"maxConcurrentConnections\":\"dataxel\",\"disableMetricsCollection\":\"dataxwmpziy\",\"\":{\"wpcutzlvx\":\"datajswedkfofyfwpu\",\"vddwgozr\":\"dataolvedzrjkrpor\",\"dyhcwcgvyuuse\":\"dataglkmgcxmkrldfo\"}}") .toObject(SqlServerSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlServerSource model - = new SqlServerSource().withSourceRetryCount("dataxdaoj").withSourceRetryWait("datalqoxwqlnxvnmrl") - .withMaxConcurrentConnections("datajzya").withDisableMetricsCollection("datafecwnufldzjc") - .withQueryTimeout("datakbamahnwgccgblep").withAdditionalColumns("datavl") - .withSqlReaderQuery("dataycmwvphrwuf").withSqlReaderStoredProcedureName("dataov") - .withStoredProcedureParameters("dataisqlekc").withIsolationLevel("datadhlskeifwqtcownx") - .withProduceAdditionalTypes("datapptv").withPartitionOption("datadbnu") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datallyjelnhmuzhxk") - .withPartitionUpperBound("datazxk").withPartitionLowerBound("datal")); + SqlServerSource model = new SqlServerSource().withSourceRetryCount("datajnl") + .withSourceRetryWait("datacmhonojese") + .withMaxConcurrentConnections("dataxel") + .withDisableMetricsCollection("dataxwmpziy") + .withQueryTimeout("datazgmxqa") + .withAdditionalColumns("dataypxgoypo") + .withSqlReaderQuery("datazldplamcc") + .withSqlReaderStoredProcedureName("datajrnveqleozqqwia") + .withStoredProcedureParameters("datawzyvbu") + .withIsolationLevel("datahysatoplqc") + .withProduceAdditionalTypes("datasrlzwuqkprf") + .withPartitionOption("datacowtoqfwbsbkob") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datasjhwhfcxwrj") + .withPartitionUpperBound("dataxmrsetttulswaj") + 
.withPartitionLowerBound("dataespfgmosiskih")); model = BinaryData.fromObject(model).toObject(SqlServerSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java index 85d71d0b6f39d..e0853f5fb5a0f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java @@ -22,50 +22,62 @@ public final class SqlServerStoredProcedureActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlServerStoredProcedureActivity model = BinaryData.fromString( - "{\"type\":\"SqlServerStoredProcedure\",\"typeProperties\":{\"storedProcedureName\":\"datazclnqexlnpwpw\",\"storedProcedureParameters\":\"datajsjkondrkncfoq\"},\"linkedServiceName\":{\"referenceName\":\"pslc\",\"parameters\":{\"ppwoli\":\"datawrsfdpikxsggaeg\",\"ophcwzdwvy\":\"dataflj\",\"vy\":\"datazo\",\"prnqjxsexzxbiwn\":\"datainmywjcfkmfoztwm\"}},\"policy\":{\"timeout\":\"dataqtbztogihpy\",\"retry\":\"datadryesgalspar\",\"retryIntervalInSeconds\":1336549807,\"secureInput\":true,\"secureOutput\":false,\"\":{\"nosblczctwacbn\":\"dataemkzeol\",\"mufhz\":\"datakpdcvjhykptcijun\"}},\"name\":\"cqhtlqr\",\"description\":\"rfxrg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"acavzadybhydlqfx\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Failed\"],\"\":{\"haoviwuttlmfcn\":\"datahnsaespzwgpjri\",\"pihqmmmbokdqkf\":\"datatgai\",\"uwrvg\":\"datapfzxniv\",\"rlkgpipwtrt\":\"datal\"}}],\"userProperties\":[{\"name\":\"ipbddhfkjsqq\",\"value\":\"dataunoa\"},{\"name\":\"zkefz\",\"value\":\"datauyhvaovoqonqjlpc\"},{\"name\":\"yqiytrhhmld\",\"value\":\"datatyz\"}],\"\":{\"lkfg\":\"datast\",\"fe\":\"dataovbbcsb\",\"chfssbqwvr\":\"datamcprg\",\"qipfrrvngill\":\"datagvxhw\"}}") + 
"{\"type\":\"npodsqilmvxpfay\",\"typeProperties\":{\"storedProcedureName\":\"datayzacjxczjosixter\",\"storedProcedureParameters\":\"datajkhtmmkmezlhmt\"},\"linkedServiceName\":{\"referenceName\":\"caynhzmzi\",\"parameters\":{\"qqlpphotbsgkliu\":\"datawpitwlel\",\"zchpjh\":\"datatxfzhvxqotwcfbqz\",\"xqkm\":\"datahyxxftrfwmxwjc\"}},\"policy\":{\"timeout\":\"datauleofdxznopkd\",\"retry\":\"datafeutvqgnugiiyc\",\"retryIntervalInSeconds\":1621828202,\"secureInput\":true,\"secureOutput\":false,\"\":{\"hgi\":\"datanbrfscrabrqdb\",\"yxntuzgceuzhp\":\"datazrruvnlubkbgfzii\"}},\"name\":\"phpud\",\"description\":\"aqhbqvb\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"rwiivekrgvzjtvj\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Completed\"],\"\":{\"cdoat\":\"dataz\",\"mijraei\":\"datanktheh\"}},{\"activity\":\"v\",\"dependencyConditions\":[\"Failed\"],\"\":{\"nroxgwqgbvnctcbm\":\"datakl\",\"twkzfp\":\"dataecozvxdb\"}},{\"activity\":\"wjvjuixbtkuv\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\",\"Failed\"],\"\":{\"jnxznlxcjk\":\"dataegxdzopfkzj\"}}],\"userProperties\":[{\"name\":\"bntqvl\",\"value\":\"datatqsbmurbl\"},{\"name\":\"tvsxnaothlrhjikt\",\"value\":\"datafjylh\"}],\"\":{\"egyh\":\"databaowclbzn\",\"d\":\"dataucpix\",\"rhqbpfvhnh\":\"dataicpchbcbdpy\"}}") .toObject(SqlServerStoredProcedureActivity.class); - Assertions.assertEquals("cqhtlqr", model.name()); - Assertions.assertEquals("rfxrg", model.description()); + Assertions.assertEquals("phpud", model.name()); + Assertions.assertEquals("aqhbqvb", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("acavzadybhydlqfx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ipbddhfkjsqq", model.userProperties().get(0).name()); - Assertions.assertEquals("pslc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1336549807, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("rwiivekrgvzjtvj", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("bntqvl", model.userProperties().get(0).name()); + Assertions.assertEquals("caynhzmzi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1621828202, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, model.policy().secureOutput()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlServerStoredProcedureActivity model - = new SqlServerStoredProcedureActivity().withName("cqhtlqr").withDescription("rfxrg") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("acavzadybhydlqfx") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + SqlServerStoredProcedureActivity model = new SqlServerStoredProcedureActivity().withName("phpud") + .withDescription("aqhbqvb") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("rwiivekrgvzjtvj") + 
.withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("v") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wjvjuixbtkuv") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ipbddhfkjsqq").withValue("dataunoa"), - new UserProperty().withName("zkefz").withValue("datauyhvaovoqonqjlpc"), - new UserProperty().withName("yqiytrhhmld").withValue("datatyz"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pslc") - .withParameters(mapOf("ppwoli", "datawrsfdpikxsggaeg", "ophcwzdwvy", "dataflj", "vy", "datazo", - "prnqjxsexzxbiwn", "datainmywjcfkmfoztwm"))) - .withPolicy(new ActivityPolicy().withTimeout("dataqtbztogihpy").withRetry("datadryesgalspar") - .withRetryIntervalInSeconds(1336549807).withSecureInput(true).withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withStoredProcedureName("datazclnqexlnpwpw").withStoredProcedureParameters("datajsjkondrkncfoq"); + .withUserProperties(Arrays.asList(new UserProperty().withName("bntqvl").withValue("datatqsbmurbl"), + new UserProperty().withName("tvsxnaothlrhjikt").withValue("datafjylh"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("caynhzmzi") + .withParameters(mapOf("qqlpphotbsgkliu", "datawpitwlel", "zchpjh", "datatxfzhvxqotwcfbqz", "xqkm", + "datahyxxftrfwmxwjc"))) + .withPolicy(new ActivityPolicy().withTimeout("datauleofdxznopkd") + .withRetry("datafeutvqgnugiiyc") + .withRetryIntervalInSeconds(1621828202) + .withSecureInput(true) + .withSecureOutput(false) + .withAdditionalProperties(mapOf())) + .withStoredProcedureName("datayzacjxczjosixter") + .withStoredProcedureParameters("datajkhtmmkmezlhmt"); model = BinaryData.fromObject(model).toObject(SqlServerStoredProcedureActivity.class); - Assertions.assertEquals("cqhtlqr", model.name()); - Assertions.assertEquals("rfxrg", model.description()); + Assertions.assertEquals("phpud", model.name()); + Assertions.assertEquals("aqhbqvb", model.description()); Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("acavzadybhydlqfx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ipbddhfkjsqq", model.userProperties().get(0).name()); - Assertions.assertEquals("pslc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1336549807, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals("rwiivekrgvzjtvj", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("bntqvl", model.userProperties().get(0).name()); + Assertions.assertEquals("caynhzmzi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1621828202, model.policy().retryIntervalInSeconds()); Assertions.assertEquals(true, model.policy().secureInput()); Assertions.assertEquals(false, 
model.policy().secureOutput()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java index ebcc3f11d9c99..45e5645f8a92a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java @@ -11,14 +11,16 @@ public final class SqlServerStoredProcedureActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlServerStoredProcedureActivityTypeProperties model = BinaryData - .fromString("{\"storedProcedureName\":\"datamfbl\",\"storedProcedureParameters\":\"dataekoux\"}") + .fromString( + "{\"storedProcedureName\":\"dataxcwsnhszmuvarea\",\"storedProcedureParameters\":\"dataxdamnmnmqy\"}") .toObject(SqlServerStoredProcedureActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlServerStoredProcedureActivityTypeProperties model = new SqlServerStoredProcedureActivityTypeProperties() - .withStoredProcedureName("datamfbl").withStoredProcedureParameters("dataekoux"); + SqlServerStoredProcedureActivityTypeProperties model + = new SqlServerStoredProcedureActivityTypeProperties().withStoredProcedureName("dataxcwsnhszmuvarea") + .withStoredProcedureParameters("dataxdamnmnmqy"); model = BinaryData.fromObject(model).toObject(SqlServerStoredProcedureActivityTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java index 3d75ed9c13f41..646b91cb46e4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java @@ -19,31 +19,35 @@ public final class SqlServerTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlServerTableDataset model = BinaryData.fromString( - "{\"type\":\"SqlServerTable\",\"typeProperties\":{\"tableName\":\"dataszfutgpbygbnbc\",\"schema\":\"dataiqgtzpv\",\"table\":\"datawfl\"},\"description\":\"hxzuxerxhyw\",\"structure\":\"datakqsqvvdkfp\",\"schema\":\"datadajdqxymxxyfr\",\"linkedServiceName\":{\"referenceName\":\"j\",\"parameters\":{\"ld\":\"dataetfvgwfw\",\"rsnxfrp\":\"datagwouppvyddqsvc\",\"xzxlcqzfxa\":\"datawwqclmdmtfxxepz\"}},\"parameters\":{\"smkir\":{\"type\":\"SecureString\",\"defaultValue\":\"datacj\"},\"hkcomeobw\":{\"type\":\"Bool\",\"defaultValue\":\"dataipud\"}},\"annotations\":[\"datazltenlbfxl\",\"dataxozesn\"],\"folder\":{\"name\":\"uomtxj\"},\"\":{\"wis\":\"dataxymckikkqyvur\",\"ktehognsdd\":\"datayfmrzcqfevnkyak\"}}") + 
"{\"type\":\"ptlsrvqzgaqs\",\"typeProperties\":{\"tableName\":\"dataiuokglts\",\"schema\":\"dataoiobh\",\"table\":\"datab\"},\"description\":\"r\",\"structure\":\"datalvgrghnhuoxrqhjn\",\"schema\":\"datapesw\",\"linkedServiceName\":{\"referenceName\":\"nhqkgebzqz\",\"parameters\":{\"aosxsxoxvimdvetq\":\"dataviujojzdvms\",\"u\":\"datadbitqsbyujsgomri\",\"xgfygfkgxbd\":\"datagrmsdbv\"}},\"parameters\":{\"porrvkxtfctane\":{\"type\":\"SecureString\",\"defaultValue\":\"datawbdpsesboynpy\"}},\"annotations\":[\"dataqxdhnpjnezji\"],\"folder\":{\"name\":\"umltpmrzwv\"},\"\":{\"uqhngqqxjbsoto\":\"dataqffuxvf\"}}") .toObject(SqlServerTableDataset.class); - Assertions.assertEquals("hxzuxerxhyw", model.description()); - Assertions.assertEquals("j", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("smkir").type()); - Assertions.assertEquals("uomtxj", model.folder().name()); + Assertions.assertEquals("r", model.description()); + Assertions.assertEquals("nhqkgebzqz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("porrvkxtfctane").type()); + Assertions.assertEquals("umltpmrzwv", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlServerTableDataset model = new SqlServerTableDataset().withDescription("hxzuxerxhyw") - .withStructure("datakqsqvvdkfp").withSchema("datadajdqxymxxyfr") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("j").withParameters( - mapOf("ld", "dataetfvgwfw", "rsnxfrp", "datagwouppvyddqsvc", "xzxlcqzfxa", "datawwqclmdmtfxxepz"))) - .withParameters(mapOf("smkir", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacj"), - "hkcomeobw", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataipud"))) - .withAnnotations(Arrays.asList("datazltenlbfxl", "dataxozesn")) - .withFolder(new DatasetFolder().withName("uomtxj")).withTableName("dataszfutgpbygbnbc") - .withSchemaTypePropertiesSchema("dataiqgtzpv").withTable("datawfl"); + SqlServerTableDataset model = new SqlServerTableDataset().withDescription("r") + .withStructure("datalvgrghnhuoxrqhjn") + .withSchema("datapesw") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("nhqkgebzqz") + .withParameters(mapOf("aosxsxoxvimdvetq", "dataviujojzdvms", "u", "datadbitqsbyujsgomri", "xgfygfkgxbd", + "datagrmsdbv"))) + .withParameters(mapOf("porrvkxtfctane", + new ParameterSpecification().withType(ParameterType.SECURE_STRING) + .withDefaultValue("datawbdpsesboynpy"))) + .withAnnotations(Arrays.asList("dataqxdhnpjnezji")) + .withFolder(new DatasetFolder().withName("umltpmrzwv")) + .withTableName("dataiuokglts") + .withSchemaTypePropertiesSchema("dataoiobh") + .withTable("datab"); model = BinaryData.fromObject(model).toObject(SqlServerTableDataset.class); - Assertions.assertEquals("hxzuxerxhyw", model.description()); - Assertions.assertEquals("j", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("smkir").type()); - Assertions.assertEquals("uomtxj", model.folder().name()); + Assertions.assertEquals("r", model.description()); + Assertions.assertEquals("nhqkgebzqz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("porrvkxtfctane").type()); + Assertions.assertEquals("umltpmrzwv", model.folder().name()); } // 
Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java index 9209c8ae57240..e079cb22aae9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java @@ -10,15 +10,17 @@ public final class SqlServerTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SqlServerTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datakde\",\"schema\":\"datasuaz\",\"table\":\"datafcnxc\"}") - .toObject(SqlServerTableDatasetTypeProperties.class); + SqlServerTableDatasetTypeProperties model = BinaryData + .fromString("{\"tableName\":\"datamrttujyd\",\"schema\":\"datatwxpxbxedhxbboc\",\"table\":\"datasramqc\"}") + .toObject(SqlServerTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlServerTableDatasetTypeProperties model = new SqlServerTableDatasetTypeProperties().withTableName("datakde") - .withSchema("datasuaz").withTable("datafcnxc"); + SqlServerTableDatasetTypeProperties model + = new SqlServerTableDatasetTypeProperties().withTableName("datamrttujyd") + .withSchema("datatwxpxbxedhxbboc") + .withTable("datasramqc"); model = BinaryData.fromObject(model).toObject(SqlServerTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java index 90699870d26ba..4b21527e9040b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java @@ -12,19 +12,26 @@ public final class SqlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SqlSource model = BinaryData.fromString( - "{\"type\":\"SqlSource\",\"sqlReaderQuery\":\"dataplo\",\"sqlReaderStoredProcedureName\":\"datahimvlocdxvhkobi\",\"storedProcedureParameters\":\"datahipntrddyr\",\"isolationLevel\":\"dataanv\",\"partitionOption\":\"datagmqscijlfulxgnza\",\"partitionSettings\":{\"partitionColumnName\":\"datamwsooq\",\"partitionUpperBound\":\"datavplmyzebvgh\",\"partitionLowerBound\":\"dataydehbvbexrbynnl\"},\"queryTimeout\":\"datad\",\"additionalColumns\":\"datak\",\"sourceRetryCount\":\"datazzsi\",\"sourceRetryWait\":\"databosacrnpscfkef\",\"maxConcurrentConnections\":\"datatxe\",\"disableMetricsCollection\":\"datamimgjuvjvtgece\",\"\":{\"oukfjwkctdn\":\"datanled\"}}") + 
"{\"type\":\"gqgcnkghgczjxo\",\"sqlReaderQuery\":\"dataqzd\",\"sqlReaderStoredProcedureName\":\"datamyutzttroymi\",\"storedProcedureParameters\":\"datakuz\",\"isolationLevel\":\"datacegyztzhcfuwm\",\"partitionOption\":\"dataz\",\"partitionSettings\":{\"partitionColumnName\":\"datak\",\"partitionUpperBound\":\"dataoogflhhos\",\"partitionLowerBound\":\"datablyokjwss\"},\"queryTimeout\":\"datardg\",\"additionalColumns\":\"dataihnzvoeh\",\"sourceRetryCount\":\"dataigdwpg\",\"sourceRetryWait\":\"dataqh\",\"maxConcurrentConnections\":\"dataexnwxqw\",\"disableMetricsCollection\":\"dataaqlymmhzvnetecfy\",\"\":{\"wfpoaf\":\"datak\",\"kssygdvllbb\":\"datagkzgzxqwvvfkqb\",\"xygrniqnxpsebazb\":\"datafulvhpwp\",\"qgn\":\"datayrjr\"}}") .toObject(SqlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SqlSource model = new SqlSource().withSourceRetryCount("datazzsi").withSourceRetryWait("databosacrnpscfkef") - .withMaxConcurrentConnections("datatxe").withDisableMetricsCollection("datamimgjuvjvtgece") - .withQueryTimeout("datad").withAdditionalColumns("datak").withSqlReaderQuery("dataplo") - .withSqlReaderStoredProcedureName("datahimvlocdxvhkobi").withStoredProcedureParameters("datahipntrddyr") - .withIsolationLevel("dataanv").withPartitionOption("datagmqscijlfulxgnza") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datamwsooq") - .withPartitionUpperBound("datavplmyzebvgh").withPartitionLowerBound("dataydehbvbexrbynnl")); + SqlSource model = new SqlSource().withSourceRetryCount("dataigdwpg") + .withSourceRetryWait("dataqh") + .withMaxConcurrentConnections("dataexnwxqw") + .withDisableMetricsCollection("dataaqlymmhzvnetecfy") + .withQueryTimeout("datardg") + .withAdditionalColumns("dataihnzvoeh") + .withSqlReaderQuery("dataqzd") + .withSqlReaderStoredProcedureName("datamyutzttroymi") + .withStoredProcedureParameters("datakuz") + .withIsolationLevel("datacegyztzhcfuwm") + .withPartitionOption("dataz") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datak") + .withPartitionUpperBound("dataoogflhhos") + .withPartitionLowerBound("datablyokjwss")); model = BinaryData.fromObject(model).toObject(SqlSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java index 3f9e2ff83e323..cb614e4d0c693 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java @@ -19,29 +19,35 @@ public final class SquareObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SquareObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"SquareObject\",\"typeProperties\":{\"tableName\":\"dataktdutydvvgkmo\"},\"description\":\"pcjes\",\"structure\":\"datavuztnsvmsh\",\"schema\":\"datagygfohrm\",\"linkedServiceName\":{\"referenceName\":\"hhlclpkr\",\"parameters\":{\"utivrfnztxtmrm\":\"databmjjv\",\"ii\":\"dataftj\",\"hfh\":\"dataohlgrjcx\"}},\"parameters\":{\"ylyumb\":{\"type\":\"Object\",\"defaultValue\":\"datawfogbv\"}},\"annotations\":[\"datarlnuom\",\"dataxhdkhmemx\"],\"folder\":{\"name\":\"apesnbyoullyfz\"},\"\":{\"g\":\"datarmxxjvwbat\",\"ommdzphxulx\":\"datakmwfwzlmpxfmdjs\"}}") + "{\"type\":\"cwbshfihvl\",\"typeProperties\":{\"tableName\":\"datauxkdiumgsivxw\"},\"description\":\"ceylaulpuexyigxz\",\"structure\":\"dataecxdslspgnndefyh\",\"schema\":\"datayhwl\",\"linkedServiceName\":{\"referenceName\":\"svsyltap\",\"parameters\":{\"cuhbgftfvqukkmvz\":\"datafkmvzrkpmonxdw\",\"dqrjylwqqsemjhh\":\"dataneg\",\"hztb\":\"datalsub\",\"zwufi\":\"datajrdzwyktdp\"}},\"parameters\":{\"kmcykxmysmk\":{\"type\":\"Float\",\"defaultValue\":\"datahmjkykqf\"},\"ig\":{\"type\":\"Float\",\"defaultValue\":\"datanrihpjaxhcbeejn\"},\"ardvdpfgwdxmia\":{\"type\":\"Array\",\"defaultValue\":\"datalkrnpsbnmrmhkip\"}},\"annotations\":[\"datapbie\",\"datal\"],\"folder\":{\"name\":\"dvjlpbjszqjfs\"},\"\":{\"wknsbgh\":\"dataaycx\",\"pzcyhk\":\"datapbrzwi\"}}") .toObject(SquareObjectDataset.class); - Assertions.assertEquals("pcjes", model.description()); - Assertions.assertEquals("hhlclpkr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ylyumb").type()); - Assertions.assertEquals("apesnbyoullyfz", model.folder().name()); + Assertions.assertEquals("ceylaulpuexyigxz", model.description()); + Assertions.assertEquals("svsyltap", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kmcykxmysmk").type()); + Assertions.assertEquals("dvjlpbjszqjfs", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SquareObjectDataset model = new SquareObjectDataset().withDescription("pcjes").withStructure("datavuztnsvmsh") - .withSchema("datagygfohrm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hhlclpkr") - .withParameters(mapOf("utivrfnztxtmrm", "databmjjv", "ii", "dataftj", "hfh", "dataohlgrjcx"))) - .withParameters(mapOf("ylyumb", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawfogbv"))) - .withAnnotations(Arrays.asList("datarlnuom", "dataxhdkhmemx")) - .withFolder(new DatasetFolder().withName("apesnbyoullyfz")).withTableName("dataktdutydvvgkmo"); + SquareObjectDataset model = new SquareObjectDataset().withDescription("ceylaulpuexyigxz") + .withStructure("dataecxdslspgnndefyh") + .withSchema("datayhwl") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("svsyltap") + .withParameters(mapOf("cuhbgftfvqukkmvz", "datafkmvzrkpmonxdw", "dqrjylwqqsemjhh", "dataneg", "hztb", + "datalsub", "zwufi", "datajrdzwyktdp"))) + .withParameters(mapOf("kmcykxmysmk", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datahmjkykqf"), "ig", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datanrihpjaxhcbeejn"), + "ardvdpfgwdxmia", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datalkrnpsbnmrmhkip"))) + .withAnnotations(Arrays.asList("datapbie", "datal")) + .withFolder(new DatasetFolder().withName("dvjlpbjszqjfs")) + 
.withTableName("datauxkdiumgsivxw"); model = BinaryData.fromObject(model).toObject(SquareObjectDataset.class); - Assertions.assertEquals("pcjes", model.description()); - Assertions.assertEquals("hhlclpkr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ylyumb").type()); - Assertions.assertEquals("apesnbyoullyfz", model.folder().name()); + Assertions.assertEquals("ceylaulpuexyigxz", model.description()); + Assertions.assertEquals("svsyltap", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kmcykxmysmk").type()); + Assertions.assertEquals("dvjlpbjszqjfs", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java index e1ba19606fc67..3369d38df377f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java @@ -11,15 +11,19 @@ public final class SquareSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SquareSource model = BinaryData.fromString( - "{\"type\":\"SquareSource\",\"query\":\"dataeycakkon\",\"queryTimeout\":\"datadpd\",\"additionalColumns\":\"datahadzyxaanhwuqewc\",\"sourceRetryCount\":\"datasksfbkxfkeeqo\",\"sourceRetryWait\":\"databek\",\"maxConcurrentConnections\":\"dataerwss\",\"disableMetricsCollection\":\"datamrpdjrylfpdudx\",\"\":{\"tqssngeviyffg\":\"dataeuriehxbanfsqfh\",\"hdapynpvgyaf\":\"datahrhjsps\"}}") + "{\"type\":\"nkhvpuqlh\",\"query\":\"datagephviue\",\"queryTimeout\":\"datayagaxruffkm\",\"additionalColumns\":\"datau\",\"sourceRetryCount\":\"databbitp\",\"sourceRetryWait\":\"datan\",\"maxConcurrentConnections\":\"datamsdgmxwfodvzpxmo\",\"disableMetricsCollection\":\"databvgieey\",\"\":{\"tbasvjodgplagwvg\":\"datan\",\"hinbsys\":\"datanxmqudnqcbb\",\"iyyzqdnrgnyb\":\"datalbfzkvrmdos\",\"mkhxunq\":\"dataqwjj\"}}") .toObject(SquareSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SquareSource model = new SquareSource().withSourceRetryCount("datasksfbkxfkeeqo").withSourceRetryWait("databek") - .withMaxConcurrentConnections("dataerwss").withDisableMetricsCollection("datamrpdjrylfpdudx") - .withQueryTimeout("datadpd").withAdditionalColumns("datahadzyxaanhwuqewc").withQuery("dataeycakkon"); + SquareSource model = new SquareSource().withSourceRetryCount("databbitp") + .withSourceRetryWait("datan") + .withMaxConcurrentConnections("datamsdgmxwfodvzpxmo") + .withDisableMetricsCollection("databvgieey") + .withQueryTimeout("datayagaxruffkm") + .withAdditionalColumns("datau") + .withQuery("datagephviue"); model = BinaryData.fromObject(model).toObject(SquareSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java index b6898898f7b34..0b99a216aed90 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java @@ -12,18 +12,20 @@ public final class SsisChildPackageTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisChildPackage model = BinaryData.fromString( - "{\"packagePath\":\"datadflckumjjpx\",\"packageName\":\"xabvx\",\"packageContent\":\"dataoagoeills\",\"packageLastModifiedDate\":\"gy\"}") + "{\"packagePath\":\"dataogbwhawref\",\"packageName\":\"tt\",\"packageContent\":\"datazlokttpmbxn\",\"packageLastModifiedDate\":\"bvhdbgnbcwfpgv\"}") .toObject(SsisChildPackage.class); - Assertions.assertEquals("xabvx", model.packageName()); - Assertions.assertEquals("gy", model.packageLastModifiedDate()); + Assertions.assertEquals("tt", model.packageName()); + Assertions.assertEquals("bvhdbgnbcwfpgv", model.packageLastModifiedDate()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisChildPackage model = new SsisChildPackage().withPackagePath("datadflckumjjpx").withPackageName("xabvx") - .withPackageContent("dataoagoeills").withPackageLastModifiedDate("gy"); + SsisChildPackage model = new SsisChildPackage().withPackagePath("dataogbwhawref") + .withPackageName("tt") + .withPackageContent("datazlokttpmbxn") + .withPackageLastModifiedDate("bvhdbgnbcwfpgv"); model = BinaryData.fromObject(model).toObject(SsisChildPackage.class); - Assertions.assertEquals("xabvx", model.packageName()); - Assertions.assertEquals("gy", model.packageLastModifiedDate()); + Assertions.assertEquals("tt", model.packageName()); + Assertions.assertEquals("bvhdbgnbcwfpgv", model.packageLastModifiedDate()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java index ebab0da2548f0..f3dc4bb04e3d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java @@ -12,22 +12,24 @@ public final class SsisEnvironmentReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisEnvironmentReference model = BinaryData.fromString( - "{\"id\":858241301939195859,\"environmentFolderName\":\"dulymk\",\"environmentName\":\"s\",\"referenceType\":\"h\"}") + "{\"id\":4444581189400066307,\"environmentFolderName\":\"gxgl\",\"environmentName\":\"deoubv\",\"referenceType\":\"iswemncjhmvvze\"}") .toObject(SsisEnvironmentReference.class); - Assertions.assertEquals(858241301939195859L, model.id()); - Assertions.assertEquals("dulymk", model.environmentFolderName()); - Assertions.assertEquals("s", model.environmentName()); - Assertions.assertEquals("h", model.referenceType()); + Assertions.assertEquals(4444581189400066307L, model.id()); + Assertions.assertEquals("gxgl", model.environmentFolderName()); + Assertions.assertEquals("deoubv", model.environmentName()); + Assertions.assertEquals("iswemncjhmvvze", model.referenceType()); } @org.junit.jupiter.api.Test public void 
testSerialize() throws Exception { - SsisEnvironmentReference model = new SsisEnvironmentReference().withId(858241301939195859L) - .withEnvironmentFolderName("dulymk").withEnvironmentName("s").withReferenceType("h"); + SsisEnvironmentReference model = new SsisEnvironmentReference().withId(4444581189400066307L) + .withEnvironmentFolderName("gxgl") + .withEnvironmentName("deoubv") + .withReferenceType("iswemncjhmvvze"); model = BinaryData.fromObject(model).toObject(SsisEnvironmentReference.class); - Assertions.assertEquals(858241301939195859L, model.id()); - Assertions.assertEquals("dulymk", model.environmentFolderName()); - Assertions.assertEquals("s", model.environmentName()); - Assertions.assertEquals("h", model.referenceType()); + Assertions.assertEquals(4444581189400066307L, model.id()); + Assertions.assertEquals("gxgl", model.environmentFolderName()); + Assertions.assertEquals("deoubv", model.environmentName()); + Assertions.assertEquals("iswemncjhmvvze", model.referenceType()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java deleted file mode 100644 index 3b668e83335df..0000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisEnvironment; -import com.azure.resourcemanager.datafactory.models.SsisVariable; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisEnvironmentTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisEnvironment model = BinaryData.fromString( - "{\"type\":\"Environment\",\"folderId\":832771683225896076,\"variables\":[{\"id\":3326667507931302810,\"name\":\"ejdhrodyiitredd\",\"description\":\"goppybs\",\"dataType\":\"kgaxmhaszjietfst\",\"sensitive\":true,\"value\":\"vzcnlk\",\"sensitiveValue\":\"rjtkreiso\"},{\"id\":8273566367613483537,\"name\":\"mgrbkobmgwa\",\"description\":\"mqpaalwidt\",\"dataType\":\"wedj\",\"sensitive\":true,\"value\":\"ppgijn\",\"sensitiveValue\":\"ba\"},{\"id\":8583337090822703542,\"name\":\"gaxpy\",\"description\":\"mccqdss\",\"dataType\":\"gersdud\",\"sensitive\":false,\"value\":\"mnfgzmxtxfuhxy\",\"sensitiveValue\":\"fyzevcknglf\"}],\"id\":926387397922110716,\"name\":\"fysffrpjf\",\"description\":\"yx\"}") - .toObject(SsisEnvironment.class); - Assertions.assertEquals(926387397922110716L, model.id()); - Assertions.assertEquals("fysffrpjf", model.name()); - Assertions.assertEquals("yx", model.description()); - Assertions.assertEquals(832771683225896076L, model.folderId()); - Assertions.assertEquals(3326667507931302810L, model.variables().get(0).id()); - Assertions.assertEquals("ejdhrodyiitredd", model.variables().get(0).name()); - Assertions.assertEquals("goppybs", model.variables().get(0).description()); - Assertions.assertEquals("kgaxmhaszjietfst", model.variables().get(0).dataType()); - Assertions.assertEquals(true, model.variables().get(0).sensitive()); - 
Assertions.assertEquals("vzcnlk", model.variables().get(0).value()); - Assertions.assertEquals("rjtkreiso", model.variables().get(0).sensitiveValue()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisEnvironment model = new SsisEnvironment().withId(926387397922110716L).withName("fysffrpjf") - .withDescription("yx").withFolderId(832771683225896076L) - .withVariables(Arrays.asList( - new SsisVariable().withId(3326667507931302810L).withName("ejdhrodyiitredd").withDescription("goppybs") - .withDataType("kgaxmhaszjietfst").withSensitive(true).withValue("vzcnlk") - .withSensitiveValue("rjtkreiso"), - new SsisVariable().withId(8273566367613483537L).withName("mgrbkobmgwa").withDescription("mqpaalwidt") - .withDataType("wedj").withSensitive(true).withValue("ppgijn").withSensitiveValue("ba"), - new SsisVariable().withId(8583337090822703542L).withName("gaxpy").withDescription("mccqdss") - .withDataType("gersdud").withSensitive(false).withValue("mnfgzmxtxfuhxy") - .withSensitiveValue("fyzevcknglf"))); - model = BinaryData.fromObject(model).toObject(SsisEnvironment.class); - Assertions.assertEquals(926387397922110716L, model.id()); - Assertions.assertEquals("fysffrpjf", model.name()); - Assertions.assertEquals("yx", model.description()); - Assertions.assertEquals(832771683225896076L, model.folderId()); - Assertions.assertEquals(3326667507931302810L, model.variables().get(0).id()); - Assertions.assertEquals("ejdhrodyiitredd", model.variables().get(0).name()); - Assertions.assertEquals("goppybs", model.variables().get(0).description()); - Assertions.assertEquals("kgaxmhaszjietfst", model.variables().get(0).dataType()); - Assertions.assertEquals(true, model.variables().get(0).sensitive()); - Assertions.assertEquals("vzcnlk", model.variables().get(0).value()); - Assertions.assertEquals("rjtkreiso", model.variables().get(0).sensitiveValue()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java index a364ceefc7880..1e4055f458969 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java @@ -11,12 +11,12 @@ public final class SsisExecutionParameterTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisExecutionParameter model - = BinaryData.fromString("{\"value\":\"datavwmybokqpfhs\"}").toObject(SsisExecutionParameter.class); + = BinaryData.fromString("{\"value\":\"datadqeuewgp\"}").toObject(SsisExecutionParameter.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisExecutionParameter model = new SsisExecutionParameter().withValue("datavwmybokqpfhs"); + SsisExecutionParameter model = new SsisExecutionParameter().withValue("datadqeuewgp"); model = BinaryData.fromObject(model).toObject(SsisExecutionParameter.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java 
index dd4f0210e6a03..c20b15228ca21 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java @@ -11,21 +11,20 @@ public final class SsisFolderTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SsisFolder model = BinaryData.fromString( - "{\"type\":\"Folder\",\"id\":9119871534508040877,\"name\":\"nzadqmvpehp\",\"description\":\"vkyqhrplfqqnw\"}") + SsisFolder model = BinaryData + .fromString("{\"type\":\"Folder\",\"id\":6236771935686026963,\"name\":\"edxgdju\",\"description\":\"k\"}") .toObject(SsisFolder.class); - Assertions.assertEquals(9119871534508040877L, model.id()); - Assertions.assertEquals("nzadqmvpehp", model.name()); - Assertions.assertEquals("vkyqhrplfqqnw", model.description()); + Assertions.assertEquals(6236771935686026963L, model.id()); + Assertions.assertEquals("edxgdju", model.name()); + Assertions.assertEquals("k", model.description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisFolder model - = new SsisFolder().withId(9119871534508040877L).withName("nzadqmvpehp").withDescription("vkyqhrplfqqnw"); + SsisFolder model = new SsisFolder().withId(6236771935686026963L).withName("edxgdju").withDescription("k"); model = BinaryData.fromObject(model).toObject(SsisFolder.class); - Assertions.assertEquals(9119871534508040877L, model.id()); - Assertions.assertEquals("nzadqmvpehp", model.name()); - Assertions.assertEquals("vkyqhrplfqqnw", model.description()); + Assertions.assertEquals(6236771935686026963L, model.id()); + Assertions.assertEquals("edxgdju", model.name()); + Assertions.assertEquals("k", model.description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java index 5eaa5550c4f80..3c76a31695cd8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java @@ -6,7 +6,9 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.fluent.models.SsisObjectMetadataListResponseInner; -import com.azure.resourcemanager.datafactory.models.SsisObjectMetadata; +import com.azure.resourcemanager.datafactory.models.SsisEnvironment; +import com.azure.resourcemanager.datafactory.models.SsisFolder; +import com.azure.resourcemanager.datafactory.models.SsisPackage; import java.util.Arrays; import org.junit.jupiter.api.Assertions; @@ -14,26 +16,29 @@ public final class SsisObjectMetadataListResponseInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisObjectMetadataListResponseInner model = BinaryData.fromString( - 
"{\"value\":[{\"type\":\"SsisObjectMetadata\",\"id\":1248291372184779781,\"name\":\"zvlvqhjkbegib\",\"description\":\"mxiebw\"},{\"type\":\"SsisObjectMetadata\",\"id\":9167362709610232735,\"name\":\"yqcgwrtzjuzgwy\",\"description\":\"htxongmtsavjc\"},{\"type\":\"SsisObjectMetadata\",\"id\":7752304449285326809,\"name\":\"p\",\"description\":\"knftguvriuh\"}],\"nextLink\":\"wmdyvxqtay\"}") + "{\"value\":[{\"type\":\"Folder\",\"id\":6035205555692229077,\"name\":\"cg\",\"description\":\"wndnhj\"},{\"type\":\"Environment\",\"id\":8777833208183910050,\"name\":\"l\",\"description\":\"btdhxujznbm\"},{\"type\":\"Package\",\"id\":3222705953592047297,\"name\":\"zqlveualupjmkhf\",\"description\":\"bbcswsrtjri\"},{\"type\":\"Environment\",\"id\":7508041016656443291,\"name\":\"wtgh\",\"description\":\"blcg\"}],\"nextLink\":\"zvlvqhjkbegib\"}") .toObject(SsisObjectMetadataListResponseInner.class); - Assertions.assertEquals(1248291372184779781L, model.value().get(0).id()); - Assertions.assertEquals("zvlvqhjkbegib", model.value().get(0).name()); - Assertions.assertEquals("mxiebw", model.value().get(0).description()); - Assertions.assertEquals("wmdyvxqtay", model.nextLink()); + Assertions.assertEquals(6035205555692229077L, model.value().get(0).id()); + Assertions.assertEquals("cg", model.value().get(0).name()); + Assertions.assertEquals("wndnhj", model.value().get(0).description()); + Assertions.assertEquals("zvlvqhjkbegib", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisObjectMetadataListResponseInner model = new SsisObjectMetadataListResponseInner().withValue(Arrays.asList( - new SsisObjectMetadata().withId(1248291372184779781L).withName("zvlvqhjkbegib").withDescription("mxiebw"), - new SsisObjectMetadata().withId(9167362709610232735L).withName("yqcgwrtzjuzgwy") - .withDescription("htxongmtsavjc"), - new SsisObjectMetadata().withId(7752304449285326809L).withName("p").withDescription("knftguvriuh"))) - .withNextLink("wmdyvxqtay"); + SsisObjectMetadataListResponseInner model = new SsisObjectMetadataListResponseInner() + .withValue( + Arrays.asList(new SsisFolder().withId(6035205555692229077L).withName("cg").withDescription("wndnhj"), + new SsisEnvironment().withId(8777833208183910050L).withName("l").withDescription("btdhxujznbm"), + new SsisPackage().withId(3222705953592047297L) + .withName("zqlveualupjmkhf") + .withDescription("bbcswsrtjri"), + new SsisEnvironment().withId(7508041016656443291L).withName("wtgh").withDescription("blcg"))) + .withNextLink("zvlvqhjkbegib"); model = BinaryData.fromObject(model).toObject(SsisObjectMetadataListResponseInner.class); - Assertions.assertEquals(1248291372184779781L, model.value().get(0).id()); - Assertions.assertEquals("zvlvqhjkbegib", model.value().get(0).name()); - Assertions.assertEquals("mxiebw", model.value().get(0).description()); - Assertions.assertEquals("wmdyvxqtay", model.nextLink()); + Assertions.assertEquals(6035205555692229077L, model.value().get(0).id()); + Assertions.assertEquals("cg", model.value().get(0).name()); + Assertions.assertEquals("wndnhj", model.value().get(0).description()); + Assertions.assertEquals("zvlvqhjkbegib", model.nextLink()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java index 
09b94f65cf2bd..4870398bfeeea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java @@ -13,22 +13,25 @@ public final class SsisObjectMetadataStatusResponseInnerTests { public void testDeserialize() throws Exception { SsisObjectMetadataStatusResponseInner model = BinaryData .fromString( - "{\"status\":\"tdhxujznbmpowuwp\",\"name\":\"qlveualupjmkh\",\"properties\":\"obbc\",\"error\":\"s\"}") + "{\"status\":\"civyhzceuo\",\"name\":\"jrwjueiotwm\",\"properties\":\"ytdxwit\",\"error\":\"rjaw\"}") .toObject(SsisObjectMetadataStatusResponseInner.class); - Assertions.assertEquals("tdhxujznbmpowuwp", model.status()); - Assertions.assertEquals("qlveualupjmkh", model.name()); - Assertions.assertEquals("obbc", model.properties()); - Assertions.assertEquals("s", model.error()); + Assertions.assertEquals("civyhzceuo", model.status()); + Assertions.assertEquals("jrwjueiotwm", model.name()); + Assertions.assertEquals("ytdxwit", model.properties()); + Assertions.assertEquals("rjaw", model.error()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisObjectMetadataStatusResponseInner model = new SsisObjectMetadataStatusResponseInner() - .withStatus("tdhxujznbmpowuwp").withName("qlveualupjmkh").withProperties("obbc").withError("s"); + SsisObjectMetadataStatusResponseInner model + = new SsisObjectMetadataStatusResponseInner().withStatus("civyhzceuo") + .withName("jrwjueiotwm") + .withProperties("ytdxwit") + .withError("rjaw"); model = BinaryData.fromObject(model).toObject(SsisObjectMetadataStatusResponseInner.class); - Assertions.assertEquals("tdhxujznbmpowuwp", model.status()); - Assertions.assertEquals("qlveualupjmkh", model.name()); - Assertions.assertEquals("obbc", model.properties()); - Assertions.assertEquals("s", model.error()); + Assertions.assertEquals("civyhzceuo", model.status()); + Assertions.assertEquals("jrwjueiotwm", model.name()); + Assertions.assertEquals("ytdxwit", model.properties()); + Assertions.assertEquals("rjaw", model.error()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java index 35212259da5e3..7ed1763cdc7d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java @@ -6,26 +6,27 @@ import com.azure.core.util.BinaryData; import com.azure.resourcemanager.datafactory.models.SsisObjectMetadata; +import com.azure.resourcemanager.datafactory.models.SsisProject; import org.junit.jupiter.api.Assertions; public final class SsisObjectMetadataTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisObjectMetadata model = BinaryData.fromString( - "{\"type\":\"SsisObjectMetadata\",\"id\":1536045625796396557,\"name\":\"oyq\",\"description\":\"xrmcqibycnojvk\"}") + 
"{\"type\":\"Project\",\"id\":8168827985153452849,\"name\":\"wwaloayqcgwrt\",\"description\":\"uzgwyzmhtx\"}") .toObject(SsisObjectMetadata.class); - Assertions.assertEquals(1536045625796396557L, model.id()); - Assertions.assertEquals("oyq", model.name()); - Assertions.assertEquals("xrmcqibycnojvk", model.description()); + Assertions.assertEquals(8168827985153452849L, model.id()); + Assertions.assertEquals("wwaloayqcgwrt", model.name()); + Assertions.assertEquals("uzgwyzmhtx", model.description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SsisObjectMetadata model - = new SsisObjectMetadata().withId(1536045625796396557L).withName("oyq").withDescription("xrmcqibycnojvk"); + = new SsisProject().withId(8168827985153452849L).withName("wwaloayqcgwrt").withDescription("uzgwyzmhtx"); model = BinaryData.fromObject(model).toObject(SsisObjectMetadata.class); - Assertions.assertEquals(1536045625796396557L, model.id()); - Assertions.assertEquals("oyq", model.name()); - Assertions.assertEquals("xrmcqibycnojvk", model.description()); + Assertions.assertEquals(8168827985153452849L, model.id()); + Assertions.assertEquals("wwaloayqcgwrt", model.name()); + Assertions.assertEquals("uzgwyzmhtx", model.description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java deleted file mode 100644 index 0c217943183a6..0000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisPackage; -import com.azure.resourcemanager.datafactory.models.SsisParameter; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisPackageTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisPackage model = BinaryData.fromString( - "{\"type\":\"Package\",\"folderId\":3556959886109984896,\"projectVersion\":4692852653848740418,\"projectId\":668264090121943233,\"parameters\":[{\"id\":525854716862013224,\"name\":\"ifywxjjylaqhx\",\"description\":\"fdarvjhwgkynxlw\",\"dataType\":\"gotdt\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"lh\",\"defaultValue\":\"ybfnkylzrignqlwo\",\"sensitiveDefaultValue\":\"nbjuaiu\",\"valueType\":\"mqcbnk\",\"valueSet\":true,\"variable\":\"i\"}],\"id\":4578072156366697994,\"name\":\"ryywyfcenkbfxqc\",\"description\":\"ggeciradmxokbutb\"}") - .toObject(SsisPackage.class); - Assertions.assertEquals(4578072156366697994L, model.id()); - Assertions.assertEquals("ryywyfcenkbfxqc", model.name()); - Assertions.assertEquals("ggeciradmxokbutb", model.description()); - Assertions.assertEquals(3556959886109984896L, model.folderId()); - Assertions.assertEquals(4692852653848740418L, model.projectVersion()); - Assertions.assertEquals(668264090121943233L, model.projectId()); - Assertions.assertEquals(525854716862013224L, model.parameters().get(0).id()); - Assertions.assertEquals("ifywxjjylaqhx", model.parameters().get(0).name()); - Assertions.assertEquals("fdarvjhwgkynxlw", model.parameters().get(0).description()); - Assertions.assertEquals("gotdt", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(true, model.parameters().get(0).sensitive()); - Assertions.assertEquals("lh", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("ybfnkylzrignqlwo", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("nbjuaiu", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("mqcbnk", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("i", model.parameters().get(0).variable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisPackage model = new SsisPackage().withId(4578072156366697994L).withName("ryywyfcenkbfxqc") - .withDescription("ggeciradmxokbutb").withFolderId(3556959886109984896L) - .withProjectVersion(4692852653848740418L).withProjectId(668264090121943233L) - .withParameters(Arrays.asList(new SsisParameter().withId(525854716862013224L).withName("ifywxjjylaqhx") - .withDescription("fdarvjhwgkynxlw").withDataType("gotdt").withRequired(false).withSensitive(true) - .withDesignDefaultValue("lh").withDefaultValue("ybfnkylzrignqlwo").withSensitiveDefaultValue("nbjuaiu") - .withValueType("mqcbnk").withValueSet(true).withVariable("i"))); - model = BinaryData.fromObject(model).toObject(SsisPackage.class); - Assertions.assertEquals(4578072156366697994L, model.id()); - Assertions.assertEquals("ryywyfcenkbfxqc", model.name()); - Assertions.assertEquals("ggeciradmxokbutb", model.description()); - Assertions.assertEquals(3556959886109984896L, model.folderId()); - Assertions.assertEquals(4692852653848740418L, model.projectVersion()); - 
Assertions.assertEquals(668264090121943233L, model.projectId()); - Assertions.assertEquals(525854716862013224L, model.parameters().get(0).id()); - Assertions.assertEquals("ifywxjjylaqhx", model.parameters().get(0).name()); - Assertions.assertEquals("fdarvjhwgkynxlw", model.parameters().get(0).description()); - Assertions.assertEquals("gotdt", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(true, model.parameters().get(0).sensitive()); - Assertions.assertEquals("lh", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("ybfnkylzrignqlwo", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("nbjuaiu", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("mqcbnk", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("i", model.parameters().get(0).variable()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java index e299d91ab6ec7..5ec9109a70d73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java @@ -12,40 +12,48 @@ public final class SsisParameterTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisParameter model = BinaryData.fromString( - "{\"id\":3304004875475532508,\"name\":\"enpftk\",\"description\":\"bmvxbi\",\"dataType\":\"zghpsotbameir\",\"required\":true,\"sensitive\":true,\"designDefaultValue\":\"svr\",\"defaultValue\":\"hyncppmmwhje\",\"sensitiveDefaultValue\":\"urgipv\",\"valueType\":\"xlepsmck\",\"valueSet\":false,\"variable\":\"xgcqmguv\"}") + "{\"id\":4128209313466556237,\"name\":\"bsjtfqwlb\",\"description\":\"czzscgropuq\",\"dataType\":\"s\",\"required\":true,\"sensitive\":true,\"designDefaultValue\":\"yikqlyoorme\",\"defaultValue\":\"glq\",\"sensitiveDefaultValue\":\"ykckyhxuzn\",\"valueType\":\"piadjslv\",\"valueSet\":false,\"variable\":\"cdwbi\"}") .toObject(SsisParameter.class); - Assertions.assertEquals(3304004875475532508L, model.id()); - Assertions.assertEquals("enpftk", model.name()); - Assertions.assertEquals("bmvxbi", model.description()); - Assertions.assertEquals("zghpsotbameir", model.dataType()); + Assertions.assertEquals(4128209313466556237L, model.id()); + Assertions.assertEquals("bsjtfqwlb", model.name()); + Assertions.assertEquals("czzscgropuq", model.description()); + Assertions.assertEquals("s", model.dataType()); Assertions.assertEquals(true, model.required()); Assertions.assertEquals(true, model.sensitive()); - Assertions.assertEquals("svr", model.designDefaultValue()); - Assertions.assertEquals("hyncppmmwhje", model.defaultValue()); - Assertions.assertEquals("urgipv", model.sensitiveDefaultValue()); - Assertions.assertEquals("xlepsmck", model.valueType()); + Assertions.assertEquals("yikqlyoorme", model.designDefaultValue()); + Assertions.assertEquals("glq", model.defaultValue()); + Assertions.assertEquals("ykckyhxuzn", model.sensitiveDefaultValue()); + Assertions.assertEquals("piadjslv", model.valueType()); 
Assertions.assertEquals(false, model.valueSet()); - Assertions.assertEquals("xgcqmguv", model.variable()); + Assertions.assertEquals("cdwbi", model.variable()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisParameter model = new SsisParameter().withId(3304004875475532508L).withName("enpftk") - .withDescription("bmvxbi").withDataType("zghpsotbameir").withRequired(true).withSensitive(true) - .withDesignDefaultValue("svr").withDefaultValue("hyncppmmwhje").withSensitiveDefaultValue("urgipv") - .withValueType("xlepsmck").withValueSet(false).withVariable("xgcqmguv"); + SsisParameter model = new SsisParameter().withId(4128209313466556237L) + .withName("bsjtfqwlb") + .withDescription("czzscgropuq") + .withDataType("s") + .withRequired(true) + .withSensitive(true) + .withDesignDefaultValue("yikqlyoorme") + .withDefaultValue("glq") + .withSensitiveDefaultValue("ykckyhxuzn") + .withValueType("piadjslv") + .withValueSet(false) + .withVariable("cdwbi"); model = BinaryData.fromObject(model).toObject(SsisParameter.class); - Assertions.assertEquals(3304004875475532508L, model.id()); - Assertions.assertEquals("enpftk", model.name()); - Assertions.assertEquals("bmvxbi", model.description()); - Assertions.assertEquals("zghpsotbameir", model.dataType()); + Assertions.assertEquals(4128209313466556237L, model.id()); + Assertions.assertEquals("bsjtfqwlb", model.name()); + Assertions.assertEquals("czzscgropuq", model.description()); + Assertions.assertEquals("s", model.dataType()); Assertions.assertEquals(true, model.required()); Assertions.assertEquals(true, model.sensitive()); - Assertions.assertEquals("svr", model.designDefaultValue()); - Assertions.assertEquals("hyncppmmwhje", model.defaultValue()); - Assertions.assertEquals("urgipv", model.sensitiveDefaultValue()); - Assertions.assertEquals("xlepsmck", model.valueType()); + Assertions.assertEquals("yikqlyoorme", model.designDefaultValue()); + Assertions.assertEquals("glq", model.defaultValue()); + Assertions.assertEquals("ykckyhxuzn", model.sensitiveDefaultValue()); + Assertions.assertEquals("piadjslv", model.valueType()); Assertions.assertEquals(false, model.valueSet()); - Assertions.assertEquals("xgcqmguv", model.variable()); + Assertions.assertEquals("cdwbi", model.variable()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java index 3768267ffbcfc..a325170b060ff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java @@ -15,80 +15,125 @@ public final class SsisProjectTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisProject model = BinaryData.fromString( - 
"{\"type\":\"Project\",\"folderId\":4812651773635303684,\"version\":229346380706684388,\"environmentRefs\":[{\"id\":6689352297389894599,\"environmentFolderName\":\"hhaq\",\"environmentName\":\"tuecmguk\",\"referenceType\":\"dpuowlcexkr\"},{\"id\":7044331808895170259,\"environmentFolderName\":\"qnbs\",\"environmentName\":\"jcmtcidcab\",\"referenceType\":\"xhcxct\"},{\"id\":3640487153181540727,\"environmentFolderName\":\"ftforylxaknwkjzv\",\"environmentName\":\"symtupyjt\",\"referenceType\":\"xzwdsnqhyefnakd\"}],\"parameters\":[{\"id\":8635897262903643245,\"name\":\"lulytjx\",\"description\":\"wtittlnv\",\"dataType\":\"vupuplugulynv\",\"required\":false,\"sensitive\":false,\"designDefaultValue\":\"dapydsfpz\",\"defaultValue\":\"bsilbnrucqehyrn\",\"sensitiveDefaultValue\":\"jrgfbmpszwkbcstz\",\"valueType\":\"bgaesm\",\"valueSet\":true,\"variable\":\"xrwqt\"},{\"id\":1353396779166802372,\"name\":\"takx\",\"description\":\"lkgjhomywlypghhu\",\"dataType\":\"qyfvgpqwg\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"skmbuihtqfvyqmm\",\"defaultValue\":\"uguvlieegjnqwh\",\"sensitiveDefaultValue\":\"o\",\"valueType\":\"ehjscgqcrwaucft\",\"valueSet\":false,\"variable\":\"hjxdlmuhf\"},{\"id\":8128896989412760226,\"name\":\"jyqmpmsknaxr\",\"description\":\"jwqufudpypboql\",\"dataType\":\"xfpwmajvwfijf\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"vhms\",\"defaultValue\":\"ihddnbwl\",\"sensitiveDefaultValue\":\"ntdde\",\"valueType\":\"xyiwuzpsvcmz\",\"valueSet\":false,\"variable\":\"yyysqnwnl\"}],\"id\":4034013017830873122,\"name\":\"dzkfthsyd\",\"description\":\"dbzzetfgkz\"}") + "{\"type\":\"Project\",\"folderId\":6810027269223003615,\"version\":8020114105989626918,\"environmentRefs\":[{\"id\":3010334204752979015,\"environmentFolderName\":\"qnztukirdedij\",\"environmentName\":\"zkwvueweivajelsw\",\"referenceType\":\"dknxctglzbcxd\"},{\"id\":9144944610093515005,\"environmentFolderName\":\"gyiw\",\"environmentName\":\"judxzfvna\",\"referenceType\":\"ffch\"},{\"id\":9040103124560572261,\"environmentFolderName\":\"vjehtseaoxxsehfl\",\"environmentName\":\"abpyz\",\"referenceType\":\"plphfstvurtk\"},{\"id\":4504562454300211422,\"environmentFolderName\":\"axwzbwx\",\"environmentName\":\"disxsrrhr\",\"referenceType\":\"bbedrnrjz\"}],\"parameters\":[{\"id\":418641047348887519,\"name\":\"bmy\",\"description\":\"ihcgxmfbi\",\"dataType\":\"uvqqo\",\"required\":true,\"sensitive\":true,\"designDefaultValue\":\"nqtjcemiasi\",\"defaultValue\":\"hxqnlmjvpm\",\"sensitiveDefaultValue\":\"fibwvhwouhysr\",\"valueType\":\"kzj\",\"valueSet\":false,\"variable\":\"ljxaayxpdzvg\"},{\"id\":1887624659598495182,\"name\":\"jge\",\"description\":\"dbigi\",\"dataType\":\"amudyrsirnbpi\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"xolixfe\",\"defaultValue\":\"r\",\"sensitiveDefaultValue\":\"zvtzyfktbayhas\",\"valueType\":\"alkiyzlkqyez\",\"valueSet\":true,\"variable\":\"rdfdssofuxystltk\"},{\"id\":5934714436449506549,\"name\":\"iiqajfwzubfg\",\"description\":\"vzbueiydoqb\",\"dataType\":\"xlxptxhedn\",\"required\":true,\"sensitive\":false,\"designDefaultValue\":\"a\",\"defaultValue\":\"zrtsei\",\"sensitiveDefaultValue\":\"krw\",\"valueType\":\"n\",\"valueSet\":true,\"variable\":\"wcyeljniadgzxbw\"},{\"id\":4788013996128357968,\"name\":\"ctyukmzozbkht\",\"description\":\"ghutiqlmvn\",\"dataType\":\"fyftk\",\"required\":false,\"sensitive\":false,\"designDefaultValue\":\"ykiumagfyinmajj\",\"defaultValue\":\"l\",\"sensitiveDefaultValue\":\"wfkflcvbzkb\",\"valueType\":\"uafbddm\",\"valu
eSet\":false,\"variable\":\"nml\"}],\"id\":6777524756518511166,\"name\":\"tagpyuhdqh\",\"description\":\"rksypoosfxzobovs\"}") .toObject(SsisProject.class); - Assertions.assertEquals(4034013017830873122L, model.id()); - Assertions.assertEquals("dzkfthsyd", model.name()); - Assertions.assertEquals("dbzzetfgkz", model.description()); - Assertions.assertEquals(4812651773635303684L, model.folderId()); - Assertions.assertEquals(229346380706684388L, model.version()); - Assertions.assertEquals(6689352297389894599L, model.environmentRefs().get(0).id()); - Assertions.assertEquals("hhaq", model.environmentRefs().get(0).environmentFolderName()); - Assertions.assertEquals("tuecmguk", model.environmentRefs().get(0).environmentName()); - Assertions.assertEquals("dpuowlcexkr", model.environmentRefs().get(0).referenceType()); - Assertions.assertEquals(8635897262903643245L, model.parameters().get(0).id()); - Assertions.assertEquals("lulytjx", model.parameters().get(0).name()); - Assertions.assertEquals("wtittlnv", model.parameters().get(0).description()); - Assertions.assertEquals("vupuplugulynv", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("dapydsfpz", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("bsilbnrucqehyrn", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("jrgfbmpszwkbcstz", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("bgaesm", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("xrwqt", model.parameters().get(0).variable()); + Assertions.assertEquals(6777524756518511166L, model.id()); + Assertions.assertEquals("tagpyuhdqh", model.name()); + Assertions.assertEquals("rksypoosfxzobovs", model.description()); + Assertions.assertEquals(6810027269223003615L, model.folderId()); + Assertions.assertEquals(8020114105989626918L, model.version()); + Assertions.assertEquals(3010334204752979015L, model.environmentRefs().get(0).id()); + Assertions.assertEquals("qnztukirdedij", model.environmentRefs().get(0).environmentFolderName()); + Assertions.assertEquals("zkwvueweivajelsw", model.environmentRefs().get(0).environmentName()); + Assertions.assertEquals("dknxctglzbcxd", model.environmentRefs().get(0).referenceType()); + Assertions.assertEquals(418641047348887519L, model.parameters().get(0).id()); + Assertions.assertEquals("bmy", model.parameters().get(0).name()); + Assertions.assertEquals("ihcgxmfbi", model.parameters().get(0).description()); + Assertions.assertEquals("uvqqo", model.parameters().get(0).dataType()); + Assertions.assertEquals(true, model.parameters().get(0).required()); + Assertions.assertEquals(true, model.parameters().get(0).sensitive()); + Assertions.assertEquals("nqtjcemiasi", model.parameters().get(0).designDefaultValue()); + Assertions.assertEquals("hxqnlmjvpm", model.parameters().get(0).defaultValue()); + Assertions.assertEquals("fibwvhwouhysr", model.parameters().get(0).sensitiveDefaultValue()); + Assertions.assertEquals("kzj", model.parameters().get(0).valueType()); + Assertions.assertEquals(false, model.parameters().get(0).valueSet()); + Assertions.assertEquals("ljxaayxpdzvg", model.parameters().get(0).variable()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisProject model = new 
SsisProject().withId(4034013017830873122L).withName("dzkfthsyd") - .withDescription("dbzzetfgkz").withFolderId(4812651773635303684L).withVersion(229346380706684388L) + SsisProject model = new SsisProject().withId(6777524756518511166L) + .withName("tagpyuhdqh") + .withDescription("rksypoosfxzobovs") + .withFolderId(6810027269223003615L) + .withVersion(8020114105989626918L) .withEnvironmentRefs(Arrays.asList( - new SsisEnvironmentReference().withId(6689352297389894599L).withEnvironmentFolderName("hhaq") - .withEnvironmentName("tuecmguk").withReferenceType("dpuowlcexkr"), - new SsisEnvironmentReference().withId(7044331808895170259L).withEnvironmentFolderName("qnbs") - .withEnvironmentName("jcmtcidcab").withReferenceType("xhcxct"), - new SsisEnvironmentReference().withId(3640487153181540727L) - .withEnvironmentFolderName("ftforylxaknwkjzv").withEnvironmentName("symtupyjt") - .withReferenceType("xzwdsnqhyefnakd"))) + new SsisEnvironmentReference().withId(3010334204752979015L) + .withEnvironmentFolderName("qnztukirdedij") + .withEnvironmentName("zkwvueweivajelsw") + .withReferenceType("dknxctglzbcxd"), + new SsisEnvironmentReference().withId(9144944610093515005L) + .withEnvironmentFolderName("gyiw") + .withEnvironmentName("judxzfvna") + .withReferenceType("ffch"), + new SsisEnvironmentReference().withId(9040103124560572261L) + .withEnvironmentFolderName("vjehtseaoxxsehfl") + .withEnvironmentName("abpyz") + .withReferenceType("plphfstvurtk"), + new SsisEnvironmentReference().withId(4504562454300211422L) + .withEnvironmentFolderName("axwzbwx") + .withEnvironmentName("disxsrrhr") + .withReferenceType("bbedrnrjz"))) .withParameters(Arrays.asList( - new SsisParameter().withId(8635897262903643245L).withName("lulytjx").withDescription("wtittlnv") - .withDataType("vupuplugulynv").withRequired(false).withSensitive(false) - .withDesignDefaultValue("dapydsfpz").withDefaultValue("bsilbnrucqehyrn") - .withSensitiveDefaultValue("jrgfbmpszwkbcstz").withValueType("bgaesm").withValueSet(true) - .withVariable("xrwqt"), - new SsisParameter().withId(1353396779166802372L).withName("takx").withDescription("lkgjhomywlypghhu") - .withDataType("qyfvgpqwg").withRequired(false).withSensitive(true) - .withDesignDefaultValue("skmbuihtqfvyqmm").withDefaultValue("uguvlieegjnqwh") - .withSensitiveDefaultValue("o").withValueType("ehjscgqcrwaucft").withValueSet(false) - .withVariable("hjxdlmuhf"), - new SsisParameter().withId(8128896989412760226L).withName("jyqmpmsknaxr") - .withDescription("jwqufudpypboql").withDataType("xfpwmajvwfijf").withRequired(false) - .withSensitive(true).withDesignDefaultValue("vhms").withDefaultValue("ihddnbwl") - .withSensitiveDefaultValue("ntdde").withValueType("xyiwuzpsvcmz").withValueSet(false) - .withVariable("yyysqnwnl"))); + new SsisParameter().withId(418641047348887519L) + .withName("bmy") + .withDescription("ihcgxmfbi") + .withDataType("uvqqo") + .withRequired(true) + .withSensitive(true) + .withDesignDefaultValue("nqtjcemiasi") + .withDefaultValue("hxqnlmjvpm") + .withSensitiveDefaultValue("fibwvhwouhysr") + .withValueType("kzj") + .withValueSet(false) + .withVariable("ljxaayxpdzvg"), + new SsisParameter().withId(1887624659598495182L) + .withName("jge") + .withDescription("dbigi") + .withDataType("amudyrsirnbpi") + .withRequired(false) + .withSensitive(true) + .withDesignDefaultValue("xolixfe") + .withDefaultValue("r") + .withSensitiveDefaultValue("zvtzyfktbayhas") + .withValueType("alkiyzlkqyez") + .withValueSet(true) + .withVariable("rdfdssofuxystltk"), + new 
SsisParameter().withId(5934714436449506549L) + .withName("iiqajfwzubfg") + .withDescription("vzbueiydoqb") + .withDataType("xlxptxhedn") + .withRequired(true) + .withSensitive(false) + .withDesignDefaultValue("a") + .withDefaultValue("zrtsei") + .withSensitiveDefaultValue("krw") + .withValueType("n") + .withValueSet(true) + .withVariable("wcyeljniadgzxbw"), + new SsisParameter().withId(4788013996128357968L) + .withName("ctyukmzozbkht") + .withDescription("ghutiqlmvn") + .withDataType("fyftk") + .withRequired(false) + .withSensitive(false) + .withDesignDefaultValue("ykiumagfyinmajj") + .withDefaultValue("l") + .withSensitiveDefaultValue("wfkflcvbzkb") + .withValueType("uafbddm") + .withValueSet(false) + .withVariable("nml"))); model = BinaryData.fromObject(model).toObject(SsisProject.class); - Assertions.assertEquals(4034013017830873122L, model.id()); - Assertions.assertEquals("dzkfthsyd", model.name()); - Assertions.assertEquals("dbzzetfgkz", model.description()); - Assertions.assertEquals(4812651773635303684L, model.folderId()); - Assertions.assertEquals(229346380706684388L, model.version()); - Assertions.assertEquals(6689352297389894599L, model.environmentRefs().get(0).id()); - Assertions.assertEquals("hhaq", model.environmentRefs().get(0).environmentFolderName()); - Assertions.assertEquals("tuecmguk", model.environmentRefs().get(0).environmentName()); - Assertions.assertEquals("dpuowlcexkr", model.environmentRefs().get(0).referenceType()); - Assertions.assertEquals(8635897262903643245L, model.parameters().get(0).id()); - Assertions.assertEquals("lulytjx", model.parameters().get(0).name()); - Assertions.assertEquals("wtittlnv", model.parameters().get(0).description()); - Assertions.assertEquals("vupuplugulynv", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("dapydsfpz", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("bsilbnrucqehyrn", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("jrgfbmpszwkbcstz", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("bgaesm", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("xrwqt", model.parameters().get(0).variable()); + Assertions.assertEquals(6777524756518511166L, model.id()); + Assertions.assertEquals("tagpyuhdqh", model.name()); + Assertions.assertEquals("rksypoosfxzobovs", model.description()); + Assertions.assertEquals(6810027269223003615L, model.folderId()); + Assertions.assertEquals(8020114105989626918L, model.version()); + Assertions.assertEquals(3010334204752979015L, model.environmentRefs().get(0).id()); + Assertions.assertEquals("qnztukirdedij", model.environmentRefs().get(0).environmentFolderName()); + Assertions.assertEquals("zkwvueweivajelsw", model.environmentRefs().get(0).environmentName()); + Assertions.assertEquals("dknxctglzbcxd", model.environmentRefs().get(0).referenceType()); + Assertions.assertEquals(418641047348887519L, model.parameters().get(0).id()); + Assertions.assertEquals("bmy", model.parameters().get(0).name()); + Assertions.assertEquals("ihcgxmfbi", model.parameters().get(0).description()); + Assertions.assertEquals("uvqqo", model.parameters().get(0).dataType()); + Assertions.assertEquals(true, model.parameters().get(0).required()); + Assertions.assertEquals(true, 
model.parameters().get(0).sensitive()); + Assertions.assertEquals("nqtjcemiasi", model.parameters().get(0).designDefaultValue()); + Assertions.assertEquals("hxqnlmjvpm", model.parameters().get(0).defaultValue()); + Assertions.assertEquals("fibwvhwouhysr", model.parameters().get(0).sensitiveDefaultValue()); + Assertions.assertEquals("kzj", model.parameters().get(0).valueType()); + Assertions.assertEquals(false, model.parameters().get(0).valueSet()); + Assertions.assertEquals("ljxaayxpdzvg", model.parameters().get(0).variable()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java index 2053990a4e2fe..67abeb753c722 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java @@ -11,14 +11,14 @@ public final class SsisPropertyOverrideTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SsisPropertyOverride model = BinaryData.fromString("{\"value\":\"databpjzoyzy\",\"isSensitive\":true}") + SsisPropertyOverride model = BinaryData.fromString("{\"value\":\"dataademloimaykb\",\"isSensitive\":true}") .toObject(SsisPropertyOverride.class); Assertions.assertEquals(true, model.isSensitive()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisPropertyOverride model = new SsisPropertyOverride().withValue("databpjzoyzy").withIsSensitive(true); + SsisPropertyOverride model = new SsisPropertyOverride().withValue("dataademloimaykb").withIsSensitive(true); model = BinaryData.fromObject(model).toObject(SsisPropertyOverride.class); Assertions.assertEquals(true, model.isSensitive()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java index b7b0ca8f30418..1a1a442ec14a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java @@ -12,29 +12,33 @@ public final class SsisVariableTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SsisVariable model = BinaryData.fromString( - "{\"id\":1109572290980895504,\"name\":\"hhwpufrspreyilq\",\"description\":\"kxkteoykqrqtxqog\",\"dataType\":\"dimnacklyrbv\",\"sensitive\":true,\"value\":\"expnphtqwfp\",\"sensitiveValue\":\"sbcxqiy\"}") + "{\"id\":2100467242964547244,\"name\":\"lkkwbdekbvdu\",\"description\":\"kaadnxbssatpi\",\"dataType\":\"rqh\",\"sensitive\":false,\"value\":\"ccfgfvo\",\"sensitiveValue\":\"iyvrexitpz\"}") .toObject(SsisVariable.class); - Assertions.assertEquals(1109572290980895504L, model.id()); - Assertions.assertEquals("hhwpufrspreyilq", model.name()); - Assertions.assertEquals("kxkteoykqrqtxqog", model.description()); - Assertions.assertEquals("dimnacklyrbv", model.dataType()); - Assertions.assertEquals(true, 
model.sensitive()); - Assertions.assertEquals("expnphtqwfp", model.value()); - Assertions.assertEquals("sbcxqiy", model.sensitiveValue()); + Assertions.assertEquals(2100467242964547244L, model.id()); + Assertions.assertEquals("lkkwbdekbvdu", model.name()); + Assertions.assertEquals("kaadnxbssatpi", model.description()); + Assertions.assertEquals("rqh", model.dataType()); + Assertions.assertEquals(false, model.sensitive()); + Assertions.assertEquals("ccfgfvo", model.value()); + Assertions.assertEquals("iyvrexitpz", model.sensitiveValue()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SsisVariable model = new SsisVariable().withId(1109572290980895504L).withName("hhwpufrspreyilq") - .withDescription("kxkteoykqrqtxqog").withDataType("dimnacklyrbv").withSensitive(true) - .withValue("expnphtqwfp").withSensitiveValue("sbcxqiy"); + SsisVariable model = new SsisVariable().withId(2100467242964547244L) + .withName("lkkwbdekbvdu") + .withDescription("kaadnxbssatpi") + .withDataType("rqh") + .withSensitive(false) + .withValue("ccfgfvo") + .withSensitiveValue("iyvrexitpz"); model = BinaryData.fromObject(model).toObject(SsisVariable.class); - Assertions.assertEquals(1109572290980895504L, model.id()); - Assertions.assertEquals("hhwpufrspreyilq", model.name()); - Assertions.assertEquals("kxkteoykqrqtxqog", model.description()); - Assertions.assertEquals("dimnacklyrbv", model.dataType()); - Assertions.assertEquals(true, model.sensitive()); - Assertions.assertEquals("expnphtqwfp", model.value()); - Assertions.assertEquals("sbcxqiy", model.sensitiveValue()); + Assertions.assertEquals(2100467242964547244L, model.id()); + Assertions.assertEquals("lkkwbdekbvdu", model.name()); + Assertions.assertEquals("kaadnxbssatpi", model.description()); + Assertions.assertEquals("rqh", model.dataType()); + Assertions.assertEquals(false, model.sensitive()); + Assertions.assertEquals("ccfgfvo", model.value()); + Assertions.assertEquals("iyvrexitpz", model.sensitiveValue()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java index b4306fcee9cfb..8c5c4c372ecc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java @@ -15,19 +15,21 @@ public final class StagingSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { StagingSettings model = BinaryData.fromString( - "{\"linkedServiceName\":{\"referenceName\":\"cjkarggvyuewg\",\"parameters\":{\"li\":\"datalvxwlqlugnbudjy\",\"qdoxooxuaufqoo\":\"datatgtlansykvlxsyc\"}},\"path\":\"dataxctkveqvpedwmhqc\",\"enableCompression\":\"dataery\",\"\":{\"clxvaovssibnvq\":\"datayqxeyzqnupsi\",\"q\":\"datavi\"}}") + "{\"linkedServiceName\":{\"referenceName\":\"iflzsrk\",\"parameters\":{\"svpokv\":\"dataobhhbl\"}},\"path\":\"databy\",\"enableCompression\":\"datafu\",\"\":{\"fffagoovfwzy\":\"dataus\",\"xup\":\"datavnvrfjg\",\"earowrmesziubkyv\":\"datahgonovwu\",\"kaomy\":\"datagkouf\"}}") .toObject(StagingSettings.class); - Assertions.assertEquals("cjkarggvyuewg", model.linkedServiceName().referenceName()); + Assertions.assertEquals("iflzsrk", 
model.linkedServiceName().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { StagingSettings model = new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cjkarggvyuewg") - .withParameters(mapOf("li", "datalvxwlqlugnbudjy", "qdoxooxuaufqoo", "datatgtlansykvlxsyc"))) - .withPath("dataxctkveqvpedwmhqc").withEnableCompression("dataery").withAdditionalProperties(mapOf()); + .withLinkedServiceName( + new LinkedServiceReference().withReferenceName("iflzsrk").withParameters(mapOf("svpokv", "dataobhhbl"))) + .withPath("databy") + .withEnableCompression("datafu") + .withAdditionalProperties(mapOf()); model = BinaryData.fromObject(model).toObject(StagingSettings.class); - Assertions.assertEquals("cjkarggvyuewg", model.linkedServiceName().referenceName()); + Assertions.assertEquals("iflzsrk", model.linkedServiceName().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java index 8ad619b184d9a..15c6128ebd307 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java @@ -13,15 +13,15 @@ public final class StoreReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { StoreReadSettings model = BinaryData.fromString( - "{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datafotaaqyx\",\"disableMetricsCollection\":\"dataoabcoxqaavjkre\",\"\":{\"ivianklqclftp\":\"datasviysbvo\"}}") + "{\"type\":\"mx\",\"maxConcurrentConnections\":\"datax\",\"disableMetricsCollection\":\"datauvgtoinozsmyvv\",\"\":{\"efcooptmdspddxgu\":\"datafbmrwhk\",\"xjwn\":\"dataiosibgolaxuy\",\"fiksjpkig\":\"datarskyrttnrikss\"}}") .toObject(StoreReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - StoreReadSettings model = new StoreReadSettings().withMaxConcurrentConnections("datafotaaqyx") - .withDisableMetricsCollection("dataoabcoxqaavjkre") - .withAdditionalProperties(mapOf("type", "StoreReadSettings")); + StoreReadSettings model = new StoreReadSettings().withMaxConcurrentConnections("datax") + .withDisableMetricsCollection("datauvgtoinozsmyvv") + .withAdditionalProperties(mapOf("type", "mx")); model = BinaryData.fromObject(model).toObject(StoreReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java index fc7d4d1aa684e..e70007bd25694 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java @@ -15,16 +15,20 @@ public final class StoreWriteSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { 
StoreWriteSettings model = BinaryData.fromString( - "{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datagcgefay\",\"disableMetricsCollection\":\"datavgotbjnxozi\",\"copyBehavior\":\"dataxnpov\",\"metadata\":[{\"name\":\"datauvmsgdis\",\"value\":\"datanxthu\"}],\"\":{\"dbqeahgsibldxya\":\"datavokxuyhhrdi\",\"h\":\"datadaaznzaxz\"}}") + "{\"type\":\"wwsioozrugbdkxl\",\"maxConcurrentConnections\":\"datakuvlzkzjjtapv\",\"disableMetricsCollection\":\"dataebtdpsgkeexso\",\"copyBehavior\":\"datakvy\",\"metadata\":[{\"name\":\"datatwtfqpmpyw\",\"value\":\"databuk\"},{\"name\":\"datajcwdoecdqu\",\"value\":\"datauqco\"},{\"name\":\"datahdxjrrbyrbn\",\"value\":\"datapsquouppzgdtu\"},{\"name\":\"dataoimojcm\",\"value\":\"datacd\"}],\"\":{\"nvb\":\"dataorzhzfocgfylto\",\"lwifbdwy\":\"datauy\",\"bbvkthre\":\"datavcy\"}}") .toObject(StoreWriteSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - StoreWriteSettings model = new StoreWriteSettings().withMaxConcurrentConnections("datagcgefay") - .withDisableMetricsCollection("datavgotbjnxozi").withCopyBehavior("dataxnpov") - .withMetadata(Arrays.asList(new MetadataItem().withName("datauvmsgdis").withValue("datanxthu"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings")); + StoreWriteSettings model = new StoreWriteSettings().withMaxConcurrentConnections("datakuvlzkzjjtapv") + .withDisableMetricsCollection("dataebtdpsgkeexso") + .withCopyBehavior("datakvy") + .withMetadata(Arrays.asList(new MetadataItem().withName("datatwtfqpmpyw").withValue("databuk"), + new MetadataItem().withName("datajcwdoecdqu").withValue("datauqco"), + new MetadataItem().withName("datahdxjrrbyrbn").withValue("datapsquouppzgdtu"), + new MetadataItem().withName("dataoimojcm").withValue("datacd"))) + .withAdditionalProperties(mapOf("type", "wwsioozrugbdkxl")); model = BinaryData.fromObject(model).toObject(StoreWriteSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java index d0c129e11dcfe..5dd020c02638f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java @@ -12,14 +12,14 @@ public final class SubResourceDebugResourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SubResourceDebugResource model - = BinaryData.fromString("{\"name\":\"wvz\"}").toObject(SubResourceDebugResource.class); - Assertions.assertEquals("wvz", model.name()); + = BinaryData.fromString("{\"name\":\"qutdewemxs\"}").toObject(SubResourceDebugResource.class); + Assertions.assertEquals("qutdewemxs", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SubResourceDebugResource model = new SubResourceDebugResource().withName("wvz"); + SubResourceDebugResource model = new SubResourceDebugResource().withName("qutdewemxs"); model = BinaryData.fromObject(model).toObject(SubResourceDebugResource.class); - Assertions.assertEquals("wvz", model.name()); + Assertions.assertEquals("qutdewemxs", model.name()); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java index 1863690c63d0c..6b8e363dd8400 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java @@ -23,193 +23,230 @@ public final class SwitchActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SwitchActivity model = BinaryData.fromString( - "{\"type\":\"Switch\",\"typeProperties\":{\"on\":{\"value\":\"suuapktfvemwfwc\"},\"cases\":[{\"value\":\"qv\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hbyklwc\",\"description\":\"xpkpsqk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"bkhyqouzz\",\"dependencyConditions\":[]},{\"activity\":\"gnldz\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"tlhnm\",\"value\":\"datads\"},{\"name\":\"gfdvwshcc\",\"value\":\"datayqn\"},{\"name\":\"sjnrfpzlvaeo\",\"value\":\"datanskekhmomv\"}],\"\":{\"aetyeafjlismaca\":\"datadsqfazsiizcwhaxj\",\"kgfpjbj\":\"datadyajyiwvqlrzob\",\"rzfh\":\"datavnkyqrjb\"}},{\"type\":\"Activity\",\"name\":\"hukuypyeo\",\"description\":\"aeabbxkldtw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ldgbgua\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"cdbudfwlpgpxyrfk\",\"value\":\"datalgpladqc\"}],\"\":{\"emex\":\"datahgpzqibqilcntmu\",\"djwp\":\"datarjxaawentkok\",\"jqfwxicbvwnnvt\":\"dataea\",\"yowzptxpe\":\"databclgrkfwofwe\"}}]},{\"value\":\"bmuzpdjt\",\"activities\":[{\"type\":\"Activity\",\"name\":\"ycasxuhi\",\"description\":\"mmiipf\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"nfpxrzqagmcivs\",\"dependencyConditions\":[]},{\"activity\":\"awiabyfzadeu\",\"dependencyConditions\":[]},{\"activity\":\"tkfvdjgw\",\"dependencyConditions\":[]},{\"activity\":\"akqgabrbsuxgn\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"kulozdoilhrxji\",\"value\":\"datajiv\"},{\"name\":\"orqlkycwnb\",\"value\":\"datalau\"}],\"\":{\"bvftqahjnsllfkcr\":\"datayriscio\",\"fxtendfp\":\"dataviimhdlmagdwi\",\"tklojlgsbystznwj\":\"dataoxtifosxxk\",\"ptvkjdowuzasd\":\"datasvllefliriq\"}},{\"type\":\"Activity\",\"name\":\"tufmujadippdntun\",\"description\":\"eeprmebvxmaacr\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jcesm\",\"dependencyConditions\":[]},{\"activity\":\"cxugatvjxyvxd\",\"dependencyConditions\":[]},{\"activity\":\"uzdphogmrcmgu\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"asdrrfozz\",\"value\":\"dataygolz\"},{\"name\":\"njkbmfcrysvcab\",\"value\":\"datak\"},{\"name\":\"jmzqnbwnlo\",\"value\":\"dataz\"}],\"\":{\"njorpcrg\":\"datapvdxutcoqclypb\"}}]}],\"defaultActivities\":[{\"type\":\"Activity\",\"name\":\"it\",\"description\":\"ebuvxxl\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"bzictf\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\",\"Skipped\"],\"\":{\"wvmw\":\"dataesxzukl\"}}],\"userProperties\":[{\"name\":\"chcootyscar\",\"value\":\"datamhiewvcpyskh\"},{\"name\":\"vkw\",\"value\":\"datatbvyclg\"},{\"name\
":\"zbyxtprxt\",\"value\":\"datawvng\"},{\"name\":\"csno\",\"value\":\"datakglygeuo\"}],\"\":{\"ggntqptrjtyhth\":\"datawjvdrjlgwzb\",\"vkhkubpojhdxcha\":\"datacpzdn\",\"w\":\"datag\",\"iulfxgzyr\":\"datavrnwxolfhiq\"}},{\"type\":\"Activity\",\"name\":\"qux\",\"description\":\"ekixouhcatozs\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"wjxatghuixczycif\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"euukko\":\"datab\",\"wzgb\":\"datawtucmhpjmnxlfkm\",\"mrpbmxmxshfh\":\"databwmiap\"}},{\"activity\":\"p\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Failed\"],\"\":{\"vxytmoqnytucuzy\":\"datap\",\"e\":\"dataigdebsinsoybe\",\"mqjcagxrozcfcxk\":\"datarpouhlhlud\",\"kgepmnxvahqvc\":\"datahjxbteakdr\"}}],\"userProperties\":[{\"name\":\"hlkx\",\"value\":\"dataanlyccdmkp\"}],\"\":{\"qzdedizdmwndnsg\":\"dataa\"}},{\"type\":\"Activity\",\"name\":\"fzp\",\"description\":\"wmdmwsflrdyrxl\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"m\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"ldhwr\":\"dataynlhsdtcgflevn\"}},{\"activity\":\"cflhwfrjyuhuthqd\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"uqve\":\"dataloyqjrkted\",\"zxcf\":\"datajsogesrmah\",\"vupnd\":\"datapyrelbzwxxsowdnu\"}}],\"userProperties\":[{\"name\":\"faeisboeap\",\"value\":\"dataraydlpu\"},{\"name\":\"kmakkwqrkaym\",\"value\":\"datagzbkliokuwhrpam\"},{\"name\":\"vx\",\"value\":\"datarl\"}],\"\":{\"dw\":\"databbacixlirolaoo\"}},{\"type\":\"Activity\",\"name\":\"jerm\",\"description\":\"kikgp\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"kcczb\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"iqvqbvfihna\":\"datakwjhkjvsvywnz\"}}],\"userProperties\":[{\"name\":\"ukegkludfdh\",\"value\":\"dataorihqzfjyqadtq\"},{\"name\":\"tsa\",\"value\":\"datajjfa\"},{\"name\":\"plywtgilhxaa\",\"value\":\"datanuufenp\"}],\"\":{\"exqyroqklgvyce\":\"dataktnfeghcmxi\",\"twhyznlhak\":\"dataywuioi\"}}]},\"name\":\"fskgxfmdpsreqor\",\"description\":\"ulzqjqbw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"juakdsmwajalsen\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"unz\":\"datavbslrhcceyobjs\"}},{\"activity\":\"bbyvxk\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"jvgftmpj\":\"datatdvxidmitmjcc\",\"scngdu\":\"datanrqgliqxahpy\",\"vhcwt\":\"dataw\",\"xigpmc\":\"dataqires\"}},{\"activity\":\"equocawcb\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Skipped\"],\"\":{\"aaav\":\"datapwhybtx\",\"dxdxrkrvmhhgvrxv\":\"dataiadygoad\"}},{\"activity\":\"uwbvrbwafw\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Skipped\"],\"\":{\"bvf\":\"datagwfgvpftbwmuxcpy\",\"ptkbe\":\"datamghhzm\",\"qvxzqwcmmolpfcv\":\"datapywvgfdsrng\"}}],\"userProperties\":[{\"name\":\"nnyksskuscdnn\",\"value\":\"dataoftapyrh\"},{\"name\":\"tjtqww\",\"value\":\"dataaxhsjw\"},{\"name\":\"c\",\"value\":\"datatwywhrzntmzzzavx\"}],\"\":{\"teaisywopko\":\"dataexspoiqvuky\",\"cbyldsmyq\":\"datalwmaigd\"}}") + 
"{\"type\":\"mpmf\",\"typeProperties\":{\"on\":{\"value\":\"ajbgpuwk\"},\"cases\":[{\"value\":\"nkzyqizx\",\"activities\":[{\"type\":\"ehtrgybfumoro\",\"name\":\"rutbfkynwwm\",\"description\":\"pyrzazkalj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"bzloaepb\",\"dependencyConditions\":[]},{\"activity\":\"ntgsju\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"eggphwgixypgvwm\",\"value\":\"dataa\"},{\"name\":\"hriua\",\"value\":\"dataqgkvkoynjucmyj\"},{\"name\":\"lafv\",\"value\":\"datandkvbc\"},{\"name\":\"qenbgymgjneoh\",\"value\":\"datakis\"}],\"\":{\"fsjodskqyjsdxgef\":\"datag\"}},{\"type\":\"rfih\",\"name\":\"tor\",\"description\":\"hfuw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ociunndgpxdjkwy\",\"dependencyConditions\":[]},{\"activity\":\"qnlqzymivjk\",\"dependencyConditions\":[]},{\"activity\":\"ci\",\"dependencyConditions\":[]},{\"activity\":\"zag\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"sywdtg\",\"value\":\"datargqflpux\"},{\"name\":\"ak\",\"value\":\"datafr\"}],\"\":{\"cpcl\":\"datashqttk\",\"tysmnyfahi\":\"dataotce\"}},{\"type\":\"scdoweorniyjq\",\"name\":\"j\",\"description\":\"ko\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"opirgdsqcbxkwwnq\",\"dependencyConditions\":[]},{\"activity\":\"ybwjvifgj\",\"dependencyConditions\":[]},{\"activity\":\"tzhkhyqjvgagrbir\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"ubbn\",\"value\":\"dataofyeggaauubkr\"},{\"name\":\"chkwwibxjpyt\",\"value\":\"dataak\"}],\"\":{\"ujwcyv\":\"dataoe\",\"norfxirj\":\"dataxbqu\"}}]},{\"value\":\"ggwzvdqpxicpoz\",\"activities\":[{\"type\":\"uraqpcspsbrd\",\"name\":\"domyqbeasbv\",\"description\":\"fkzu\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"frwbmfq\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"aqltoxhfphaw\",\"value\":\"dataovqtvbusyqyfit\"},{\"name\":\"prbmmfqteox\",\"value\":\"dataikdcjmbwrhpw\"},{\"name\":\"udegykzdspbjks\",\"value\":\"datadsrwhjhivgeran\"}],\"\":{\"jtqqqcxr\":\"dataaf\",\"cgyvzpvz\":\"datawduspxijrremvz\",\"kq\":\"dataduzfybjucfsu\"}}]}],\"defaultActivities\":[{\"type\":\"jw\",\"name\":\"wpoywymtwhzdgbg\",\"description\":\"yzrzhkhmwcg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"gqxnyoakdp\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Completed\"],\"\":{\"bmfejtdboa\":\"dataezgzsekbce\"}},{\"activity\":\"nya\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"rjgrzxaamibhkaqz\":\"datafzypykjor\"}},{\"activity\":\"hjqslshceyhalbx\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"t\":\"dataco\",\"rifmtk\":\"dataupaqzithojrtcdav\",\"xirsvjozexxzkci\":\"datawezwkparj\",\"sjblagsshdubqhaf\":\"dataykeawrumhzg\"}}],\"userProperties\":[{\"name\":\"omwnkhiwqiqx\",\"value\":\"datawbormfnntpocf\"}],\"\":{\"quhdyzuehqmtt\":\"datazsfdohytk\",\"eddwjimrzavci\":\"datawpeaivbzrms\",\"i\":\"datafqameccuqkoat\"}},{\"type\":\"diecrbcv\",\"name\":\"kkdsyxabdd\",\"description\":\"zohvpqtxlu\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"cxjbaloi\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"nxuiiprfijmilo\":\"datavpavraeeiboqcv\",\"grelgggjt\":\"dataedxsphfjzxesw\",\"ojoe\":\"datajeolxbg\",\"rczzgu\":\"datatwehvuttngatgl\"}},{\"activity\":\"rkrfabffeahypjqa\",\"dependencyConditions\":[\"Fai
led\",\"Succeeded\",\"Completed\",\"Succeeded\"],\"\":{\"bplbtmwae\":\"dataffpvdjfws\",\"cxh\":\"dataybrhn\",\"heuwcmwixyrv\":\"datashabnpdnbtym\",\"buvviysg\":\"datapu\"}}],\"userProperties\":[{\"name\":\"hmvx\",\"value\":\"datapqfawwoxqj\"},{\"name\":\"mfyvgm\",\"value\":\"datawvvs\"},{\"name\":\"ynvgfaotokipndek\",\"value\":\"datazgdrkddzkkikrotn\"},{\"name\":\"x\",\"value\":\"dataeqdinwq\"}],\"\":{\"xmnsrejq\":\"datatqoxethrxlpgrvtz\",\"earbk\":\"dataylhesmhov\",\"xfsknmrc\":\"dataaomxyxnbenhxtx\"}},{\"type\":\"dfbdxwywdyqpkw\",\"name\":\"woflfniislohftm\",\"description\":\"xrx\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"tsgopmatubt\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\"],\"\":{\"nqopoelqfsfxth\":\"datar\",\"kqkvfthbnikoybrs\":\"datadzeu\",\"wqmtzhiku\":\"dataf\",\"oxmzvlofzdnvsr\":\"dataymis\"}},{\"activity\":\"l\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"xtxgrh\":\"dataafcxpvxrqegkw\",\"sb\":\"dataqbstodeu\"}},{\"activity\":\"dcoqm\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"rjrx\":\"datawkpvsij\"}},{\"activity\":\"cnfyknx\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\"],\"\":{\"ehkqmlldeksgejmp\":\"dataq\"}}],\"userProperties\":[{\"name\":\"jacnbep\",\"value\":\"dataqhpkaamoovrb\"},{\"name\":\"buoqbclhnlqxuxr\",\"value\":\"datagxvkzhqpkckwaaf\"},{\"name\":\"yscjawqhpijur\",\"value\":\"dataoihxibji\"},{\"name\":\"m\",\"value\":\"dataj\"}],\"\":{\"nbluxomzg\":\"datafurdjjzsijmsaa\",\"wuiopgyunf\":\"datajmnvukovxfkxnevc\"}}]},\"name\":\"kinodekppcpwc\",\"description\":\"nuys\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ldorqprjevueyzg\",\"dependencyConditions\":[\"Completed\"],\"\":{\"irg\":\"datazeqvf\"}},{\"activity\":\"gu\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Succeeded\",\"Skipped\"],\"\":{\"wepojmxby\":\"datamabnwsga\",\"cxpzje\":\"dataiykwrffxorwx\",\"npuquyatvsnkrxh\":\"dataoyqlcvtdyuozmtsj\",\"ldtjzi\":\"dataegwvblrgrzlrnuy\"}},{\"activity\":\"fwzpauwhfhynhol\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"ctvl\":\"dataukjywgsyidqlghr\",\"drfekxv\":\"datank\"}}],\"userProperties\":[{\"name\":\"htbqzxqi\",\"value\":\"dataguawrwjbante\"},{\"name\":\"uyricaik\",\"value\":\"datavjktfpob\"},{\"name\":\"lrrqjioltdl\",\"value\":\"datapyksqns\"}],\"\":{\"geqybordnwtu\":\"datahvfodrqm\",\"nvdorsgcvgkn\":\"datavbviymvgnqq\",\"l\":\"datampcnezd\",\"pdyztqpszbtbx\":\"databqhbbzfcjmhpobu\"}}") .toObject(SwitchActivity.class); - Assertions.assertEquals("fskgxfmdpsreqor", model.name()); - Assertions.assertEquals("ulzqjqbw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("juakdsmwajalsen", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("nnyksskuscdnn", model.userProperties().get(0).name()); - Assertions.assertEquals("suuapktfvemwfwc", model.on().value()); - Assertions.assertEquals("qv", model.cases().get(0).value()); - Assertions.assertEquals("hbyklwc", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("xpkpsqk", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, + Assertions.assertEquals("kinodekppcpwc", model.name()); + Assertions.assertEquals("nuys", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("ldorqprjevueyzg", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("htbqzxqi", model.userProperties().get(0).name()); + Assertions.assertEquals("ajbgpuwk", model.on().value()); + Assertions.assertEquals("nkzyqizx", model.cases().get(0).value()); + Assertions.assertEquals("rutbfkynwwm", model.cases().get(0).activities().get(0).name()); + Assertions.assertEquals("pyrzazkalj", model.cases().get(0).activities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.cases().get(0).activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("bkhyqouzz", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("tlhnm", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("it", model.defaultActivities().get(0).name()); - Assertions.assertEquals("ebuvxxl", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("bzictf", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("bzloaepb", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("eggphwgixypgvwm", + model.cases().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("wpoywymtwhzdgbg", model.defaultActivities().get(0).name()); + Assertions.assertEquals("yzrzhkhmwcg", model.defaultActivities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, + model.defaultActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("gqxnyoakdp", model.defaultActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("chcootyscar", model.defaultActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("omwnkhiwqiqx", model.defaultActivities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SwitchActivity model = new SwitchActivity().withName("fskgxfmdpsreqor").withDescription("ulzqjqbw") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + SwitchActivity model = new SwitchActivity().withName("kinodekppcpwc") + .withDescription("nuys") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency() - .withActivity("juakdsmwajalsen") - 
.withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + new ActivityDependency().withActivity("ldorqprjevueyzg") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bbyvxk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("equocawcb") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("uwbvrbwafw") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + new ActivityDependency().withActivity("gu") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fwzpauwhfhynhol") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays - .asList(new UserProperty().withName("nnyksskuscdnn").withValue("dataoftapyrh"), - new UserProperty().withName("tjtqww").withValue("dataaxhsjw"), new UserProperty().withName( - "c").withValue( - "datatwywhrzntmzzzavx"))) - .withOn(new Expression().withValue("suuapktfvemwfwc")) - .withCases( - Arrays.asList( - new SwitchCase().withValue("qv") - .withActivities(Arrays.asList( - new Activity().withName("hbyklwc").withDescription("xpkpsqk") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("bkhyqouzz") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gnldz") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("tlhnm").withValue("datads"), - new UserProperty().withName("gfdvwshcc").withValue("datayqn"), - new UserProperty().withName("sjnrfpzlvaeo").withValue("datanskekhmomv"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("hukuypyeo").withDescription("aeabbxkldtw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ldgbgua") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("cdbudfwlpgpxyrfk").withValue("datalgpladqc"))) - .withAdditionalProperties(mapOf("type", "Activity")))), - new SwitchCase().withValue("bmuzpdjt").withActivities(Arrays.asList( - new Activity().withName("ycasxuhi").withDescription("mmiipf").withState(ActivityState.INACTIVE) + .withUserProperties(Arrays.asList(new UserProperty().withName("htbqzxqi").withValue("dataguawrwjbante"), + new UserProperty().withName("uyricaik").withValue("datavjktfpob"), + new UserProperty().withName("lrrqjioltdl").withValue("datapyksqns"))) + .withOn(new Expression().withValue("ajbgpuwk")) + .withCases(Arrays.asList( + new SwitchCase().withValue("nkzyqizx") + .withActivities(Arrays.asList( + new 
Activity().withName("rutbfkynwwm") + .withDescription("pyrzazkalj") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("nfpxrzqagmcivs") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("awiabyfzadeu") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tkfvdjgw") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("akqgabrbsuxgn") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) + new ActivityDependency().withActivity("bzloaepb") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ntgsju") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("kulozdoilhrxji").withValue("datajiv"), - new UserProperty().withName("orqlkycwnb").withValue("datalau"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("tufmujadippdntun").withDescription("eeprmebvxmaacr") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + Arrays.asList(new UserProperty().withName("eggphwgixypgvwm").withValue("dataa"), + new UserProperty().withName("hriua").withValue("dataqgkvkoynjucmyj"), + new UserProperty().withName("lafv").withValue("datandkvbc"), + new UserProperty().withName("qenbgymgjneoh").withValue("datakis"))) + .withAdditionalProperties(mapOf("type", "ehtrgybfumoro")), + new Activity().withName("tor") + .withDescription("hfuw") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("jcesm").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("ociunndgpxdjkwy") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cxugatvjxyvxd") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("uzdphogmrcmgu") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) + new ActivityDependency().withActivity("qnlqzymivjk") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ci") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("zag") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("asdrrfozz").withValue("dataygolz"), - new UserProperty().withName("njkbmfcrysvcab").withValue("datak"), - new UserProperty().withName("jmzqnbwnlo").withValue("dataz"))) - .withAdditionalProperties(mapOf("type", "Activity")))))) - .withDefaultActivities(Arrays.asList( - new Activity().withName("it").withDescription("ebuvxxl").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("bzictf") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, 
DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("chcootyscar").withValue("datamhiewvcpyskh"), - new UserProperty().withName("vkw").withValue("datatbvyclg"), - new UserProperty().withName("zbyxtprxt").withValue("datawvng"), new UserProperty() - .withName("csno").withValue("datakglygeuo"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("qux").withDescription("ekixouhcatozs").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs( - ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("wjxatghuixczycif") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("p") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("hlkx").withValue("dataanlyccdmkp"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("fzp").withDescription("wmdmwsflrdyrxl").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("m") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cflhwfrjyuhuthqd") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + Arrays.asList(new UserProperty().withName("sywdtg").withValue("datargqflpux"), + new UserProperty().withName("ak").withValue("datafr"))) + .withAdditionalProperties(mapOf("type", "rfih")), + new Activity().withName("j") + .withDescription("ko") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("opirgdsqcbxkwwnq") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ybwjvifgj") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("tzhkhyqjvgagrbir") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("ubbn").withValue("dataofyeggaauubkr"), + new UserProperty().withName("chkwwibxjpyt").withValue("dataak"))) + .withAdditionalProperties(mapOf("type", "scdoweorniyjq")))), + new SwitchCase().withValue("ggwzvdqpxicpoz") + .withActivities(Arrays.asList(new Activity().withName("domyqbeasbv") + .withDescription("fkzu") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("frwbmfq") + .withDependencyConditions(Arrays.asList()) .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("faeisboeap").withValue("dataraydlpu"), - new UserProperty().withName("kmakkwqrkaym").withValue("datagzbkliokuwhrpam"), - new UserProperty().withName("vx").withValue("datarl"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new 
Activity().withName("jerm").withDescription("kikgp").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("kcczb") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ukegkludfdh").withValue("dataorihqzfjyqadtq"), - new UserProperty().withName("tsa").withValue("datajjfa"), - new UserProperty().withName("plywtgilhxaa").withValue("datanuufenp"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + .withUserProperties( + Arrays.asList(new UserProperty().withName("aqltoxhfphaw").withValue("dataovqtvbusyqyfit"), + new UserProperty().withName("prbmmfqteox").withValue("dataikdcjmbwrhpw"), + new UserProperty().withName("udegykzdspbjks").withValue("datadsrwhjhivgeran"))) + .withAdditionalProperties(mapOf("type", "uraqpcspsbrd")))))) + .withDefaultActivities( + Arrays + .asList( + new Activity().withName("wpoywymtwhzdgbg") + .withDescription("yzrzhkhmwcg") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn( + Arrays + .asList( + new ActivityDependency().withActivity("gqxnyoakdp") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("nya") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("hjqslshceyhalbx") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays + .asList(new UserProperty().withName("omwnkhiwqiqx").withValue("datawbormfnntpocf"))) + .withAdditionalProperties(mapOf("type", "jw")), + new Activity().withName("kkdsyxabdd") + .withDescription("zohvpqtxlu") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn( + Arrays + .asList( + new ActivityDependency().withActivity("cxjbaloi") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("rkrfabffeahypjqa") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("hmvx").withValue("datapqfawwoxqj"), + new UserProperty().withName("mfyvgm").withValue("datawvvs"), + new UserProperty().withName("ynvgfaotokipndek").withValue("datazgdrkddzkkikrotn"), + new UserProperty().withName("x").withValue("dataeqdinwq"))) + .withAdditionalProperties(mapOf("type", "diecrbcv")), + new Activity().withName("woflfniislohftm") + .withDescription("xrx") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("tsgopmatubt") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + 
.withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("l") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dcoqm") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("cnfyknx") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("jacnbep").withValue("dataqhpkaamoovrb"), + new UserProperty().withName("buoqbclhnlqxuxr").withValue("datagxvkzhqpkckwaaf"), + new UserProperty().withName("yscjawqhpijur").withValue("dataoihxibji"), + new UserProperty().withName("m").withValue("dataj"))) + .withAdditionalProperties(mapOf("type", "dfbdxwywdyqpkw")))); model = BinaryData.fromObject(model).toObject(SwitchActivity.class); - Assertions.assertEquals("fskgxfmdpsreqor", model.name()); - Assertions.assertEquals("ulzqjqbw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("juakdsmwajalsen", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("nnyksskuscdnn", model.userProperties().get(0).name()); - Assertions.assertEquals("suuapktfvemwfwc", model.on().value()); - Assertions.assertEquals("qv", model.cases().get(0).value()); - Assertions.assertEquals("hbyklwc", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("xpkpsqk", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, + Assertions.assertEquals("kinodekppcpwc", model.name()); + Assertions.assertEquals("nuys", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); + Assertions.assertEquals("ldorqprjevueyzg", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("htbqzxqi", model.userProperties().get(0).name()); + Assertions.assertEquals("ajbgpuwk", model.on().value()); + Assertions.assertEquals("nkzyqizx", model.cases().get(0).value()); + Assertions.assertEquals("rutbfkynwwm", model.cases().get(0).activities().get(0).name()); + Assertions.assertEquals("pyrzazkalj", model.cases().get(0).activities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.cases().get(0).activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("bkhyqouzz", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("tlhnm", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("it", model.defaultActivities().get(0).name()); - Assertions.assertEquals("ebuvxxl", model.defaultActivities().get(0).description()); - 
Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("bzictf", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("bzloaepb", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals("eggphwgixypgvwm", + model.cases().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("wpoywymtwhzdgbg", model.defaultActivities().get(0).name()); + Assertions.assertEquals("yzrzhkhmwcg", model.defaultActivities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, + model.defaultActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("gqxnyoakdp", model.defaultActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("chcootyscar", model.defaultActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("omwnkhiwqiqx", model.defaultActivities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java index 27f8f82022404..90810dcb838e8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java @@ -23,173 +23,279 @@ public final class SwitchActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SwitchActivityTypeProperties model = BinaryData.fromString( - 
"{\"on\":{\"value\":\"gvqthlimvyzrdqpg\"},\"cases\":[{\"value\":\"rpxwldktphnis\",\"activities\":[{\"type\":\"Activity\",\"name\":\"jnbt\",\"description\":\"itpxpkbangjxbbyq\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ujgeppxiyovgha\",\"dependencyConditions\":[]},{\"activity\":\"m\",\"dependencyConditions\":[]},{\"activity\":\"cog\",\"dependencyConditions\":[]},{\"activity\":\"ameacjoaixhma\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"gvwvlqqnf\",\"value\":\"datawrws\"},{\"name\":\"yblwjhpibgalefjs\",\"value\":\"datanxrgmvzcibqy\"},{\"name\":\"qnshnbfd\",\"value\":\"dataxs\"}],\"\":{\"wqmfhg\":\"dataskprgztzcib\",\"ephohjorguif\":\"datanbsxoe\",\"gbmngkqejr\":\"datahv\"}},{\"type\":\"Activity\",\"name\":\"w\",\"description\":\"zzdlfayich\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"pwjvpglstxznk\",\"dependencyConditions\":[]},{\"activity\":\"jkjezunr\",\"dependencyConditions\":[]},{\"activity\":\"dygpdnnvepbocw\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"wdbxdko\",\"value\":\"dataummphbfpriveilx\"},{\"name\":\"pizyen\",\"value\":\"datajj\"},{\"name\":\"zmdpnersm\",\"value\":\"datavhgsu\"}],\"\":{\"zsyqpkpvb\":\"datajlvrjqakb\"}},{\"type\":\"Activity\",\"name\":\"g\",\"description\":\"ygu\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"vg\",\"dependencyConditions\":[]},{\"activity\":\"eiocacngiaa\",\"dependencyConditions\":[]},{\"activity\":\"gxdzhyc\",\"dependencyConditions\":[]},{\"activity\":\"teidfzof\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"nicmlomlnp\",\"value\":\"datakikyn\"}],\"\":{\"ogabcwvibjfkc\":\"datagquphqnuitumxhve\"}}]}],\"defaultActivities\":[{\"type\":\"Activity\",\"name\":\"n\",\"description\":\"tjdmdbtbdtrqiu\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xeqehgrjgvr\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"dwfyagvhe\":\"datagb\"}},{\"activity\":\"ptcuqzdwpcupejzo\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Completed\",\"Succeeded\"],\"\":{\"exapfypdfierut\":\"datavcsvtflcjxmt\",\"sahv\":\"dataedeygsrrgdimaqy\",\"cyrkcdo\":\"datawlibrwomdwzz\"}}],\"userProperties\":[{\"name\":\"g\",\"value\":\"dataaitihncysa\"},{\"name\":\"jlq\",\"value\":\"dataora\"},{\"name\":\"tbiskkceb\",\"value\":\"dataajlptydvebipkeo\"}],\"\":{\"tvxibpzh\":\"dataxiukghxdekq\",\"uevzqawjnwj\":\"datan\"}},{\"type\":\"Activity\",\"name\":\"siubp\",\"description\":\"isjghfal\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"bpkjseft\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"noecwabuf\":\"datamaknonaqyesw\",\"ayvkmptgpqx\":\"dataflwskbbe\"}},{\"activity\":\"y\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"kdbo\":\"dataozjrl\",\"bc\":\"datasxpcbglbpa\",\"o\":\"dataggf\"}},{\"activity\":\"xtbdgp\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"ztdho\":\"datacglimacztkypy\",\"yohbbtwpkgc\":\"dataarcumpxd\"}}],\"userProperties\":[{\"name\":\"mjcjejalybvxumt\",\"value\":\"datauvdoteidcwrmdq\"}],\"\":{\"ym\":\"datawegqmlv\",\"agroejsaer\":\"datayfszluzmzgat\"}},{\"type\":\"Activity\",\"name\":\"ckmcukzwzgio\",\"description\":\"rxgq\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"riwbvyraa\",\"dependencyConditions\":[\"Failed\"],\"\":{\"hmtpdvuixmkmyboh\":\"datakjyjc\",\"viadqnnmcdqzgep\":\"dataxmvckf\",\"fsvuyuyqc\":\"datayppkfraohiy
e\",\"ieitp\":\"datazpjnakqcsgoozyxu\"}},{\"activity\":\"kjyjhkrk\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\",\"Skipped\"],\"\":{\"jjiuirmcupbehq\":\"dataqpjnqyylkcbk\"}},{\"activity\":\"mhqihlxdh\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"bwhawref\":\"datavctmpxnbnho\"}}],\"userProperties\":[{\"name\":\"ttzlo\",\"value\":\"datat\"}],\"\":{\"bcwfp\":\"databxnqkbvhdbg\",\"tcucfbr\":\"datavmixfqqm\"}},{\"type\":\"Activity\",\"name\":\"m\",\"description\":\"tzugwurvpcwy\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ravdq\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"oimaykbmkkunfh\":\"datadem\",\"dpkevto\":\"datadvgcgunqitzw\",\"uppxdzpjewp\":\"datavqjrdydzq\"}},{\"activity\":\"lyszw\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"tfgbxiao\":\"datanxly\",\"n\":\"datazrouwkkwtoxl\",\"keeeakzys\":\"datavealwdltstxronbz\"}},{\"activity\":\"krxajta\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"q\":\"datahjwkdwpcm\"}}],\"userProperties\":[{\"name\":\"kfhlayg\",\"value\":\"dataxoreed\"},{\"name\":\"ruiycvourq\",\"value\":\"datamzsitrspp\"},{\"name\":\"cxigkpevtblmrjl\",\"value\":\"dataldggwaldte\"},{\"name\":\"nvcfumezczh\",\"value\":\"dataradklzgiqm\"}],\"\":{\"icmezexwzpgy\":\"dataxnoogmfujeci\"}}]}") + "{\"on\":{\"value\":\"mbyltd\"},\"cases\":[{\"value\":\"ehxotizv\",\"activities\":[{\"type\":\"dcotfovjiy\",\"name\":\"u\",\"description\":\"bco\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"yokwaxehxsweb\",\"dependencyConditions\":[]},{\"activity\":\"a\",\"dependencyConditions\":[]},{\"activity\":\"xffttfqlcxymcmo\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"weuazxtsgsqoads\",\"value\":\"dataacemw\"},{\"name\":\"icdgimsbumpp\",\"value\":\"databcarcyrftcjxz\"},{\"name\":\"xwwmhdlrfy\",\"value\":\"datannbxvxrcmrdmyj\"},{\"name\":\"ouxzod\",\"value\":\"datalehc\"}],\"\":{\"yxhqwoxmcob\":\"datazrckzir\"}},{\"type\":\"nybfmbl\",\"name\":\"hpqnzpf\",\"description\":\"ppkqufdmgmfyia\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"rilhyfxmrq\",\"dependencyConditions\":[]},{\"activity\":\"icknygzdrdicwm\",\"dependencyConditions\":[]},{\"activity\":\"eavawywofgccj\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"vvr\",\"value\":\"dataxclf\"},{\"name\":\"mx\",\"value\":\"datafqwyiuhhuftn\"}],\"\":{\"wxossokafymlstg\":\"dataxwxxfkft\",\"evtsrcs\":\"datameijgj\"}}]},{\"value\":\"xdwv\",\"activities\":[{\"type\":\"wo\",\"name\":\"walefmenbajzeelb\",\"description\":\"yaohiz\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"upft\",\"dependencyConditions\":[]},{\"activity\":\"ddohxvcsoqxydcqp\",\"dependencyConditions\":[]},{\"activity\":\"ywttdanu\",\"dependencyConditions\":[]},{\"activity\":\"iwtkhcmoc\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"mfugik\",\"value\":\"datambkstkkzt\"},{\"name\":\"xdsnmhndcr\",\"value\":\"dataveccmqenfgba\"},{\"name\":\"uuyt\",\"value\":\"datadenv\"},{\"name\":\"olfiigox\",\"value\":\"datahjyvpfisy\"}],\"\":{\"vhqvmilpg\":\"dataymccwvcfayll\",\"fgtedfmcoru\":\"dataeaqwogpetsm\",\"ypckhqoo\":\"dataiod\",\"voqsudtmkmg\":\"datani\"}},{\"type\":\"pv\",\"name\":\"kngvpsukkk\",\"description\":\"ghugfdugqhm\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"lcbpbtjtiidozf\",\"dependencyConditions\":[]}]
,\"userProperties\":[{\"name\":\"qurr\",\"value\":\"datanijdr\"},{\"name\":\"vohjg\",\"value\":\"dataoiikr\"},{\"name\":\"lzsgpoiccbzqko\",\"value\":\"dataja\"}],\"\":{\"suctt\":\"datatzkqnlzytaz\",\"gbfiosdizp\":\"datavt\",\"tgwvvenmu\":\"datacqnglzfgepblh\"}},{\"type\":\"oqhamr\",\"name\":\"trny\",\"description\":\"ixiduzrdvh\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"vfwlxkxlrungs\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"lxlezz\",\"value\":\"datammzzoiudel\"}],\"\":{\"cgxwa\":\"dataxbyxajia\"}},{\"type\":\"udnygtsjafvzdsjb\",\"name\":\"uzybmsyzz\",\"description\":\"bnmzjwhybsg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"z\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"qmwmwoggbxiasfi\",\"value\":\"dataucnp\"},{\"name\":\"lfedwhvhlzpvpix\",\"value\":\"datajvycodfubnvdibb\"}],\"\":{\"lypauqyaisd\":\"datatbtmhanmptxlrv\",\"krma\":\"datawokgvksox\",\"vahjlvbnl\":\"dataenl\"}}]},{\"value\":\"bgojops\",\"activities\":[{\"type\":\"yy\",\"name\":\"ueifmtgntlfdiqzv\",\"description\":\"ff\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"wl\",\"dependencyConditions\":[]},{\"activity\":\"dgpud\",\"dependencyConditions\":[]},{\"activity\":\"imehdxcytyfhw\",\"dependencyConditions\":[]},{\"activity\":\"bhapfny\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"tebehj\",\"value\":\"datamfejeihnhwg\"}],\"\":{\"fzcpy\":\"databc\",\"obutkqwrsx\":\"datarngfujvxafrqqfgu\",\"ikiuxvdnchrvsfnl\":\"datacaxgr\"}},{\"type\":\"puasbfc\",\"name\":\"lvakhdigxxtf\",\"description\":\"asdhdiiwvznff\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"tpd\",\"dependencyConditions\":[]},{\"activity\":\"hbpf\",\"dependencyConditions\":[]},{\"activity\":\"mrhxpmt\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"xfglilfjcowrzqy\",\"value\":\"datacjxsgrt\"},{\"name\":\"itaamp\",\"value\":\"dataenyvpxpcj\"},{\"name\":\"bnffex\",\"value\":\"datazijtctfewniwt\"},{\"name\":\"plwyluvqp\",\"value\":\"datawvoyqsnt\"}],\"\":{\"tosoanxinlmi\":\"dataxvezoaldssesxc\",\"uivzsjf\":\"datacgu\"}},{\"type\":\"enhyhd\",\"name\":\"saykr\",\"description\":\"wybbda\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ltsxmd\",\"dependencyConditions\":[]},{\"activity\":\"cetj\",\"dependencyConditions\":[]},{\"activity\":\"apfieau\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"xdirdcxu\",\"value\":\"dataamrxlhf\"},{\"name\":\"jcqofpwjjtd\",\"value\":\"datafyivvtxqp\"},{\"name\":\"mqogtohzfvysv\",\"value\":\"datadbjd\"},{\"name\":\"htxvmnyslpdq\",\"value\":\"datamzjppblnervt\"}],\"\":{\"si\":\"datadtnjxvtvyy\"}}]}],\"defaultActivities\":[{\"type\":\"ygn\",\"name\":\"xgzzq\",\"description\":\"svjhm\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"sugkdvmgpeitfbgy\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\"],\"\":{\"ookhcu\":\"datadsgfztmhvuoavp\",\"sutseejtfnjrrxf\":\"datawgbjzznmjwqwyhh\"}},{\"activity\":\"uywzpcx\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"h\":\"datalowm\",\"tceehqeahlfujp\":\"datauhywdckvcof\",\"uumldunalo\":\"datavtakijwkwed\"}}],\"userProperties\":[{\"name\":\"ikfqcbe\",\"value\":\"datansszu\"},{\"name\":\"dvhqecqqiulwfz\",\"value\":\"dataszgbgtwaquiuzsn\"},{\"name\":\"jgnmpu\",\"value\":\"datasjfvdajmczlvcxm\"}],\"\":{\"tbgkx\":\"databrp\",\"lszcwo
mayr\":\"dataxwjzleeup\",\"dfxnxtiwi\":\"dataatrjpa\",\"ihsgt\":\"datanho\"}},{\"type\":\"gmfnpeluvxsicp\",\"name\":\"ukupngorw\",\"description\":\"yrguxfjjgcfqfwgr\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jgxkrppxjnrujd\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"zdakfxzhapcwhj\":\"dataladibsjirhaqedfu\"}},{\"activity\":\"mjfr\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Completed\",\"Skipped\"],\"\":{\"bnvwhqctqdyfug\":\"datayyebgfffnt\",\"y\":\"datasmxvevudywny\"}},{\"activity\":\"naynlxwukpqcf\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Completed\",\"Failed\"],\"\":{\"aqrxzjmxtm\":\"datadslrrtvahizmz\",\"e\":\"databdwqwh\"}}],\"userProperties\":[{\"name\":\"fjzyin\",\"value\":\"datauuabe\"},{\"name\":\"sqk\",\"value\":\"datatb\"}],\"\":{\"s\":\"datayrifhuyavhesqn\",\"nfakcchcnmzvhdu\":\"datateprs\",\"chgvwggylbmfrxof\":\"dataigadpq\"}},{\"type\":\"yscwv\",\"name\":\"qzfg\",\"description\":\"yrppsowdo\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"tbpaircnupmz\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"ejvsfhjrsxrml\":\"dataoebzofmm\",\"o\":\"datasz\"}},{\"activity\":\"pqnp\",\"dependencyConditions\":[\"Completed\"],\"\":{\"b\":\"datasekdfhnhbkt\"}},{\"activity\":\"on\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"nqneo\":\"dataxetqknzevbyp\"}},{\"activity\":\"zcrmngaqlinlwc\",\"dependencyConditions\":[\"Failed\"],\"\":{\"u\":\"dataxctojxtkmdegmiv\",\"abt\":\"datalpctlbuobi\",\"ktg\":\"datarkmktcs\",\"epmhohq\":\"dataofzzsohcaet\"}}],\"userProperties\":[{\"name\":\"y\",\"value\":\"datatliwoodndu\"},{\"name\":\"tykyzirgiyqzuhn\",\"value\":\"dataa\"},{\"name\":\"dttgbsd\",\"value\":\"dataruwvr\"},{\"name\":\"xoozyhuc\",\"value\":\"datadeqslhzzy\"}],\"\":{\"oqusrlkphtyqydrn\":\"dataazol\",\"dlhuslqikocgzjm\":\"datasfaemkbpdp\"}}]}") .toObject(SwitchActivityTypeProperties.class); - Assertions.assertEquals("gvqthlimvyzrdqpg", model.on().value()); - Assertions.assertEquals("rpxwldktphnis", model.cases().get(0).value()); - Assertions.assertEquals("jnbt", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("itpxpkbangjxbbyq", model.cases().get(0).activities().get(0).description()); + Assertions.assertEquals("mbyltd", model.on().value()); + Assertions.assertEquals("ehxotizv", model.cases().get(0).value()); + Assertions.assertEquals("u", model.cases().get(0).activities().get(0).name()); + Assertions.assertEquals("bco", model.cases().get(0).activities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ujgeppxiyovgha", + Assertions.assertEquals("yokwaxehxsweb", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("gvwvlqqnf", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("n", model.defaultActivities().get(0).name()); - Assertions.assertEquals("tjdmdbtbdtrqiu", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("xeqehgrjgvr", 
model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, + Assertions.assertEquals("weuazxtsgsqoads", + model.cases().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("xgzzq", model.defaultActivities().get(0).name()); + Assertions.assertEquals("svjhm", model.defaultActivities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.defaultActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("sugkdvmgpeitfbgy", model.defaultActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("g", model.defaultActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("ikfqcbe", model.defaultActivities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { SwitchActivityTypeProperties model = new SwitchActivityTypeProperties() - .withOn(new Expression().withValue("gvqthlimvyzrdqpg")) - .withCases(Arrays.asList(new SwitchCase().withValue("rpxwldktphnis").withActivities(Arrays.asList( - new Activity().withName("jnbt").withDescription("itpxpkbangjxbbyq").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ujgeppxiyovgha") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("m").withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cog").withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ameacjoaixhma").withDependencyConditions(Arrays.asList()) + .withOn(new Expression().withValue("mbyltd")) + .withCases(Arrays.asList( + new SwitchCase().withValue("ehxotizv") + .withActivities(Arrays.asList( + new Activity().withName("u") + .withDescription("bco") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("yokwaxehxsweb") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("a") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("xffttfqlcxymcmo") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("weuazxtsgsqoads").withValue("dataacemw"), + new UserProperty().withName("icdgimsbumpp").withValue("databcarcyrftcjxz"), + new UserProperty().withName("xwwmhdlrfy").withValue("datannbxvxrcmrdmyj"), + new UserProperty().withName("ouxzod").withValue("datalehc"))) + .withAdditionalProperties(mapOf("type", "dcotfovjiy")), + new Activity().withName("hpqnzpf") + .withDescription("ppkqufdmgmfyia") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("rilhyfxmrq") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new 
ActivityDependency().withActivity("icknygzdrdicwm") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("eavawywofgccj") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("vvr").withValue("dataxclf"), + new UserProperty().withName("mx").withValue("datafqwyiuhhuftn"))) + .withAdditionalProperties(mapOf("type", "nybfmbl")))), + new SwitchCase().withValue("xdwv") + .withActivities(Arrays.asList( + new Activity().withName("walefmenbajzeelb") + .withDescription("yaohiz") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("upft") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ddohxvcsoqxydcqp") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("ywttdanu") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("iwtkhcmoc") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("mfugik").withValue("datambkstkkzt"), + new UserProperty().withName("xdsnmhndcr").withValue("dataveccmqenfgba"), + new UserProperty().withName("uuyt").withValue("datadenv"), + new UserProperty().withName("olfiigox").withValue("datahjyvpfisy"))) + .withAdditionalProperties(mapOf("type", "wo")), + new Activity().withName("kngvpsukkk") + .withDescription("ghugfdugqhm") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("lcbpbtjtiidozf") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("qurr").withValue("datanijdr"), + new UserProperty().withName("vohjg").withValue("dataoiikr"), + new UserProperty().withName("lzsgpoiccbzqko").withValue("dataja"))) + .withAdditionalProperties(mapOf("type", "pv")), + new Activity().withName("trny") + .withDescription("ixiduzrdvh") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("vfwlxkxlrungs") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("lxlezz").withValue("datammzzoiudel"))) + .withAdditionalProperties(mapOf("type", "oqhamr")), + new Activity().withName("uzybmsyzz") + .withDescription("bnmzjwhybsg") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("z") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("qmwmwoggbxiasfi").withValue("dataucnp"), + new UserProperty().withName("lfedwhvhlzpvpix").withValue("datajvycodfubnvdibb"))) + .withAdditionalProperties(mapOf("type", "udnygtsjafvzdsjb")))), + new SwitchCase().withValue("bgojops") + .withActivities(Arrays.asList( + new Activity().withName("ueifmtgntlfdiqzv") + .withDescription("ff") + 
.withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("wl") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dgpud") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("imehdxcytyfhw") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("bhapfny") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("tebehj").withValue("datamfejeihnhwg"))) + .withAdditionalProperties(mapOf("type", "yy")), + new Activity().withName("lvakhdigxxtf") + .withDescription("asdhdiiwvznff") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("tpd") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("hbpf") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("mrhxpmt") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("xfglilfjcowrzqy").withValue("datacjxsgrt"), + new UserProperty().withName("itaamp").withValue("dataenyvpxpcj"), + new UserProperty().withName("bnffex").withValue("datazijtctfewniwt"), + new UserProperty().withName("plwyluvqp").withValue("datawvoyqsnt"))) + .withAdditionalProperties(mapOf("type", "puasbfc")), + new Activity().withName("saykr") + .withDescription("wybbda") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("ltsxmd") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("cetj") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("apfieau") + .withDependencyConditions(Arrays.asList()) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("xdirdcxu").withValue("dataamrxlhf"), + new UserProperty().withName("jcqofpwjjtd").withValue("datafyivvtxqp"), + new UserProperty().withName("mqogtohzfvysv").withValue("datadbjd"), + new UserProperty().withName("htxvmnyslpdq").withValue("datamzjppblnervt"))) + .withAdditionalProperties(mapOf("type", "enhyhd")))))) + .withDefaultActivities(Arrays.asList( + new Activity() + .withName("xgzzq") + .withDescription("svjhm") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList(new ActivityDependency() + .withActivity("sugkdvmgpeitfbgy") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("uywzpcx") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new 
UserProperty().withName("gvwvlqqnf").withValue("datawrws"), - new UserProperty().withName("yblwjhpibgalefjs").withValue("datanxrgmvzcibqy"), - new UserProperty().withName("qnshnbfd").withValue("dataxs"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("w").withDescription("zzdlfayich").withState(ActivityState.INACTIVE) + .withUserProperties(Arrays.asList(new UserProperty().withName("ikfqcbe").withValue("datansszu"), + new UserProperty().withName("dvhqecqqiulwfz").withValue("dataszgbgtwaquiuzsn"), + new UserProperty().withName("jgnmpu").withValue("datasjfvdajmczlvcxm"))) + .withAdditionalProperties(mapOf("type", "ygn")), + new Activity().withName("ukupngorw") + .withDescription("yrguxfjjgcfqfwgr") + .withState(ActivityState.INACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("pwjvpglstxznk").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("jgxkrppxjnrujd") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("jkjezunr").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("mjfr") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dygpdnnvepbocw") - .withDependencyConditions(Arrays.asList()).withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("wdbxdko").withValue("dataummphbfpriveilx"), - new UserProperty().withName("pizyen").withValue("datajj"), - new UserProperty().withName("zmdpnersm").withValue("datavhgsu"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("g").withDescription("ygu").withState(ActivityState.INACTIVE) + new ActivityDependency().withActivity("naynlxwukpqcf") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("fjzyin").withValue("datauuabe"), + new UserProperty().withName("sqk").withValue("datatb"))) + .withAdditionalProperties(mapOf("type", "gmfnpeluvxsicp")), + new Activity().withName("qzfg") + .withDescription("yrppsowdo") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("vg").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("tbpaircnupmz") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("eiocacngiaa").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("pqnp") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gxdzhyc").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("on") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) 
.withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("teidfzof").withDependencyConditions(Arrays.asList()) + new ActivityDependency().withActivity("zcrmngaqlinlwc") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("nicmlomlnp").withValue("datakikyn"))) - .withAdditionalProperties(mapOf("type", "Activity")))))) - .withDefaultActivities( - Arrays - .asList( - new Activity() - .withName("n").withDescription("tjdmdbtbdtrqiu").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED).withDependsOn( - Arrays - .asList( - new ActivityDependency().withActivity("xeqehgrjgvr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ptcuqzdwpcupejzo") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("g").withValue( - "dataaitihncysa"), new UserProperty().withName("jlq").withValue("dataora"), - new UserProperty().withName("tbiskkceb").withValue("dataajlptydvebipkeo"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("siubp").withDescription("isjghfal").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("bpkjseft") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("y") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xtbdgp") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("mjcjejalybvxumt").withValue("datauvdoteidcwrmdq"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("ckmcukzwzgio").withDescription("rxgq").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED).withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("riwbvyraa") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kjyjhkrk") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mhqihlxdh") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ttzlo").withValue("datat"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("m").withDescription("tzugwurvpcwy").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs( - ActivityOnInactiveMarkAs.SUCCEEDED) - 
.withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ravdq") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lyszw") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("krxajta") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("kfhlayg").withValue("dataxoreed"), - new UserProperty().withName("ruiycvourq").withValue("datamzsitrspp"), - new UserProperty().withName("cxigkpevtblmrjl").withValue("dataldggwaldte"), - new UserProperty().withName("nvcfumezczh").withValue("dataradklzgiqm"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + .withUserProperties(Arrays.asList(new UserProperty().withName("y").withValue("datatliwoodndu"), + new UserProperty().withName("tykyzirgiyqzuhn").withValue("dataa"), + new UserProperty().withName("dttgbsd").withValue("dataruwvr"), + new UserProperty().withName("xoozyhuc").withValue("datadeqslhzzy"))) + .withAdditionalProperties(mapOf("type", "yscwv")))); model = BinaryData.fromObject(model).toObject(SwitchActivityTypeProperties.class); - Assertions.assertEquals("gvqthlimvyzrdqpg", model.on().value()); - Assertions.assertEquals("rpxwldktphnis", model.cases().get(0).value()); - Assertions.assertEquals("jnbt", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("itpxpkbangjxbbyq", model.cases().get(0).activities().get(0).description()); + Assertions.assertEquals("mbyltd", model.on().value()); + Assertions.assertEquals("ehxotizv", model.cases().get(0).value()); + Assertions.assertEquals("u", model.cases().get(0).activities().get(0).name()); + Assertions.assertEquals("bco", model.cases().get(0).activities().get(0).description()); Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ujgeppxiyovgha", + Assertions.assertEquals("yokwaxehxsweb", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("gvwvlqqnf", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("n", model.defaultActivities().get(0).name()); - Assertions.assertEquals("tjdmdbtbdtrqiu", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("xeqehgrjgvr", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, + Assertions.assertEquals("weuazxtsgsqoads", + model.cases().get(0).activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("xgzzq", model.defaultActivities().get(0).name()); + Assertions.assertEquals("svjhm", model.defaultActivities().get(0).description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); + 
Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.defaultActivities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("sugkdvmgpeitfbgy", model.defaultActivities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.FAILED, model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("g", model.defaultActivities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("ikfqcbe", model.defaultActivities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java index fddb70523eee5..97ccd3bc9aa6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java @@ -21,66 +21,63 @@ public final class SwitchCaseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SwitchCase model = BinaryData.fromString( - "{\"value\":\"lj\",\"activities\":[{\"type\":\"Activity\",\"name\":\"mpydkgbcufhk\",\"description\":\"xxzhqouon\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"wdwzf\",\"dependencyConditions\":[\"Completed\"],\"\":{\"vmbsmxhsqdotbnf\":\"datala\",\"jfkuqvt\":\"dataniybotuq\",\"gqmxmiwfzrhilyp\":\"datarbsgwoykcvwqyfix\",\"n\":\"datax\"}},{\"activity\":\"quxut\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\",\"Completed\"],\"\":{\"vbgjhmyzsq\":\"dataaijnzqnqwka\",\"ir\":\"datavmtidmycyyajlnot\"}},{\"activity\":\"ipnclnbfxme\",\"dependencyConditions\":[\"Failed\"],\"\":{\"bqbwbw\":\"datafrfzghnjaqzdzkyq\"}},{\"activity\":\"twmmvbahftkcey\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Skipped\"],\"\":{\"pzrcq\":\"datatlqytxft\",\"phkmwbtrqklondbv\":\"datasjqrgtapawp\",\"rbjtjvqdwz\":\"dataqtpebaawzsxp\",\"eqlikyctun\":\"datavxdgten\"}}],\"userProperties\":[{\"name\":\"ehxvktlrc\",\"value\":\"datauad\"}],\"\":{\"n\":\"datajsu\",\"amxxpfyl\":\"dataqfiz\"}},{\"type\":\"Activity\",\"name\":\"pftw\",\"description\":\"pu\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"aolfdgjrgp\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Skipped\"],\"\":{\"kzznarnjueq\":\"dataaqarp\"}},{\"activity\":\"uzjgv\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Completed\",\"Succeeded\"],\"\":{\"hfxy\":\"dataadrmt\",\"azbfrqo\":\"datann\"}}],\"userProperties\":[{\"name\":\"xndfrxnvwqyhklho\",\"value\":\"datascpj\"}],\"\":{\"jbfmrsjgm\":\"datan\",\"kghd\":\"datasamhxkjjhflrgx\"}}]}") + 
"{\"value\":\"oq\",\"activities\":[{\"type\":\"cwekbbvtcoxd\",\"name\":\"gjdpy\",\"description\":\"m\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"tjcpoynbsttur\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"ecsreo\":\"datagaiusglg\",\"xbmgheyamoety\":\"dataswmkxbbziffpvvg\",\"xseyjqklaihqrbrm\":\"dataevyitidi\",\"pydjsubt\":\"datahljqqbue\"}}],\"userProperties\":[{\"name\":\"vcveomdlrsjgu\",\"value\":\"datadf\"},{\"name\":\"dqvuqufaowu\",\"value\":\"databujjvojm\"}],\"\":{\"lx\":\"datavovj\",\"rh\":\"datawfqvlhjaw\",\"ffwqbdvgfgirrzyn\":\"datafgzlrnfmmefppjxt\",\"nqfrxggvs\":\"datadvdrn\"}},{\"type\":\"xvfkqaqf\",\"name\":\"imfpnp\",\"description\":\"dgjndwtdorvx\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"effpidwqrzdzjm\",\"dependencyConditions\":[\"Completed\"],\"\":{\"uetfxz\":\"datapplcoqb\",\"s\":\"dataj\",\"dgq\":\"datadlokhimzfltxqpoz\",\"jwjnvhu\":\"datakfevhgjk\"}},{\"activity\":\"wmwvqbpazjmfqu\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"hdyifjvfrg\":\"datagjaa\",\"cwpjpkaf\":\"datanquj\",\"vuyc\":\"datakaf\",\"qsmk\":\"datatlmnrdkiqsqbdvk\"}},{\"activity\":\"iqljxn\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\"],\"\":{\"ptsdlcsrhttmh\":\"datarofqh\",\"duzqu\":\"datagwov\"}}],\"userProperties\":[{\"name\":\"cwnlyq\",\"value\":\"datakknulrqpacu\"}],\"\":{\"r\":\"datasjawbnxcizeuifnd\",\"fs\":\"datanzjyghq\",\"vgec\":\"dataln\",\"cgrkgt\":\"dataems\"}}]}") .toObject(SwitchCase.class); - Assertions.assertEquals("lj", model.value()); - Assertions.assertEquals("mpydkgbcufhk", model.activities().get(0).name()); - Assertions.assertEquals("xxzhqouon", model.activities().get(0).description()); + Assertions.assertEquals("oq", model.value()); + Assertions.assertEquals("gjdpy", model.activities().get(0).name()); + Assertions.assertEquals("m", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("wdwzf", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("tjcpoynbsttur", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ehxvktlrc", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("vcveomdlrsjgu", model.activities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SwitchCase model = new SwitchCase().withValue("lj").withActivities(Arrays.asList( - new Activity().withName("mpydkgbcufhk").withDescription("xxzhqouon").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("wdwzf") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("quxut") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - 
.withAdditionalProperties(mapOf()), - new ActivityDependency() - .withActivity("ipnclnbfxme").withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("twmmvbahftkcey") + SwitchCase model = new SwitchCase().withValue("oq") + .withActivities(Arrays.asList( + new Activity().withName("gjdpy") + .withDescription("m") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("tjcpoynbsttur") .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("ehxvktlrc").withValue("datauad"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("pftw").withDescription("pu").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("aolfdgjrgp") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("uzjgv") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("xndfrxnvwqyhklho").withValue("datascpj"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + .withUserProperties(Arrays.asList(new UserProperty().withName("vcveomdlrsjgu").withValue("datadf"), + new UserProperty().withName("dqvuqufaowu").withValue("databujjvojm"))) + .withAdditionalProperties(mapOf("type", "cwekbbvtcoxd")), + new Activity().withName("imfpnp") + .withDescription("dgjndwtdorvx") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("effpidwqrzdzjm") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("wmwvqbpazjmfqu") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("iqljxn") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.COMPLETED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("cwnlyq").withValue("datakknulrqpacu"))) + .withAdditionalProperties(mapOf("type", "xvfkqaqf")))); model = BinaryData.fromObject(model).toObject(SwitchCase.class); - Assertions.assertEquals("lj", model.value()); - Assertions.assertEquals("mpydkgbcufhk", model.activities().get(0).name()); - Assertions.assertEquals("xxzhqouon", model.activities().get(0).description()); + Assertions.assertEquals("oq", model.value()); + Assertions.assertEquals("gjdpy", model.activities().get(0).name()); + Assertions.assertEquals("m", model.activities().get(0).description()); 
Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("wdwzf", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("tjcpoynbsttur", model.activities().get(0).dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.COMPLETED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ehxvktlrc", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("vcveomdlrsjgu", model.activities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java index 2a1e7295b3250..2bd4d61886b2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java @@ -11,16 +11,19 @@ public final class SybaseSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SybaseSource model = BinaryData.fromString( - "{\"type\":\"SybaseSource\",\"query\":\"datasyszdtgwmqcutkkp\",\"queryTimeout\":\"dataurtmccdejtoypl\",\"additionalColumns\":\"datavjutckfhmdcvlb\",\"sourceRetryCount\":\"dataezvujpbmz\",\"sourceRetryWait\":\"datalgm\",\"maxConcurrentConnections\":\"dataxwkkbnhmdtj\",\"disableMetricsCollection\":\"datapfoispchhvvmvs\",\"\":{\"ggtsovozy\":\"dataqdhazmc\",\"ugubob\":\"datapkrncjrq\"}}") + "{\"type\":\"upcio\",\"query\":\"dataqzhtmeuip\",\"queryTimeout\":\"datasrpsjkqfabju\",\"additionalColumns\":\"datats\",\"sourceRetryCount\":\"datarjdeyfnqanbadkzp\",\"sourceRetryWait\":\"datatuplpkjexq\",\"maxConcurrentConnections\":\"datanzlalugoeftrbxom\",\"disableMetricsCollection\":\"dataovarfqverx\",\"\":{\"epgvj\":\"datauq\",\"mddwqiucpjprtb\":\"datallzykalbau\",\"zelxrft\":\"datasjtj\",\"lvrofhhitjhh\":\"datadb\"}}") .toObject(SybaseSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SybaseSource model = new SybaseSource().withSourceRetryCount("dataezvujpbmz").withSourceRetryWait("datalgm") - .withMaxConcurrentConnections("dataxwkkbnhmdtj").withDisableMetricsCollection("datapfoispchhvvmvs") - .withQueryTimeout("dataurtmccdejtoypl").withAdditionalColumns("datavjutckfhmdcvlb") - .withQuery("datasyszdtgwmqcutkkp"); + SybaseSource model = new SybaseSource().withSourceRetryCount("datarjdeyfnqanbadkzp") + .withSourceRetryWait("datatuplpkjexq") + .withMaxConcurrentConnections("datanzlalugoeftrbxom") + .withDisableMetricsCollection("dataovarfqverx") + .withQueryTimeout("datasrpsjkqfabju") + .withAdditionalColumns("datats") + .withQuery("dataqzhtmeuip"); model = BinaryData.fromObject(model).toObject(SybaseSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java index 2a32674d3a749..e54083eabe0b5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java @@ -19,30 +19,35 @@ public final class SybaseTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SybaseTableDataset model = BinaryData.fromString( - "{\"type\":\"SybaseTable\",\"typeProperties\":{\"tableName\":\"datacrolrzesbomp\"},\"description\":\"kymunwjivtb\",\"structure\":\"datazbdjrdfeujywdal\",\"schema\":\"datadeqngc\",\"linkedServiceName\":{\"referenceName\":\"ydzin\",\"parameters\":{\"xrsi\":\"dataulpozmdahyc\",\"oiaf\":\"dataoebld\",\"x\":\"datajkrtnhrevimxm\"}},\"parameters\":{\"oqtbfkvuozbzc\":{\"type\":\"Object\",\"defaultValue\":\"datatygvdwd\"},\"rlcydjht\":{\"type\":\"Object\",\"defaultValue\":\"dataekwanklp\"}},\"annotations\":[\"dataerwi\",\"datandurdonkgobxbl\",\"datadolenrsw\",\"datanpdrgnmzaofroe\"],\"folder\":{\"name\":\"kievyrej\"},\"\":{\"ftusdwmnrt\":\"databk\",\"nrovome\":\"datavbuc\"}}") + "{\"type\":\"vxjdqosxzmdz\",\"typeProperties\":{\"tableName\":\"databk\"},\"description\":\"bqfufke\",\"structure\":\"datafkicxhsevmnkggh\",\"schema\":\"dataryjok\",\"linkedServiceName\":{\"referenceName\":\"lwvbjsarxs\",\"parameters\":{\"gieabbf\":\"datapabwbpzgfgqpudhg\",\"avlozu\":\"datax\",\"jsfmaxcebnbe\":\"datagsnuhwy\"}},\"parameters\":{\"v\":{\"type\":\"String\",\"defaultValue\":\"dataqqerwqxpj\"},\"httuobrx\":{\"type\":\"Array\",\"defaultValue\":\"dataf\"},\"cwtfmabvbmn\":{\"type\":\"Int\",\"defaultValue\":\"dataytebjkjge\"}},\"annotations\":[\"dataofxfmhlvyqnslbq\",\"datamlqkiekhj\",\"dataqqrugwespscvs\"],\"folder\":{\"name\":\"tluwozfvzasupc\"},\"\":{\"cgmlmpn\":\"dataxcvwioqhc\"}}") .toObject(SybaseTableDataset.class); - Assertions.assertEquals("kymunwjivtb", model.description()); - Assertions.assertEquals("ydzin", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("oqtbfkvuozbzc").type()); - Assertions.assertEquals("kievyrej", model.folder().name()); + Assertions.assertEquals("bqfufke", model.description()); + Assertions.assertEquals("lwvbjsarxs", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("v").type()); + Assertions.assertEquals("tluwozfvzasupc", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SybaseTableDataset model = new SybaseTableDataset().withDescription("kymunwjivtb") - .withStructure("datazbdjrdfeujywdal").withSchema("datadeqngc") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ydzin") - .withParameters(mapOf("xrsi", "dataulpozmdahyc", "oiaf", "dataoebld", "x", "datajkrtnhrevimxm"))) - .withParameters(mapOf("oqtbfkvuozbzc", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datatygvdwd"), "rlcydjht", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataekwanklp"))) - .withAnnotations(Arrays.asList("dataerwi", "datandurdonkgobxbl", "datadolenrsw", "datanpdrgnmzaofroe")) - .withFolder(new DatasetFolder().withName("kievyrej")).withTableName("datacrolrzesbomp"); + 
SybaseTableDataset model = new SybaseTableDataset().withDescription("bqfufke") + .withStructure("datafkicxhsevmnkggh") + .withSchema("dataryjok") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lwvbjsarxs") + .withParameters( + mapOf("gieabbf", "datapabwbpzgfgqpudhg", "avlozu", "datax", "jsfmaxcebnbe", "datagsnuhwy"))) + .withParameters(mapOf("v", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqqerwqxpj"), + "httuobrx", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataf"), + "cwtfmabvbmn", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataytebjkjge"))) + .withAnnotations(Arrays.asList("dataofxfmhlvyqnslbq", "datamlqkiekhj", "dataqqrugwespscvs")) + .withFolder(new DatasetFolder().withName("tluwozfvzasupc")) + .withTableName("databk"); model = BinaryData.fromObject(model).toObject(SybaseTableDataset.class); - Assertions.assertEquals("kymunwjivtb", model.description()); - Assertions.assertEquals("ydzin", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("oqtbfkvuozbzc").type()); - Assertions.assertEquals("kievyrej", model.folder().name()); + Assertions.assertEquals("bqfufke", model.description()); + Assertions.assertEquals("lwvbjsarxs", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.STRING, model.parameters().get("v").type()); + Assertions.assertEquals("tluwozfvzasupc", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java index 56e850d0e9774..4eaa4ed4eb56d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java @@ -10,13 +10,13 @@ public final class SybaseTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - SybaseTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datawsicvwqzoc\"}") - .toObject(SybaseTableDatasetTypeProperties.class); + SybaseTableDatasetTypeProperties model + = BinaryData.fromString("{\"tableName\":\"dataxuyi\"}").toObject(SybaseTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SybaseTableDatasetTypeProperties model = new SybaseTableDatasetTypeProperties().withTableName("datawsicvwqzoc"); + SybaseTableDatasetTypeProperties model = new SybaseTableDatasetTypeProperties().withTableName("dataxuyi"); model = BinaryData.fromObject(model).toObject(SybaseTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java index 2a98a0bc1f025..0237361306ea5 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java @@ -31,88 +31,82 @@ public final class SynapseNotebookActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SynapseNotebookActivity model = BinaryData.fromString( - "{\"type\":\"SynapseNotebook\",\"typeProperties\":{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"datamcblmza\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"datasmpcajx\"},\"parameters\":{\"pysg\":{\"value\":\"datapdkrjlwrqhehnazc\",\"type\":\"int\"},\"iumccomjxx\":{\"value\":\"dataivoahek\",\"type\":\"bool\"},\"hrkhfyaxiw\":{\"value\":\"dataaf\",\"type\":\"float\"},\"rbogzwwyub\":{\"value\":\"datazsimbgvrksjjq\",\"type\":\"string\"}},\"executorSize\":\"datapocjyjqem\",\"conf\":\"datakjxuxmkkgbyjfe\",\"driverSize\":\"databnwfekpgllezvr\",\"numExecutors\":\"datawsffk\",\"configurationType\":\"Customized\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"databsvkijynvguh\"},\"sparkConfig\":{\"sclrvquwhmncewcf\":\"datan\",\"xsybtpqgxzogclu\":\"datansoim\",\"i\":\"dataicnckdxflg\"}},\"linkedServiceName\":{\"referenceName\":\"ce\",\"parameters\":{\"mrsbgjjuhzf\":\"datacerrpal\"}},\"policy\":{\"timeout\":\"dataabyvmchhkwlmit\",\"retry\":\"databivhkdxhnv\",\"retryIntervalInSeconds\":559455759,\"secureInput\":true,\"secureOutput\":true,\"\":{\"bgv\":\"datas\",\"vvhovkadmih\":\"datazzukhl\",\"nl\":\"datab\",\"fjqobbpjlrvxryjx\":\"datajzdahckijvikpgz\"}},\"name\":\"dlgignja\",\"description\":\"cixwtwzgbuhcrwqr\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"mfufs\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\",\"Skipped\"],\"\":{\"lcqaafuwxeho\":\"datae\",\"q\":\"dataazbgcbd\",\"vtimyccdognhw\":\"datay\"}},{\"activity\":\"vgowkak\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"dv\":\"datajiykwbytuzhcpx\",\"y\":\"datafxvrfez\"}},{\"activity\":\"iyovcrmoalvea\",\"dependencyConditions\":[\"Failed\"],\"\":{\"jpedowmhgzrri\":\"datazr\",\"qqfycw\":\"datavyugxnopd\",\"ki\":\"dataupxf\"}},{\"activity\":\"mhvpxptq\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"spnsbbhdjee\":\"datamxnjk\"}}],\"userProperties\":[{\"name\":\"cykihym\",\"value\":\"datagukf\"},{\"name\":\"kqok\",\"value\":\"datavxknygimoh\"},{\"name\":\"llxjyxhwv\",\"value\":\"datayupszch\"},{\"name\":\"wnudd\",\"value\":\"dataazvsmnxblc\"}],\"\":{\"yn\":\"dataymgfwdxukmeoxe\",\"uqm\":\"datarbwvai\",\"ztorvwgpjxdii\":\"dataaqoqjnvmfmrymk\"}}") + 
"{\"type\":\"uemotgkyfh\",\"typeProperties\":{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"datallt\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"dataxxzhbfib\"},\"parameters\":{\"djrhxjawf\":{\"value\":\"datahzpjdbzhlchv\",\"type\":\"int\"},\"pucybtravel\":{\"value\":\"datavgvrpearooh\",\"type\":\"bool\"},\"ogxexeaexweei\":{\"value\":\"datam\",\"type\":\"float\"},\"ncnf\":{\"value\":\"datagvzmqdn\",\"type\":\"float\"}},\"executorSize\":\"dataggiomgv\",\"conf\":\"datarxlrtm\",\"driverSize\":\"datagnixkp\",\"numExecutors\":\"datajqjwlhqeibucm\",\"configurationType\":\"Artifact\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"datajrs\"},\"sparkConfig\":{\"uzwy\":\"datamaezxldmzh\"}},\"linkedServiceName\":{\"referenceName\":\"omuapyskwi\",\"parameters\":{\"xfyqsfygafhbf\":\"datafqvulesqjdbcypvs\",\"vqlmzpckxlcslm\":\"dataz\"}},\"policy\":{\"timeout\":\"dataojqpjba\",\"retry\":\"datanxdizdfulv\",\"retryIntervalInSeconds\":1834458226,\"secureInput\":false,\"secureOutput\":true,\"\":{\"dht\":\"datasp\",\"hx\":\"dataopzfcexbtwic\",\"gkvmmkwa\":\"datae\"}},\"name\":\"xmwq\",\"description\":\"svzczisi\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"fuhqbatdnufvz\",\"dependencyConditions\":[\"Failed\"],\"\":{\"vdtssa\":\"datat\",\"mfaagpjslrf\":\"databmdoj\"}}],\"userProperties\":[{\"name\":\"ut\",\"value\":\"databhs\"},{\"name\":\"nnf\",\"value\":\"databxtabxdkboyqes\"}],\"\":{\"uikqzdqkxjcqdnzh\":\"datavutarurfjppmi\",\"pjinjikxocf\":\"datalbdlhnkv\",\"r\":\"datakcnjzxezo\"}}") .toObject(SynapseNotebookActivity.class); - Assertions.assertEquals("dlgignja", model.name()); - Assertions.assertEquals("cixwtwzgbuhcrwqr", model.description()); + Assertions.assertEquals("xmwq", model.name()); + Assertions.assertEquals("svzczisi", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("mfufs", model.dependsOn().get(0).activity()); + Assertions.assertEquals("fuhqbatdnufvz", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cykihym", model.userProperties().get(0).name()); - Assertions.assertEquals("ce", model.linkedServiceName().referenceName()); - Assertions.assertEquals(559455759, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("ut", model.userProperties().get(0).name()); + Assertions.assertEquals("omuapyskwi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1834458226, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("pysg").type()); - Assertions.assertEquals(ConfigurationType.CUSTOMIZED, model.configurationType()); + Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("djrhxjawf").type()); + Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); 
Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.targetSparkConfiguration().type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SynapseNotebookActivity model = new SynapseNotebookActivity().withName("dlgignja") - .withDescription("cixwtwzgbuhcrwqr").withState(ActivityState.INACTIVE) + SynapseNotebookActivity model = new SynapseNotebookActivity().withName("xmwq") + .withDescription("svzczisi") + .withState(ActivityState.INACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mfufs") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vgowkak") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("iyovcrmoalvea") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mhvpxptq") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("cykihym").withValue("datagukf"), - new UserProperty().withName("kqok").withValue("datavxknygimoh"), - new UserProperty().withName("llxjyxhwv").withValue("datayupszch"), - new UserProperty().withName("wnudd").withValue("dataazvsmnxblc"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ce") - .withParameters(mapOf("mrsbgjjuhzf", "datacerrpal"))) - .withPolicy(new ActivityPolicy().withTimeout("dataabyvmchhkwlmit").withRetry("databivhkdxhnv") - .withRetryIntervalInSeconds(559455759).withSecureInput(true).withSecureOutput(true) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("fuhqbatdnufvz") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("ut").withValue("databhs"), + new UserProperty().withName("nnf").withValue("databxtabxdkboyqes"))) + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("omuapyskwi") + .withParameters(mapOf("xfyqsfygafhbf", "datafqvulesqjdbcypvs", "vqlmzpckxlcslm", "dataz"))) + .withPolicy(new ActivityPolicy().withTimeout("dataojqpjba") + .withRetry("datanxdizdfulv") + .withRetryIntervalInSeconds(1834458226) + .withSecureInput(false) + .withSecureOutput(true) .withAdditionalProperties(mapOf())) .withNotebook(new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) - .withReferenceName("datamcblmza")) - .withSparkPool(new BigDataPoolParametrizationReference() - .withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE).withReferenceName("datasmpcajx")) - .withParameters(mapOf("pysg", - new NotebookParameter().withValue("datapdkrjlwrqhehnazc").withType(NotebookParameterType.INT), - "iumccomjxx", new NotebookParameter().withValue("dataivoahek").withType(NotebookParameterType.BOOL), - "hrkhfyaxiw", new NotebookParameter().withValue("dataaf").withType(NotebookParameterType.FLOAT), - "rbogzwwyub", - new NotebookParameter().withValue("datazsimbgvrksjjq").withType(NotebookParameterType.STRING))) - 
.withExecutorSize("datapocjyjqem").withConf("datakjxuxmkkgbyjfe").withDriverSize("databnwfekpgllezvr") - .withNumExecutors("datawsffk").withConfigurationType(ConfigurationType.CUSTOMIZED) + .withReferenceName("datallt")) + .withSparkPool( + new BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) + .withReferenceName("dataxxzhbfib")) + .withParameters(mapOf("djrhxjawf", + new NotebookParameter().withValue("datahzpjdbzhlchv").withType(NotebookParameterType.INT), + "pucybtravel", + new NotebookParameter().withValue("datavgvrpearooh").withType(NotebookParameterType.BOOL), + "ogxexeaexweei", new NotebookParameter().withValue("datam").withType(NotebookParameterType.FLOAT), + "ncnf", new NotebookParameter().withValue("datagvzmqdn").withType(NotebookParameterType.FLOAT))) + .withExecutorSize("dataggiomgv") + .withConf("datarxlrtm") + .withDriverSize("datagnixkp") + .withNumExecutors("datajqjwlhqeibucm") + .withConfigurationType(ConfigurationType.ARTIFACT) .withTargetSparkConfiguration(new SparkConfigurationParametrizationReference() .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) - .withReferenceName("databsvkijynvguh")) - .withSparkConfig(mapOf("sclrvquwhmncewcf", "datan", "xsybtpqgxzogclu", "datansoim", "i", "dataicnckdxflg")); + .withReferenceName("datajrs")) + .withSparkConfig(mapOf("uzwy", "datamaezxldmzh")); model = BinaryData.fromObject(model).toObject(SynapseNotebookActivity.class); - Assertions.assertEquals("dlgignja", model.name()); - Assertions.assertEquals("cixwtwzgbuhcrwqr", model.description()); + Assertions.assertEquals("xmwq", model.name()); + Assertions.assertEquals("svzczisi", model.description()); Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("mfufs", model.dependsOn().get(0).activity()); + Assertions.assertEquals("fuhqbatdnufvz", model.dependsOn().get(0).activity()); Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("cykihym", model.userProperties().get(0).name()); - Assertions.assertEquals("ce", model.linkedServiceName().referenceName()); - Assertions.assertEquals(559455759, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); + Assertions.assertEquals("ut", model.userProperties().get(0).name()); + Assertions.assertEquals("omuapyskwi", model.linkedServiceName().referenceName()); + Assertions.assertEquals(1834458226, model.policy().retryIntervalInSeconds()); + Assertions.assertEquals(false, model.policy().secureInput()); Assertions.assertEquals(true, model.policy().secureOutput()); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("pysg").type()); - Assertions.assertEquals(ConfigurationType.CUSTOMIZED, model.configurationType()); + Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("djrhxjawf").type()); + Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.targetSparkConfiguration().type()); } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java index 25929a5e01ef8..527cfc96b7ef1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java @@ -23,11 +23,11 @@ public final class SynapseNotebookActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SynapseNotebookActivityTypeProperties model = BinaryData.fromString( - "{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"datatdzhkbcouavotfm\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"datazvydzqmlk\"},\"parameters\":{\"ukjirtiu\":{\"value\":\"datapbbjcznxdhiwaa\",\"type\":\"string\"},\"msexaejb\":{\"value\":\"datayudkgonrrarzn\",\"type\":\"string\"}},\"executorSize\":\"dataoune\",\"conf\":\"datafhclssedxiig\",\"driverSize\":\"datazwqjpudupishcvsj\",\"numExecutors\":\"dataedsqfdulndywghn\",\"configurationType\":\"Artifact\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"dataljnromhsia\"},\"sparkConfig\":{\"pmsyhrvifurg\":\"datahpelqckwc\"}}") + "{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"dataewthslzt\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"datang\"},\"parameters\":{\"znlscfbwkh\":{\"value\":\"dataycbvefldfwqn\",\"type\":\"string\"},\"nbzpcxo\":{\"value\":\"dataumiboprg\",\"type\":\"int\"},\"vnanx\":{\"value\":\"datamepzekm\",\"type\":\"int\"}},\"executorSize\":\"datawzla\",\"conf\":\"datateqnttmhsrw\",\"driverSize\":\"datacxyfje\",\"numExecutors\":\"datacgelipoequjkhum\",\"configurationType\":\"Artifact\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"databptvvwfamhljhi\"},\"sparkConfig\":{\"zwd\":\"dataccwmrckvlb\",\"ohxmzpfptt\":\"dataydbsrjofxoktokms\"}}") .toObject(SynapseNotebookActivityTypeProperties.class); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.STRING, model.parameters().get("ukjirtiu").type()); + Assertions.assertEquals(NotebookParameterType.STRING, model.parameters().get("znlscfbwkh").type()); Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.targetSparkConfiguration().type()); @@ -37,23 +37,27 @@ public void testDeserialize() throws Exception { public void testSerialize() throws Exception { SynapseNotebookActivityTypeProperties model = new SynapseNotebookActivityTypeProperties() .withNotebook(new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) - .withReferenceName("datatdzhkbcouavotfm")) - .withSparkPool(new BigDataPoolParametrizationReference() - .withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE).withReferenceName("datazvydzqmlk")) - .withParameters(mapOf("ukjirtiu", - new 
NotebookParameter().withValue("datapbbjcznxdhiwaa").withType(NotebookParameterType.STRING), - "msexaejb", - new NotebookParameter().withValue("datayudkgonrrarzn").withType(NotebookParameterType.STRING))) - .withExecutorSize("dataoune").withConf("datafhclssedxiig").withDriverSize("datazwqjpudupishcvsj") - .withNumExecutors("dataedsqfdulndywghn").withConfigurationType(ConfigurationType.ARTIFACT) + .withReferenceName("dataewthslzt")) + .withSparkPool( + new BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) + .withReferenceName("datang")) + .withParameters(mapOf("znlscfbwkh", + new NotebookParameter().withValue("dataycbvefldfwqn").withType(NotebookParameterType.STRING), "nbzpcxo", + new NotebookParameter().withValue("dataumiboprg").withType(NotebookParameterType.INT), "vnanx", + new NotebookParameter().withValue("datamepzekm").withType(NotebookParameterType.INT))) + .withExecutorSize("datawzla") + .withConf("datateqnttmhsrw") + .withDriverSize("datacxyfje") + .withNumExecutors("datacgelipoequjkhum") + .withConfigurationType(ConfigurationType.ARTIFACT) .withTargetSparkConfiguration(new SparkConfigurationParametrizationReference() .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) - .withReferenceName("dataljnromhsia")) - .withSparkConfig(mapOf("pmsyhrvifurg", "datahpelqckwc")); + .withReferenceName("databptvvwfamhljhi")) + .withSparkConfig(mapOf("zwd", "dataccwmrckvlb", "ohxmzpfptt", "dataydbsrjofxoktokms")); model = BinaryData.fromObject(model).toObject(SynapseNotebookActivityTypeProperties.class); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.STRING, model.parameters().get("ukjirtiu").type()); + Assertions.assertEquals(NotebookParameterType.STRING, model.parameters().get("znlscfbwkh").type()); Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.targetSparkConfiguration().type()); diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java index 9a62bed492f9b..143c48d14b189 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java @@ -13,15 +13,16 @@ public final class SynapseNotebookReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SynapseNotebookReference model - = BinaryData.fromString("{\"type\":\"NotebookReference\",\"referenceName\":\"datahoqfvuqimdgkvf\"}") + = BinaryData.fromString("{\"type\":\"NotebookReference\",\"referenceName\":\"datawqrbtadsdkbndkof\"}") .toObject(SynapseNotebookReference.class); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SynapseNotebookReference model = new SynapseNotebookReference() - 
.withType(NotebookReferenceType.NOTEBOOK_REFERENCE).withReferenceName("datahoqfvuqimdgkvf"); + SynapseNotebookReference model + = new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) + .withReferenceName("datawqrbtadsdkbndkof"); model = BinaryData.fromObject(model).toObject(SynapseNotebookReference.class); Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.type()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java index 0fe416e1cac30..a2fc691424a60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java @@ -13,15 +13,16 @@ public final class SynapseSparkJobReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { SynapseSparkJobReference model - = BinaryData.fromString("{\"type\":\"SparkJobDefinitionReference\",\"referenceName\":\"databow\"}") + = BinaryData.fromString("{\"type\":\"SparkJobDefinitionReference\",\"referenceName\":\"dataityp\"}") .toObject(SynapseSparkJobReference.class); Assertions.assertEquals(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - SynapseSparkJobReference model = new SynapseSparkJobReference() - .withType(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE).withReferenceName("databow"); + SynapseSparkJobReference model + = new SynapseSparkJobReference().withType(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE) + .withReferenceName("dataityp"); model = BinaryData.fromObject(model).toObject(SynapseSparkJobReference.class); Assertions.assertEquals(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE, model.type()); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java index 117bd97783740..ad0dbc6fb7db7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java @@ -11,16 +11,18 @@ public final class TabularSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TabularSource model = BinaryData.fromString( - "{\"type\":\"TabularSource\",\"queryTimeout\":\"dataqdswfno\",\"additionalColumns\":\"dataiwhumngihfndsj\",\"sourceRetryCount\":\"datailfvrpbcgd\",\"sourceRetryWait\":\"datafxoffckejxomngu\",\"maxConcurrentConnections\":\"dataxxynt\",\"disableMetricsCollection\":\"datanksvximgn\",\"\":{\"mwfoummdomvditp\":\"datacxuyzrnngnmf\",\"sfnoczefgfqxejj\":\"dataqalwlirapqhsidf\",\"fcrb\":\"datatiqbxzeiudog\"}}") + 
"{\"type\":\"vhfpfsesiywcre\",\"queryTimeout\":\"dataj\",\"additionalColumns\":\"datap\",\"sourceRetryCount\":\"dataphqqozhesbpq\",\"sourceRetryWait\":\"datamfjktd\",\"maxConcurrentConnections\":\"datahlkzt\",\"disableMetricsCollection\":\"datauuupcdaoatzvajw\",\"\":{\"lmazgpqo\":\"dataefmotulh\"}}") .toObject(TabularSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TabularSource model - = new TabularSource().withSourceRetryCount("datailfvrpbcgd").withSourceRetryWait("datafxoffckejxomngu") - .withMaxConcurrentConnections("dataxxynt").withDisableMetricsCollection("datanksvximgn") - .withQueryTimeout("dataqdswfno").withAdditionalColumns("dataiwhumngihfndsj"); + TabularSource model = new TabularSource().withSourceRetryCount("dataphqqozhesbpq") + .withSourceRetryWait("datamfjktd") + .withMaxConcurrentConnections("datahlkzt") + .withDisableMetricsCollection("datauuupcdaoatzvajw") + .withQueryTimeout("dataj") + .withAdditionalColumns("datap"); model = BinaryData.fromObject(model).toObject(TabularSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java index 15005558262bf..c1c659e0e9671 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java @@ -12,18 +12,24 @@ public final class TabularTranslatorTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TabularTranslator model = BinaryData.fromString( - "{\"type\":\"TabularTranslator\",\"columnMappings\":\"dataqvdivzjyx\",\"schemaMapping\":\"datablblxjbrqbutmacn\",\"collectionReference\":\"datadmyduvawea\",\"mapComplexValuesToString\":\"datafbvbvkw\",\"mappings\":\"datarz\",\"typeConversion\":\"datayymh\",\"typeConversionSettings\":{\"allowDataTruncation\":\"dataobhltmpay\",\"treatBooleanAsNumber\":\"dataqgrsytto\",\"dateTimeFormat\":\"datazbbxifacrhpuzcag\",\"dateTimeOffsetFormat\":\"datavpbwt\",\"timeSpanFormat\":\"datauiguo\",\"culture\":\"datao\"},\"\":{\"gdv\":\"databuexrkoxwyxodp\"}}") + "{\"type\":\"a\",\"columnMappings\":\"datalnkbmvnvfgwgoxf\",\"schemaMapping\":\"datakezoxhazafmq\",\"collectionReference\":\"dataifpciammpeakdheb\",\"mapComplexValuesToString\":\"datauqg\",\"mappings\":\"dataxklojd\",\"typeConversion\":\"datahajfj\",\"typeConversionSettings\":{\"allowDataTruncation\":\"datasxgjih\",\"treatBooleanAsNumber\":\"dataox\",\"dateTimeFormat\":\"datahumvptbhogll\",\"dateTimeOffsetFormat\":\"dataealcj\",\"timeSpanFormat\":\"datazzil\",\"culture\":\"datacrnovbg\"},\"\":{\"pzzq\":\"dataxsyhpilqojdmzej\",\"rcivxaq\":\"datainrymzlq\"}}") .toObject(TabularTranslator.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TabularTranslator model = new TabularTranslator().withColumnMappings("dataqvdivzjyx") - .withSchemaMapping("datablblxjbrqbutmacn").withCollectionReference("datadmyduvawea") - .withMapComplexValuesToString("datafbvbvkw").withMappings("datarz").withTypeConversion("datayymh") - .withTypeConversionSettings(new TypeConversionSettings().withAllowDataTruncation("dataobhltmpay") - 
.withTreatBooleanAsNumber("dataqgrsytto").withDateTimeFormat("datazbbxifacrhpuzcag") - .withDateTimeOffsetFormat("datavpbwt").withTimeSpanFormat("datauiguo").withCulture("datao")); + TabularTranslator model = new TabularTranslator().withColumnMappings("datalnkbmvnvfgwgoxf") + .withSchemaMapping("datakezoxhazafmq") + .withCollectionReference("dataifpciammpeakdheb") + .withMapComplexValuesToString("datauqg") + .withMappings("dataxklojd") + .withTypeConversion("datahajfj") + .withTypeConversionSettings(new TypeConversionSettings().withAllowDataTruncation("datasxgjih") + .withTreatBooleanAsNumber("dataox") + .withDateTimeFormat("datahumvptbhogll") + .withDateTimeOffsetFormat("dataealcj") + .withTimeSpanFormat("datazzil") + .withCulture("datacrnovbg")); model = BinaryData.fromObject(model).toObject(TabularTranslator.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java index 84dfe83112c9e..295bdf076b0d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java @@ -11,14 +11,13 @@ public final class TarGZipReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TarGZipReadSettings model = BinaryData.fromString( - "{\"type\":\"TarGZipReadSettings\",\"preserveCompressionFileNameAsFolder\":\"dataiaizsglavdttty\",\"\":{\"qkjqcsh\":\"dataomz\"}}") + "{\"type\":\"eqljzkhncaeyk\",\"preserveCompressionFileNameAsFolder\":\"datapdnnsujx\",\"\":{\"lniahvlzgs\":\"datatztnprns\",\"isjscuwyl\":\"datawiubgbltj\",\"rziryx\":\"dataktzcuxuxaihhegu\"}}") .toObject(TarGZipReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TarGZipReadSettings model - = new TarGZipReadSettings().withPreserveCompressionFileNameAsFolder("dataiaizsglavdttty"); + TarGZipReadSettings model = new TarGZipReadSettings().withPreserveCompressionFileNameAsFolder("datapdnnsujx"); model = BinaryData.fromObject(model).toObject(TarGZipReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java index 6f953272918f2..6469eaf5165c9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java @@ -11,13 +11,13 @@ public final class TarReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TarReadSettings model = BinaryData.fromString( - "{\"type\":\"TarReadSettings\",\"preserveCompressionFileNameAsFolder\":\"datapp\",\"\":{\"zjplaxxfn\":\"dataikktretutsy\",\"rotg\":\"dataltanvb\",\"rpmgd\":\"dataxk\"}}") + 
"{\"type\":\"gfqxejjntiqbx\",\"preserveCompressionFileNameAsFolder\":\"datahsfnocz\",\"\":{\"fcrb\":\"dataudog\",\"wzbew\":\"dataoeomufaza\",\"ucj\":\"databsspexejhwpnjc\",\"dqtir\":\"databovuvmd\"}}") .toObject(TarReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TarReadSettings model = new TarReadSettings().withPreserveCompressionFileNameAsFolder("datapp"); + TarReadSettings model = new TarReadSettings().withPreserveCompressionFileNameAsFolder("datahsfnocz"); model = BinaryData.fromObject(model).toObject(TarReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java index b03dc39b437ed..8858351249995 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java @@ -11,14 +11,15 @@ public final class TeradataPartitionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TeradataPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"datajwrhubgaaaxi\",\"partitionUpperBound\":\"datafahtt\",\"partitionLowerBound\":\"dataggzdoblpdtcyv\"}") + "{\"partitionColumnName\":\"dataoqbzrclar\",\"partitionUpperBound\":\"datafmfkuvybem\",\"partitionLowerBound\":\"dataamshqvku\"}") .toObject(TeradataPartitionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TeradataPartitionSettings model = new TeradataPartitionSettings().withPartitionColumnName("datajwrhubgaaaxi") - .withPartitionUpperBound("datafahtt").withPartitionLowerBound("dataggzdoblpdtcyv"); + TeradataPartitionSettings model = new TeradataPartitionSettings().withPartitionColumnName("dataoqbzrclar") + .withPartitionUpperBound("datafmfkuvybem") + .withPartitionLowerBound("dataamshqvku"); model = BinaryData.fromObject(model).toObject(TeradataPartitionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java index f40cbd8c44b61..7758f59783156 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java @@ -12,18 +12,23 @@ public final class TeradataSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TeradataSource model = BinaryData.fromString( - 
"{\"type\":\"TeradataSource\",\"query\":\"datamnjtfplgxcjr\",\"partitionOption\":\"datab\",\"partitionSettings\":{\"partitionColumnName\":\"dataisfjamgnpeosu\",\"partitionUpperBound\":\"datayycofljabdmwal\",\"partitionLowerBound\":\"databuqkdieuop\"},\"queryTimeout\":\"dataaknhmi\",\"additionalColumns\":\"dataf\",\"sourceRetryCount\":\"datalfm\",\"sourceRetryWait\":\"datan\",\"maxConcurrentConnections\":\"dataiahoygzkdb\",\"disableMetricsCollection\":\"datazobcdvbbuuipe\",\"\":{\"x\":\"dataptteojxhwgja\",\"sl\":\"datarpwjgkxvkjd\",\"qi\":\"dataqmmwwtzxsv\"}}") + "{\"type\":\"moyxdigkggzmyl\",\"query\":\"datakqiqsriubem\",\"partitionOption\":\"datauygmrenrbn\",\"partitionSettings\":{\"partitionColumnName\":\"datafmophtkyzsgayn\",\"partitionUpperBound\":\"dataowvcnvf\",\"partitionLowerBound\":\"dataxqhysuapdns\"},\"queryTimeout\":\"dataycjdnio\",\"additionalColumns\":\"datagyxmpmsacbamtoqs\",\"sourceRetryCount\":\"dataqeosx\",\"sourceRetryWait\":\"dataxilefiottdawgka\",\"maxConcurrentConnections\":\"datah\",\"disableMetricsCollection\":\"datatyhypi\",\"\":{\"nkkxoicb\":\"datajjfcysk\",\"pmvppvgrigjegrl\":\"datasmfvltbocqhv\"}}") .toObject(TeradataSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TeradataSource model = new TeradataSource().withSourceRetryCount("datalfm").withSourceRetryWait("datan") - .withMaxConcurrentConnections("dataiahoygzkdb").withDisableMetricsCollection("datazobcdvbbuuipe") - .withQueryTimeout("dataaknhmi").withAdditionalColumns("dataf").withQuery("datamnjtfplgxcjr") - .withPartitionOption("datab") - .withPartitionSettings(new TeradataPartitionSettings().withPartitionColumnName("dataisfjamgnpeosu") - .withPartitionUpperBound("datayycofljabdmwal").withPartitionLowerBound("databuqkdieuop")); + TeradataSource model = new TeradataSource().withSourceRetryCount("dataqeosx") + .withSourceRetryWait("dataxilefiottdawgka") + .withMaxConcurrentConnections("datah") + .withDisableMetricsCollection("datatyhypi") + .withQueryTimeout("dataycjdnio") + .withAdditionalColumns("datagyxmpmsacbamtoqs") + .withQuery("datakqiqsriubem") + .withPartitionOption("datauygmrenrbn") + .withPartitionSettings(new TeradataPartitionSettings().withPartitionColumnName("datafmophtkyzsgayn") + .withPartitionUpperBound("dataowvcnvf") + .withPartitionLowerBound("dataxqhysuapdns")); model = BinaryData.fromObject(model).toObject(TeradataSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java index ddb15c1b25720..fac11aef959a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java @@ -19,33 +19,35 @@ public final class TeradataTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TeradataTableDataset model = BinaryData.fromString( - 
"{\"type\":\"TeradataTable\",\"typeProperties\":{\"database\":\"databxjblajybdnb\",\"table\":\"datasbtoisazdjmo\"},\"description\":\"vpz\",\"structure\":\"datanywxuy\",\"schema\":\"datafj\",\"linkedServiceName\":{\"referenceName\":\"mgwtmszcfyzqp\",\"parameters\":{\"gihlnzffewvqky\":\"dataegfurdpagknxmaov\"}},\"parameters\":{\"abhgclejqzhpvh\":{\"type\":\"Bool\",\"defaultValue\":\"dataipqxxsdyafwtydsm\"},\"eullgfyog\":{\"type\":\"String\",\"defaultValue\":\"dataadj\"},\"mwdz\":{\"type\":\"Int\",\"defaultValue\":\"datacjpvqerqxk\"},\"x\":{\"type\":\"SecureString\",\"defaultValue\":\"datahcu\"}},\"annotations\":[\"datawwvmbjec\",\"datawlbg\",\"datankfrwxo\"],\"folder\":{\"name\":\"dsnjzpchiypb\"},\"\":{\"iktqozewbrsrj\":\"datai\",\"qbjxgjwsrerukbuu\":\"datagkbrauxboufqn\",\"wkwkjxlaacedikqe\":\"datari\"}}") + "{\"type\":\"mucjiuh\",\"typeProperties\":{\"database\":\"datauyjnqzbrqcakm\",\"table\":\"datakviyjucamnsbqoit\"},\"description\":\"yvehyk\",\"structure\":\"datalyqdvpqfbxgyc\",\"schema\":\"datausdmtxq\",\"linkedServiceName\":{\"referenceName\":\"ef\",\"parameters\":{\"ey\":\"dataeywvfopkyll\",\"w\":\"datanj\",\"wxhqhgkh\":\"datamsfwtwrsvevcneq\",\"t\":\"databzvulqevvjncpmy\"}},\"parameters\":{\"bcyjrtalqee\":{\"type\":\"Float\",\"defaultValue\":\"dataghcmixmlwkfe\"},\"oaezktomsgoihlq\":{\"type\":\"Object\",\"defaultValue\":\"datafyim\"}},\"annotations\":[\"datawaazbaeeek\",\"dataztvn\",\"databfb\"],\"folder\":{\"name\":\"wnoljdkx\"},\"\":{\"puyytbpkrpkh\":\"datawyaolclzxkr\"}}") .toObject(TeradataTableDataset.class); - Assertions.assertEquals("vpz", model.description()); - Assertions.assertEquals("mgwtmszcfyzqp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("abhgclejqzhpvh").type()); - Assertions.assertEquals("dsnjzpchiypb", model.folder().name()); + Assertions.assertEquals("yvehyk", model.description()); + Assertions.assertEquals("ef", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bcyjrtalqee").type()); + Assertions.assertEquals("wnoljdkx", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TeradataTableDataset model = new TeradataTableDataset().withDescription("vpz").withStructure("datanywxuy") - .withSchema("datafj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mgwtmszcfyzqp") - .withParameters(mapOf("gihlnzffewvqky", "dataegfurdpagknxmaov"))) - .withParameters(mapOf("abhgclejqzhpvh", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataipqxxsdyafwtydsm"), - "eullgfyog", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataadj"), - "mwdz", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datacjpvqerqxk"), - "x", new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahcu"))) - .withAnnotations(Arrays.asList("datawwvmbjec", "datawlbg", "datankfrwxo")) - .withFolder(new DatasetFolder().withName("dsnjzpchiypb")).withDatabase("databxjblajybdnb") - .withTable("datasbtoisazdjmo"); + TeradataTableDataset model = new TeradataTableDataset().withDescription("yvehyk") + .withStructure("datalyqdvpqfbxgyc") + .withSchema("datausdmtxq") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ef") + .withParameters(mapOf("ey", "dataeywvfopkyll", "w", "datanj", "wxhqhgkh", "datamsfwtwrsvevcneq", "t", + "databzvulqevvjncpmy"))) + 
.withParameters(mapOf("bcyjrtalqee", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataghcmixmlwkfe"), + "oaezktomsgoihlq", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datafyim"))) + .withAnnotations(Arrays.asList("datawaazbaeeek", "dataztvn", "databfb")) + .withFolder(new DatasetFolder().withName("wnoljdkx")) + .withDatabase("datauyjnqzbrqcakm") + .withTable("datakviyjucamnsbqoit"); model = BinaryData.fromObject(model).toObject(TeradataTableDataset.class); - Assertions.assertEquals("vpz", model.description()); - Assertions.assertEquals("mgwtmszcfyzqp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("abhgclejqzhpvh").type()); - Assertions.assertEquals("dsnjzpchiypb", model.folder().name()); + Assertions.assertEquals("yvehyk", model.description()); + Assertions.assertEquals("ef", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("bcyjrtalqee").type()); + Assertions.assertEquals("wnoljdkx", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java index eead45a0697d8..61d9f38046d13 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class TeradataTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TeradataTableDatasetTypeProperties model - = BinaryData.fromString("{\"database\":\"datas\",\"table\":\"datazbevgbnrommkiqh\"}") + = BinaryData.fromString("{\"database\":\"datatpoenefnoafpcnrx\",\"table\":\"datarxowvhufcmuajwb\"}") .toObject(TeradataTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { TeradataTableDatasetTypeProperties model - = new TeradataTableDatasetTypeProperties().withDatabase("datas").withTable("datazbevgbnrommkiqh"); + = new TeradataTableDatasetTypeProperties().withDatabase("datatpoenefnoafpcnrx") + .withTable("datarxowvhufcmuajwb"); model = BinaryData.fromObject(model).toObject(TeradataTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java index fd372ff256940..523d0bd3957ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java @@ -11,17 +11,23 @@ public final class TextFormatTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TextFormat model = BinaryData.fromString( - 
"{\"type\":\"TextFormat\",\"columnDelimiter\":\"datazwgsoriobije\",\"rowDelimiter\":\"datadye\",\"escapeChar\":\"datanhbokayrgwybrio\",\"quoteChar\":\"dataweoftnorw\",\"nullValue\":\"datagsioc\",\"encodingName\":\"datakmv\",\"treatEmptyAsNull\":\"datanx\",\"skipLineCount\":\"datafffhtjnwos\",\"firstRowAsHeader\":\"datafjxtvlxxzqfc\",\"serializer\":\"datariomxeezwyhj\",\"deserializer\":\"datajiqegmxdbs\",\"\":{\"detsz\":\"datawhpyvdk\",\"ekwuyckyvn\":\"datacanzb\"}}") + "{\"type\":\"e\",\"columnDelimiter\":\"datajlwyxedzn\",\"rowDelimiter\":\"datarfo\",\"escapeChar\":\"datakewvmyifopxfj\",\"quoteChar\":\"datapdyzoutx\",\"nullValue\":\"datatofhgnuywez\",\"encodingName\":\"datavadga\",\"treatEmptyAsNull\":\"datawv\",\"skipLineCount\":\"datajpytp\",\"firstRowAsHeader\":\"datavdn\",\"serializer\":\"dataufb\",\"deserializer\":\"datatbnukkfaxz\",\"\":{\"oewyhxessmvrkd\":\"dataxxyjisskobqoclfl\"}}") .toObject(TextFormat.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TextFormat model = new TextFormat().withSerializer("datariomxeezwyhj").withDeserializer("datajiqegmxdbs") - .withColumnDelimiter("datazwgsoriobije").withRowDelimiter("datadye").withEscapeChar("datanhbokayrgwybrio") - .withQuoteChar("dataweoftnorw").withNullValue("datagsioc").withEncodingName("datakmv") - .withTreatEmptyAsNull("datanx").withSkipLineCount("datafffhtjnwos") - .withFirstRowAsHeader("datafjxtvlxxzqfc"); + TextFormat model = new TextFormat().withSerializer("dataufb") + .withDeserializer("datatbnukkfaxz") + .withColumnDelimiter("datajlwyxedzn") + .withRowDelimiter("datarfo") + .withEscapeChar("datakewvmyifopxfj") + .withQuoteChar("datapdyzoutx") + .withNullValue("datatofhgnuywez") + .withEncodingName("datavadga") + .withTreatEmptyAsNull("datawv") + .withSkipLineCount("datajpytp") + .withFirstRowAsHeader("datavdn"); model = BinaryData.fromObject(model).toObject(TextFormat.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java index d7747e49f781c..9616c07bcddff 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java @@ -18,34 +18,38 @@ public final class TransformationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Transformation model = BinaryData.fromString( - "{\"name\":\"xxtclhuulri\",\"description\":\"yokvjgbzsxebr\",\"dataset\":{\"referenceName\":\"ttfyhcdjwsuoard\",\"parameters\":{\"bfwxiplkys\":\"datattpufpbpgnrholhu\",\"yjprxslw\":\"datal\",\"hfvhuwzbxpcqz\":\"datadmcvhtbbz\",\"lrrskap\":\"dataihotjecohmxv\"}},\"linkedService\":{\"referenceName\":\"wie\",\"parameters\":{\"imyc\":\"datayaderltfokyks\",\"rsejegprkj\":\"datagrvkcxzznnuif\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rjmwvvbtuqkxxi\",\"datasetParameters\":\"datagxql\",\"parameters\":{\"vjaqu\":\"dataotjgxieqfkyfhi\"},\"\":{\"mj\":\"dataynvskpajbmgeume\",\"apeqiscrpil\":\"dataxcbccwkqmt\"}}}") + 
"{\"name\":\"fgkuh\",\"description\":\"whvuldbkk\",\"dataset\":{\"referenceName\":\"jkjigawgaz\",\"parameters\":{\"aahzbhuroolkol\":\"dataqifhujjsbcml\",\"gfjzcvaax\":\"datarhhmojusu\",\"ahfxwccok\":\"dataia\",\"c\":\"dataxkukm\"}},\"linkedService\":{\"referenceName\":\"nuhhoqeqshav\",\"parameters\":{\"yrqolnthbbnkgz\":\"datak\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"drnzkjthf\",\"datasetParameters\":\"datayjncjm\",\"parameters\":{\"pfiddhl\":\"datayfj\",\"ocuselqk\":\"dataufzcqyjmqrfu\",\"sjmrkkhm\":\"datasazrhxuddqmdtff\",\"jawh\":\"datadmdlgyqixokw\"},\"\":{\"qdlcvmyolc\":\"datanqf\",\"tvsnvl\":\"dataymjc\",\"atuwqkokbc\":\"dataqdnzyza\",\"msn\":\"dataothymgobl\"}}}") .toObject(Transformation.class); - Assertions.assertEquals("xxtclhuulri", model.name()); - Assertions.assertEquals("yokvjgbzsxebr", model.description()); - Assertions.assertEquals("ttfyhcdjwsuoard", model.dataset().referenceName()); - Assertions.assertEquals("wie", model.linkedService().referenceName()); + Assertions.assertEquals("fgkuh", model.name()); + Assertions.assertEquals("whvuldbkk", model.description()); + Assertions.assertEquals("jkjigawgaz", model.dataset().referenceName()); + Assertions.assertEquals("nuhhoqeqshav", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("rjmwvvbtuqkxxi", model.flowlet().referenceName()); + Assertions.assertEquals("drnzkjthf", model.flowlet().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Transformation model = new Transformation().withName("xxtclhuulri").withDescription("yokvjgbzsxebr") - .withDataset(new DatasetReference().withReferenceName("ttfyhcdjwsuoard") - .withParameters(mapOf("bfwxiplkys", "datattpufpbpgnrholhu", "yjprxslw", "datal", "hfvhuwzbxpcqz", - "datadmcvhtbbz", "lrrskap", "dataihotjecohmxv"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("wie") - .withParameters(mapOf("imyc", "datayaderltfokyks", "rsejegprkj", "datagrvkcxzznnuif"))) + Transformation model = new Transformation().withName("fgkuh") + .withDescription("whvuldbkk") + .withDataset(new DatasetReference().withReferenceName("jkjigawgaz") + .withParameters(mapOf("aahzbhuroolkol", "dataqifhujjsbcml", "gfjzcvaax", "datarhhmojusu", "ahfxwccok", + "dataia", "c", "dataxkukm"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("nuhhoqeqshav") + .withParameters(mapOf("yrqolnthbbnkgz", "datak"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rjmwvvbtuqkxxi").withDatasetParameters("datagxql") - .withParameters(mapOf("vjaqu", "dataotjgxieqfkyfhi")).withAdditionalProperties(mapOf())); + .withReferenceName("drnzkjthf") + .withDatasetParameters("datayjncjm") + .withParameters(mapOf("pfiddhl", "datayfj", "ocuselqk", "dataufzcqyjmqrfu", "sjmrkkhm", + "datasazrhxuddqmdtff", "jawh", "datadmdlgyqixokw")) + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(Transformation.class); - Assertions.assertEquals("xxtclhuulri", model.name()); - Assertions.assertEquals("yokvjgbzsxebr", model.description()); - Assertions.assertEquals("ttfyhcdjwsuoard", model.dataset().referenceName()); - Assertions.assertEquals("wie", model.linkedService().referenceName()); + Assertions.assertEquals("fgkuh", model.name()); + Assertions.assertEquals("whvuldbkk", model.description()); + Assertions.assertEquals("jkjigawgaz", 
model.dataset().referenceName()); + Assertions.assertEquals("nuhhoqeqshav", model.linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("rjmwvvbtuqkxxi", model.flowlet().referenceName()); + Assertions.assertEquals("drnzkjthf", model.flowlet().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java index 0003706466092..6c99f754cdb0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java @@ -14,18 +14,18 @@ public final class TriggerDependencyReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerDependencyReference model = BinaryData.fromString( - "{\"type\":\"TriggerDependencyReference\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"rxlgz\"}}") + "{\"type\":\"ihwiezc\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"riottzyru\"}}") .toObject(TriggerDependencyReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("rxlgz", model.referenceTrigger().referenceName()); + Assertions.assertEquals("riottzyru", model.referenceTrigger().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { TriggerDependencyReference model = new TriggerDependencyReference().withReferenceTrigger( - new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("rxlgz")); + new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("riottzyru")); model = BinaryData.fromObject(model).toObject(TriggerDependencyReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("rxlgz", model.referenceTrigger().referenceName()); + Assertions.assertEquals("riottzyru", model.referenceTrigger().referenceName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java index ba96342fadac6..ff4e6bcec1982 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java @@ -17,29 +17,40 @@ public final class TriggerListResponseTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"Trigger\",\"description\":\"nifmzzsdymbrnysu\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datafwgckhocxvdfffw\"],\"\":{\"spave\":\"dataroud\",\"bunzozudh\":\"datahrv\"}},\"name\":\"gkmoyxcdyuibhmfd\",\"type\":\"zydvfvf\",\"etag\":\"naeo\",\"id\":\"rvhmgor\"},{\"properties\":{\"type\":\"Trigger\",\"description\":\"ukiscvwmzhw\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataxvxilcbtg\"],\"\":{\"vodggxdbee\":\"datazeyqxtjjfzqlqhyc\",\"wiuagydwqf\":\"datamieknlraria\",\"ocqwogfnzjvus\":\"dataylyrfgiagtco\"}},\"name\":\"ld\",\"type\":\"zuxylfsbtkadpyso\",\"etag\":\"btgkbugrjqctoj\",\"id\":\"isofieypefojyqd\"}],\"nextLink\":\"u\"}") + "{\"value\":[{\"properties\":{\"type\":\"kb\",\"description\":\"rj\",\"runtimeState\":\"Started\",\"annotations\":[\"datacmisofie\"],\"\":{\"qdhcu\":\"datafoj\",\"khihihlhzds\":\"datalcplc\",\"mvec\":\"datatzbsrgnowcjhf\"}},\"name\":\"txmwoteyow\",\"type\":\"uqovekqvgqouwif\",\"etag\":\"pjwyiv\",\"id\":\"kfxcvhrfs\"},{\"properties\":{\"type\":\"uagrttikteusqc\",\"description\":\"vyklxuby\",\"runtimeState\":\"Started\",\"annotations\":[\"datamfblcqcuubg\",\"dataibrta\",\"datametttwgd\",\"datalqxihhrmooiz\"],\"\":{\"hyrpetogebjoxs\":\"dataypxiutcxap\",\"abrqnkkzj\":\"datahvnh\",\"rgaehvvibrxjj\":\"datajb\",\"lftidgfcwqmpim\":\"datatoqbeitpkxztmoob\"}},\"name\":\"xzhemjyh\",\"type\":\"uj\",\"etag\":\"t\",\"id\":\"ozzwculkba\"},{\"properties\":{\"type\":\"fajnjwltlwtjjguk\",\"description\":\"lhsnvkcdmx\",\"runtimeState\":\"Started\",\"annotations\":[\"dataimlnwiaaomylw\",\"dataazul\",\"datasethwwn\"],\"\":{\"zswpchwa\":\"datal\"}},\"name\":\"bousn\",\"type\":\"pgfewetwlyx\",\"etag\":\"cxy\",\"id\":\"hdjhlimmbcx\"},{\"properties\":{\"type\":\"bcporxvxcjzhqizx\",\"description\":\"xtgqscjavftjuh\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datamtggu\"],\"\":{\"ghfcfiwrxgkneuvy\":\"datajrajcivm\"}},\"name\":\"zqodfvpgshox\",\"type\":\"gbpfgz\",\"etag\":\"tx\",\"id\":\"flbqvgaq\"}],\"nextLink\":\"gafcqu\"}") .toObject(TriggerListResponse.class); - Assertions.assertEquals("rvhmgor", model.value().get(0).id()); - Assertions.assertEquals("nifmzzsdymbrnysu", model.value().get(0).properties().description()); - Assertions.assertEquals("u", model.nextLink()); + Assertions.assertEquals("kfxcvhrfs", model.value().get(0).id()); + Assertions.assertEquals("rj", model.value().get(0).properties().description()); + Assertions.assertEquals("gafcqu", model.nextLink()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TriggerListResponse model = new TriggerListResponse() - .withValue(Arrays.asList( - new TriggerResourceInner().withId("rvhmgor") - .withProperties(new Trigger().withDescription("nifmzzsdymbrnysu") - .withAnnotations(Arrays.asList("datafwgckhocxvdfffw")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Stopped"))), - new TriggerResourceInner().withId("isofieypefojyqd").withProperties( - new Trigger().withDescription("ukiscvwmzhw").withAnnotations(Arrays.asList("dataxvxilcbtg")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Disabled"))))) - .withNextLink("u"); + TriggerListResponse model + = new TriggerListResponse() + .withValue(Arrays.asList( + new TriggerResourceInner().withId("kfxcvhrfs") + .withProperties(new Trigger().withDescription("rj") + .withAnnotations(Arrays.asList("datacmisofie")) + .withAdditionalProperties(mapOf("type", "kb", "runtimeState", "Started"))), + new TriggerResourceInner().withId("ozzwculkba") + .withProperties(new 
Trigger().withDescription("vyklxuby") + .withAnnotations( + Arrays.asList("datamfblcqcuubg", "dataibrta", "datametttwgd", "datalqxihhrmooiz")) + .withAdditionalProperties(mapOf("type", "uagrttikteusqc", "runtimeState", "Started"))), + new TriggerResourceInner().withId("hdjhlimmbcx") + .withProperties(new Trigger().withDescription("lhsnvkcdmx") + .withAnnotations(Arrays.asList("dataimlnwiaaomylw", "dataazul", "datasethwwn")) + .withAdditionalProperties(mapOf("type", "fajnjwltlwtjjguk", "runtimeState", "Started"))), + new TriggerResourceInner().withId("flbqvgaq") + .withProperties(new Trigger().withDescription("xtgqscjavftjuh") + .withAnnotations(Arrays.asList("datamtggu")) + .withAdditionalProperties(mapOf("type", "bcporxvxcjzhqizx", "runtimeState", "Disabled"))))) + .withNextLink("gafcqu"); model = BinaryData.fromObject(model).toObject(TriggerListResponse.class); - Assertions.assertEquals("rvhmgor", model.value().get(0).id()); - Assertions.assertEquals("nifmzzsdymbrnysu", model.value().get(0).properties().description()); - Assertions.assertEquals("u", model.nextLink()); + Assertions.assertEquals("kfxcvhrfs", model.value().get(0).id()); + Assertions.assertEquals("rj", model.value().get(0).properties().description()); + Assertions.assertEquals("gafcqu", model.nextLink()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java index a991ef2a44d66..050783d5be383 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java @@ -15,20 +15,20 @@ public final class TriggerPipelineReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerPipelineReference model = BinaryData.fromString( - "{\"pipelineReference\":{\"referenceName\":\"wkpphefsb\",\"name\":\"lbzxomeikjc\"},\"parameters\":{\"qbxyxoyfpuqqi\":\"dataacnmwpfsuqtaaz\"}}") + "{\"pipelineReference\":{\"referenceName\":\"koilaci\",\"name\":\"wjleip\"},\"parameters\":{\"kkwhbgxvellvulnx\":\"datawyxpzruzyth\"}}") .toObject(TriggerPipelineReference.class); - Assertions.assertEquals("wkpphefsb", model.pipelineReference().referenceName()); - Assertions.assertEquals("lbzxomeikjc", model.pipelineReference().name()); + Assertions.assertEquals("koilaci", model.pipelineReference().referenceName()); + Assertions.assertEquals("wjleip", model.pipelineReference().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { TriggerPipelineReference model = new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("wkpphefsb").withName("lbzxomeikjc")) - .withParameters(mapOf("qbxyxoyfpuqqi", "dataacnmwpfsuqtaaz")); + .withPipelineReference(new PipelineReference().withReferenceName("koilaci").withName("wjleip")) + .withParameters(mapOf("kkwhbgxvellvulnx", "datawyxpzruzyth")); model = BinaryData.fromObject(model).toObject(TriggerPipelineReference.class); - Assertions.assertEquals("wkpphefsb", model.pipelineReference().referenceName()); - Assertions.assertEquals("lbzxomeikjc", model.pipelineReference().name()); + 
Assertions.assertEquals("koilaci", model.pipelineReference().referenceName()); + Assertions.assertEquals("wjleip", model.pipelineReference().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java index 51244474c8a7b..39ebd39b20303 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java @@ -13,18 +13,18 @@ public final class TriggerReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerReference model - = BinaryData.fromString("{\"type\":\"TriggerReference\",\"referenceName\":\"eacpwsdir\"}") + = BinaryData.fromString("{\"type\":\"TriggerReference\",\"referenceName\":\"pyxaltmrhiina\"}") .toObject(TriggerReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.type()); - Assertions.assertEquals("eacpwsdir", model.referenceName()); + Assertions.assertEquals("pyxaltmrhiina", model.referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TriggerReference model - = new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("eacpwsdir"); + TriggerReference model = new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE) + .withReferenceName("pyxaltmrhiina"); model = BinaryData.fromObject(model).toObject(TriggerReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.type()); - Assertions.assertEquals("eacpwsdir", model.referenceName()); + Assertions.assertEquals("pyxaltmrhiina", model.referenceName()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java index a394fc8aa9fef..6a5fe2ea1ac27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java @@ -16,21 +16,21 @@ public final class TriggerResourceInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Trigger\",\"description\":\"cp\",\"runtimeState\":\"Started\",\"annotations\":[\"dataihih\",\"datahzdsqtzbsrgnow\",\"datajhf\",\"datamvec\"],\"\":{\"ekqvgqouwif\":\"dataxmwoteyowcluqo\",\"ivqikfxcvhr\":\"datampjw\",\"c\":\"datasphuagrttikteus\"}},\"name\":\"vyklxuby\",\"type\":\"ff\",\"etag\":\"fblcq\",\"id\":\"ubgq\"}") + "{\"properties\":{\"type\":\"dvetnws\",\"description\":\"utnw\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datavuzhyr\",\"dataewipm\",\"dataekdxuku\",\"datagsjj\"],\"\":{\"hvvmuvgpmun\":\"datadxgketwzhhzjhfj\"}},\"name\":\"sx\",\"type\":\"hfbuzjyihsasbhud\",\"etag\":\"ohyuemslynsq\",\"id\":\"pfoobr\"}") 
.toObject(TriggerResourceInner.class); - Assertions.assertEquals("ubgq", model.id()); - Assertions.assertEquals("cp", model.properties().description()); + Assertions.assertEquals("pfoobr", model.id()); + Assertions.assertEquals("utnw", model.properties().description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TriggerResourceInner model = new TriggerResourceInner().withId("ubgq") - .withProperties(new Trigger().withDescription("cp") - .withAnnotations(Arrays.asList("dataihih", "datahzdsqtzbsrgnow", "datajhf", "datamvec")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Started"))); + TriggerResourceInner model = new TriggerResourceInner().withId("pfoobr") + .withProperties(new Trigger().withDescription("utnw") + .withAnnotations(Arrays.asList("datavuzhyr", "dataewipm", "dataekdxuku", "datagsjj")) + .withAdditionalProperties(mapOf("type", "dvetnws", "runtimeState", "Stopped"))); model = BinaryData.fromObject(model).toObject(TriggerResourceInner.class); - Assertions.assertEquals("ubgq", model.id()); - Assertions.assertEquals("cp", model.properties().description()); + Assertions.assertEquals("pfoobr", model.id()); + Assertions.assertEquals("utnw", model.properties().description()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java index 6faa0630c12d7..bf7992e6215f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java @@ -15,31 +15,31 @@ public final class TriggerRunTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerRun model = BinaryData.fromString( - "{\"triggerRunId\":\"hcz\",\"triggerName\":\"rxzbujr\",\"triggerType\":\"hqvwrevkhgnlnzon\",\"triggerRunTimestamp\":\"2021-07-02T07:36:09Z\",\"status\":\"Inprogress\",\"message\":\"yw\",\"properties\":{\"zehtdhgb\":\"jtszcof\",\"reljeamur\":\"k\",\"xlpm\":\"zmlovuanash\"},\"triggeredPipelines\":{\"sdbccxjmonfdgnwn\":\"bdkelvidizo\",\"keifzzhmkdasv\":\"ypuuwwltvuqjctze\",\"cu\":\"lyhb\"},\"runDimension\":{\"lvizb\":\"xgsrboldforobw\",\"dxe\":\"hfovvacqpbtu\",\"elawumu\":\"zab\"},\"dependencyStatus\":{\"ucwyhahno\":\"datazkwrrwoyc\",\"fuurutlwexx\":\"datadrkywuhps\",\"srzpgepqtybbww\":\"datalalniex\"},\"\":{\"xkjibnxmy\":\"dataakchzyvlixqnrk\",\"ijpstte\":\"datauxswqrntvl\",\"wcyyufmhruncu\":\"dataoqq\"}}") + "{\"triggerRunId\":\"eokbze\",\"triggerName\":\"zrxcczurt\",\"triggerType\":\"ipqxbkwvzgnzv\",\"triggerRunTimestamp\":\"2021-07-04T23:14:39Z\",\"status\":\"Failed\",\"message\":\"xzmqpnodawop\",\"properties\":{\"gsbos\":\"wjptm\",\"jlvfhrbbpneqv\":\"zelndlatutmzlbi\"},\"triggeredPipelines\":{\"k\":\"yyurmochpprprsnm\"},\"runDimension\":{\"pcpil\":\"ejnhlbkpb\",\"hol\":\"hahzvechndbnwi\"},\"dependencyStatus\":{\"efqsfapaqtferrqw\":\"datawiuub\",\"kmfx\":\"datax\",\"pud\":\"datapjwogqqno\",\"yawbzasqbu\":\"datadabtqwp\"},\"\":{\"i\":\"datagkyexaoguy\"}}") .toObject(TriggerRun.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TriggerRun model = new TriggerRun().withAdditionalProperties(mapOf("triggerRunId", "hcz", 
"triggerName", - "rxzbujr", "runDimension", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"lvizb\":\"xgsrboldforobw\",\"dxe\":\"hfovvacqpbtu\",\"elawumu\":\"zab\"}", Object.class, - SerializerEncoding.JSON), + TriggerRun model = new TriggerRun().withAdditionalProperties(mapOf("triggerRunId", "eokbze", "triggerName", + "zrxcczurt", "runDimension", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"pcpil\":\"ejnhlbkpb\",\"hol\":\"hahzvechndbnwi\"}", Object.class, SerializerEncoding.JSON), "dependencyStatus", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"ucwyhahno\":\"datazkwrrwoyc\",\"fuurutlwexx\":\"datadrkywuhps\",\"srzpgepqtybbww\":\"datalalniex\"}", - Object.class, SerializerEncoding.JSON), + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize( + "{\"efqsfapaqtferrqw\":\"datawiuub\",\"kmfx\":\"datax\",\"pud\":\"datapjwogqqno\",\"yawbzasqbu\":\"datadabtqwp\"}", + Object.class, SerializerEncoding.JSON), "triggeredPipelines", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"sdbccxjmonfdgnwn\":\"bdkelvidizo\",\"keifzzhmkdasv\":\"ypuuwwltvuqjctze\",\"cu\":\"lyhb\"}", - Object.class, SerializerEncoding.JSON), - "triggerType", "hqvwrevkhgnlnzon", "triggerRunTimestamp", "2021-07-02T07:36:09Z", "message", "yw", + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"k\":\"yyurmochpprprsnm\"}", Object.class, SerializerEncoding.JSON), + "triggerType", "ipqxbkwvzgnzv", "triggerRunTimestamp", "2021-07-04T23:14:39Z", "message", "xzmqpnodawop", "properties", - JacksonAdapter.createDefaultSerializerAdapter().deserialize( - "{\"zehtdhgb\":\"jtszcof\",\"reljeamur\":\"k\",\"xlpm\":\"zmlovuanash\"}", Object.class, - SerializerEncoding.JSON), - "status", "Inprogress")); + JacksonAdapter.createDefaultSerializerAdapter() + .deserialize("{\"gsbos\":\"wjptm\",\"jlvfhrbbpneqv\":\"zelndlatutmzlbi\"}", Object.class, + SerializerEncoding.JSON), + "status", "Failed")); model = BinaryData.fromObject(model).toObject(TriggerRun.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java index c37de529c582c..a80a3f93a139e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java @@ -6,47 +6,30 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggerRunsCancelWithResponseMockTests { @Test public void 
testCancelWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.triggerRuns().cancelWithResponse("rwdfo", "rdytsgypvidzo", "qzdoy", "npkjpcgtgnhzufhw", - com.azure.core.util.Context.NONE); + manager.triggerRuns() + .cancelWithResponse("gabygzafgt", "dkdyxnvzfp", "tttouwnfsbeznjcn", "lhgfpfykyos", + com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java index 1ca0739624a22..e7b2bce9befd1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java @@ -6,47 +6,29 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggerRunsRerunWithResponseMockTests { @Test public void testRerunWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; 
- Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.triggerRuns().rerunWithResponse("edtwtukkhuusrm", "tonpgtaz", "upkebwses", "ls", - com.azure.core.util.Context.NONE); + manager.triggerRuns() + .rerunWithResponse("jptxdl", "kyjhvpgdugxo", "ls", "euhwonivwua", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java index 178bb20b1501b..f6852cbcaf787 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java @@ -11,7 +11,7 @@ public final class TriggerSubscriptionOperationStatusInnerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TriggerSubscriptionOperationStatusInner model - = BinaryData.fromString("{\"triggerName\":\"n\",\"status\":\"Provisioning\"}") + = BinaryData.fromString("{\"triggerName\":\"nhxk\",\"status\":\"Deprovisioning\"}") .toObject(TriggerSubscriptionOperationStatusInner.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java index ca8bf5b8e8427..5373a8939bbc5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java @@ -15,18 +15,18 @@ public final class TriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { Trigger model = BinaryData.fromString( - 
"{\"type\":\"Trigger\",\"description\":\"rtalmet\",\"runtimeState\":\"Started\",\"annotations\":[\"dataslqxi\",\"datahrmooi\",\"dataqseypxiutcxa\",\"datazhyrpeto\"],\"\":{\"rqnkkzjcjbtr\":\"datajoxslhvnhla\",\"eitpkxztmo\":\"dataaehvvibrxjjstoq\"}}") + "{\"type\":\"tyms\",\"description\":\"ygqdnfwqzdz\",\"runtimeState\":\"Stopped\",\"annotations\":[\"dataxhnfhqly\",\"dataijouwivkxoyzunb\"],\"\":{\"lrcivtsoxfrke\":\"datartikvcpwpg\",\"skawaoqvmmb\":\"dataxpmyyefrpmpdnq\",\"egnitg\":\"datapqfrtqlkz\",\"ealzxwhcansymoyq\":\"datakxlzyqdrfeg\"}}") .toObject(Trigger.class); - Assertions.assertEquals("rtalmet", model.description()); + Assertions.assertEquals("ygqdnfwqzdz", model.description()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - Trigger model = new Trigger().withDescription("rtalmet") - .withAnnotations(Arrays.asList("dataslqxi", "datahrmooi", "dataqseypxiutcxa", "datazhyrpeto")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Started")); + Trigger model = new Trigger().withDescription("ygqdnfwqzdz") + .withAnnotations(Arrays.asList("dataxhnfhqly", "dataijouwivkxoyzunb")) + .withAdditionalProperties(mapOf("type", "tyms", "runtimeState", "Stopped")); model = BinaryData.fromObject(model).toObject(Trigger.class); - Assertions.assertEquals("rtalmet", model.description()); + Assertions.assertEquals("ygqdnfwqzdz", model.description()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java index 339b08d7d645e..a68fa6dec68f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java @@ -6,15 +6,12 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.Trigger; import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import java.util.Arrays; @@ -22,43 +19,32 @@ import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersCreateOrUpdateWithResponseMockTests { @Test public void testCreateOrUpdateWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = 
"{\"properties\":{\"type\":\"Trigger\",\"description\":\"pbln\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datacij\",\"datan\",\"datavjskuw\",\"datarcxtyfbbomug\"],\"\":{\"ebpl\":\"datajvvdafbtozxvko\"}},\"name\":\"xmljnseaogqiybf\",\"type\":\"xuyo\",\"etag\":\"dryeucl\",\"id\":\"cwpgipttp\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerResource response = manager.triggers().define("mdjlxzttgvawy").withExistingFactory("xrm", "ljqhoiqvk") - .withProperties(new Trigger().withDescription("xoh").withAnnotations(Arrays.asList("datatheukclayqipwkx")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Stopped"))) - .withIfMatch("mxlnt").create(); - - Assertions.assertEquals("cwpgipttp", response.id()); - Assertions.assertEquals("pbln", response.properties().description()); + = "{\"properties\":{\"type\":\"shwcgomgfflyp\",\"description\":\"gzz\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataqnp\",\"databeplicbohvvkmtx\",\"datacbnmzcbtwofeuih\"],\"\":{\"wgdrwev\":\"datauerxaqhpecnf\"}},\"name\":\"wlhi\",\"type\":\"odogcvadpbajfms\",\"etag\":\"oirdpgm\",\"id\":\"twfb\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + TriggerResource response = manager.triggers() + .define("oapjjmytku") + .withExistingFactory("ldtusvnr", "pohpkoalwspq") + .withProperties(new Trigger().withDescription("ottjkrlielb") + .withAnnotations(Arrays.asList("datavvhp", "datauhfbjuzsp")) + .withAdditionalProperties(mapOf("type", "ctnmybainctm", "runtimeState", "Stopped"))) + .withIfMatch("gbpdpjewof") + .create(); + + Assertions.assertEquals("twfb", response.id()); + Assertions.assertEquals("gzz", response.properties().description()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java index 5e7017357dc04..9495b1e1b6de8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java @@ -6,46 +6,28 @@ 
import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersDeleteWithResponseMockTests { @Test public void testDeleteWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.triggers().deleteWithResponse("yki", "q", "gmlhszipih", com.azure.core.util.Context.NONE); + manager.triggers().deleteWithResponse("wgeglrs", "ekfpju", "aknhyfwbrepfogv", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java index da613262a004e..98025e699d612 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java @@ -6,49 +6,31 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; 
import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersGetEventSubscriptionStatusWithResponseMockTests { @Test public void testGetEventSubscriptionStatusWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - - String responseStr = "{\"triggerName\":\"oqgutr\",\"status\":\"Enabled\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response - = manager.triggers().getEventSubscriptionStatusWithResponse("cmmpjdrhxf", "swyafdlf", "yirjbfwrqivi", - com.azure.core.util.Context.NONE).getValue(); + String responseStr = "{\"triggerName\":\"cyrn\",\"status\":\"Deprovisioning\"}"; + + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); + + TriggerSubscriptionOperationStatus response = manager.triggers() + .getEventSubscriptionStatusWithResponse("p", "xcg", "yv", com.azure.core.util.Context.NONE) + .getValue(); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java index 9b6813f8b3a17..ac03109f074bd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java @@ -6,53 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import 
com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersGetWithResponseMockTests { @Test public void testGetWithResponse() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"properties\":{\"type\":\"Trigger\",\"description\":\"xuvc\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datawnayfkvwlfcf\",\"datayh\",\"datayn\"],\"\":{\"lbjccjorovr\":\"datauqnvnxsawicou\"}},\"name\":\"fgdvifoxoz\",\"type\":\"ymdjpkoj\",\"etag\":\"ytpyirctda\",\"id\":\"huk\"}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"properties\":{\"type\":\"ef\",\"description\":\"ngsityt\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datarw\"],\"\":{\"ftperw\":\"dataugdwrlqfqxzny\",\"dtbuikrpwyhrqdn\":\"datance\"}},\"name\":\"rfukygryrf\",\"type\":\"kjusue\",\"etag\":\"voiyaqhbbifll\",\"id\":\"g\"}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); TriggerResource response = manager.triggers() - .getWithResponse("btnvxomhkfk", "dmjjiqjvuf", "omfkiopkkhbf", "hsp", com.azure.core.util.Context.NONE) + .getWithResponse("sz", "a", "y", "rshlptlyxfnhc", com.azure.core.util.Context.NONE) .getValue(); - Assertions.assertEquals("huk", response.id()); - Assertions.assertEquals("xuvc", response.properties().description()); + Assertions.assertEquals("g", response.id()); + Assertions.assertEquals("ngsityt", response.properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java index fa28799b764f6..4f1736d4a35eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java @@ -6,53 +6,35 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.http.rest.PagedIterable; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersListByFactoryMockTests { @Test public void testListByFactory() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"Trigger\",\"description\":\"mxctkwx\",\"runtimeState\":\"Disabled\",\"annotations\":[\"databfeucdq\",\"datajazhtetimcjkh\",\"dataxx\"],\"\":{\"ssvrhcjhszmymfri\":\"datalcekonmcxriqfrrx\"}},\"name\":\"m\",\"type\":\"jkizqqdawmrk\",\"etag\":\"ixbbhjgnjlzdj\",\"id\":\"xwobxs\"}]}"; - - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); + = "{\"value\":[{\"properties\":{\"type\":\"achfwdbtlz\",\"description\":\"unmvtlkthekhn\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataywmkttaqqr\",\"dataxntik\",\"dataht\"],\"\":{\"pd\":\"datadyhnnizwpyq\",\"rsmxigurdkgcx\":\"dataxhwvstlccc\",\"dvzugqhsxop\":\"datajpwavlfsunmnewo\",\"lycfvp\":\"datas\"}},\"name\":\"e\",\"type\":\"uhgasjwdwxnofce\",\"etag\":\"kmlmasrtt\",\"id\":\"mgz\"}]}"; - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext 
-> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); PagedIterable response - = manager.triggers().listByFactory("ymsqnliou", "av", com.azure.core.util.Context.NONE); + = manager.triggers().listByFactory("oioicxytuud", "xugukvyhy", com.azure.core.util.Context.NONE); - Assertions.assertEquals("xwobxs", response.iterator().next().id()); - Assertions.assertEquals("mxctkwx", response.iterator().next().properties().description()); + Assertions.assertEquals("mgz", response.iterator().next().id()); + Assertions.assertEquals("unmvtlkthekhn", response.iterator().next().properties().description()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java index cced69f0e5df9..4d0b4d6017739 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java @@ -6,46 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersStartMockTests { @Test public void testStart() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", 
"", AzureEnvironment.AZURE)); - manager.triggers().start("sxlm", "lqtnngw", "gbfrtxbgaafttv", com.azure.core.util.Context.NONE); + manager.triggers().start("nweznx", "hhvnpieclljsygx", "iuoqlnx", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java index 1679e7c19517f..01852ff9ed5e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java @@ -6,46 +6,28 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersStopMockTests { @Test public void testStop() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); - String responseStr = "{}"; - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - manager.triggers().stop("mdnigajbxjnrlfdq", "afrwmxmd", "ezhutcaqqdch", com.azure.core.util.Context.NONE); + manager.triggers().stop("ddjib", "zwbrwu", "j", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java index 304263c549a69..a42e3d30352b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java @@ -6,48 +6,30 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersSubscribeToEventsMockTests { @Test public void testSubscribeToEvents() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"triggerName\":\"pnwbbpvpcxecnwk\",\"status\":\"Disabled\"}"; - String responseStr = "{\"triggerName\":\"wfytnvcjhjrwnb\",\"status\":\"Unknown\"}"; + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response = manager.triggers().subscribeToEvents("nvhlpuobhaomaow", - "mwjsvuziog", "oaimw", com.azure.core.util.Context.NONE); + TriggerSubscriptionOperationStatus response + = manager.triggers().subscribeToEvents("ybhij", "wyeihqwwmikq", "buxrg", com.azure.core.util.Context.NONE); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java index 5efdd11aba169..b0845606c9db0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java @@ -6,48 +6,30 @@ import com.azure.core.credential.AccessToken; import com.azure.core.http.HttpClient; -import com.azure.core.http.HttpHeaders; -import com.azure.core.http.HttpRequest; -import com.azure.core.http.HttpResponse; import com.azure.core.management.AzureEnvironment; import com.azure.core.management.profile.AzureProfile; +import com.azure.core.test.http.MockHttpResponse; import com.azure.resourcemanager.datafactory.DataFactoryManager; import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.time.OffsetDateTime; import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public final class TriggersUnsubscribeFromEventsMockTests { @Test public void testUnsubscribeFromEvents() throws Exception { - HttpClient httpClient = Mockito.mock(HttpClient.class); - HttpResponse httpResponse = Mockito.mock(HttpResponse.class); - ArgumentCaptor httpRequest = ArgumentCaptor.forClass(HttpRequest.class); + String responseStr = "{\"triggerName\":\"ylplycycswg\",\"status\":\"Disabled\"}"; - String responseStr = "{\"triggerName\":\"eiy\",\"status\":\"Deprovisioning\"}"; + HttpClient httpClient + = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); + DataFactoryManager manager = DataFactoryManager.configure() + .withHttpClient(httpClient) + .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), + new AzureProfile("", "", AzureEnvironment.AZURE)); - Mockito.when(httpResponse.getStatusCode()).thenReturn(200); - Mockito.when(httpResponse.getHeaders()).thenReturn(new HttpHeaders()); - Mockito.when(httpResponse.getBody()) - .thenReturn(Flux.just(ByteBuffer.wrap(responseStr.getBytes(StandardCharsets.UTF_8)))); - Mockito.when(httpResponse.getBodyAsByteArray()) - .thenReturn(Mono.just(responseStr.getBytes(StandardCharsets.UTF_8))); - Mockito.when(httpClient.send(httpRequest.capture(), Mockito.any())).thenReturn(Mono.defer(() -> { - Mockito.when(httpResponse.getRequest()).thenReturn(httpRequest.getValue()); - return Mono.just(httpResponse); - })); - - DataFactoryManager manager = DataFactoryManager.configure().withHttpClient(httpClient).authenticate( - tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response = manager.triggers().unsubscribeFromEvents("wmkgzsqrirlcjmha", - "rkhlayer", "shiuwnefe", com.azure.core.util.Context.NONE); + TriggerSubscriptionOperationStatus response = manager.triggers() + .unsubscribeFromEvents("im", "iyfzklhoirlpy", "ulbbusdewulorfv", com.azure.core.util.Context.NONE); } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java index 366d759b2519e..43b082cae9bf3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java @@ -14,24 +14,26 @@ public final class TumblingWindowTriggerDependencyReferenceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TumblingWindowTriggerDependencyReference model = BinaryData.fromString( - "{\"type\":\"TumblingWindowTriggerDependencyReference\",\"offset\":\"rmmjyvmxtj\",\"size\":\"obwskyjlteiulvrp\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"ivvlmz\"}}") + "{\"type\":\"r\",\"offset\":\"ljbjpkv\",\"size\":\"ahpztvlfffym\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"rtpznychwbzrbqpz\"}}") .toObject(TumblingWindowTriggerDependencyReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("ivvlmz", model.referenceTrigger().referenceName()); - Assertions.assertEquals("rmmjyvmxtj", model.offset()); - Assertions.assertEquals("obwskyjlteiulvrp", model.size()); + Assertions.assertEquals("rtpznychwbzrbqpz", model.referenceTrigger().referenceName()); + Assertions.assertEquals("ljbjpkv", model.offset()); + Assertions.assertEquals("ahpztvlfffym", model.size()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TumblingWindowTriggerDependencyReference model = new TumblingWindowTriggerDependencyReference() - .withReferenceTrigger( - new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("ivvlmz")) - .withOffset("rmmjyvmxtj").withSize("obwskyjlteiulvrp"); + TumblingWindowTriggerDependencyReference model + = new TumblingWindowTriggerDependencyReference() + .withReferenceTrigger(new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE) + .withReferenceName("rtpznychwbzrbqpz")) + .withOffset("ljbjpkv") + .withSize("ahpztvlfffym"); model = BinaryData.fromObject(model).toObject(TumblingWindowTriggerDependencyReference.class); Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("ivvlmz", model.referenceTrigger().referenceName()); - Assertions.assertEquals("rmmjyvmxtj", model.offset()); - Assertions.assertEquals("obwskyjlteiulvrp", model.size()); + Assertions.assertEquals("rtpznychwbzrbqpz", model.referenceTrigger().referenceName()); + Assertions.assertEquals("ljbjpkv", model.offset()); + Assertions.assertEquals("ahpztvlfffym", model.size()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java index a8e98b74d2db5..0afed119b4992 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java @@ -21,43 +21,45 @@ public final class TumblingWindowTriggerTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TumblingWindowTrigger model = BinaryData.fromString( - "{\"type\":\"TumblingWindowTrigger\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"tx\",\"name\":\"uxmegrix\"},\"parameters\":{\"tcoibiodfy\":\"datazjlqrpsqpj\",\"oemcajjazo\":\"dataafenwvvxcah\"}},\"typeProperties\":{\"frequency\":\"Month\",\"interval\":838287368,\"startTime\":\"2021-04-15T18:57:46Z\",\"endTime\":\"2021-07-24T14:06:28Z\",\"delay\":\"datahxydx\",\"maxConcurrency\":2006064884,\"retryPolicy\":{\"count\":\"datailgtbslagtmkii\",\"intervalInSeconds\":1595340480},\"dependsOn\":[{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"}]},\"description\":\"jpvgvbz\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datazgrfaq\",\"datatqmcszdptoyt\",\"datanxgqovfrtm\"],\"\":{\"jiocvjmyi\":\"datazrexmcawpbifzw\"}}") + "{\"type\":\"jsghuokjwvsacwdu\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"pcoiaaagvaec\",\"name\":\"dqgoo\"},\"parameters\":{\"fh\":\"datafrgmpu\",\"wcxzxvgfxzck\":\"datakrfe\",\"jal\":\"datawqujpugj\"}},\"typeProperties\":{\"frequency\":\"Minute\",\"interval\":1684959165,\"startTime\":\"2021-12-01T05:51:53Z\",\"endTime\":\"2021-11-29T21:19:29Z\",\"delay\":\"datadsvycj\",\"maxConcurrency\":1878853669,\"retryPolicy\":{\"count\":\"datapuphgogmggkkjc\",\"intervalInSeconds\":31605107},\"dependsOn\":[{\"type\":\"nnmjpgzw\"},{\"type\":\"upzs\"},{\"type\":\"sidkziparp\"}]},\"description\":\"zuykly\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datalevardbrdrt\",\"dataakezorhjhyzqhb\"],\"\":{\"dgd\":\"datadztvumvxkrxgai\",\"ojc\":\"datakhiqwuwxrcydmky\",\"nxeyfkrcmxtbwolz\":\"datasbygm\",\"buwqmicukwmz\":\"datakjaqlszlymyqpw\"}}") .toObject(TumblingWindowTrigger.class); - Assertions.assertEquals("jpvgvbz", model.description()); - Assertions.assertEquals("tx", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("uxmegrix", model.pipeline().pipelineReference().name()); - Assertions.assertEquals(TumblingWindowFrequency.MONTH, model.frequency()); - Assertions.assertEquals(838287368, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-15T18:57:46Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-24T14:06:28Z"), model.endTime()); - Assertions.assertEquals(2006064884, model.maxConcurrency()); - Assertions.assertEquals(1595340480, model.retryPolicy().intervalInSeconds()); + Assertions.assertEquals("zuykly", model.description()); + Assertions.assertEquals("pcoiaaagvaec", model.pipeline().pipelineReference().referenceName()); + Assertions.assertEquals("dqgoo", model.pipeline().pipelineReference().name()); + Assertions.assertEquals(TumblingWindowFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1684959165, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-12-01T05:51:53Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-11-29T21:19:29Z"), model.endTime()); + Assertions.assertEquals(1878853669, model.maxConcurrency()); + 
Assertions.assertEquals(31605107, model.retryPolicy().intervalInSeconds()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TumblingWindowTrigger model = new TumblingWindowTrigger().withDescription("jpvgvbz") - .withAnnotations(Arrays.asList("datazgrfaq", "datatqmcszdptoyt", "datanxgqovfrtm")) + TumblingWindowTrigger model = new TumblingWindowTrigger().withDescription("zuykly") + .withAnnotations(Arrays.asList("datalevardbrdrt", "dataakezorhjhyzqhb")) .withPipeline(new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("tx").withName("uxmegrix")) - .withParameters(mapOf("tcoibiodfy", "datazjlqrpsqpj", "oemcajjazo", "dataafenwvvxcah"))) - .withFrequency(TumblingWindowFrequency.MONTH).withInterval(838287368) - .withStartTime(OffsetDateTime.parse("2021-04-15T18:57:46Z")) - .withEndTime(OffsetDateTime.parse("2021-07-24T14:06:28Z")).withDelay("datahxydx") - .withMaxConcurrency(2006064884) - .withRetryPolicy(new RetryPolicy().withCount("datailgtbslagtmkii").withIntervalInSeconds(1595340480)) - .withDependsOn(Arrays.asList(new DependencyReference(), new DependencyReference(), - new DependencyReference(), new DependencyReference())); + .withPipelineReference(new PipelineReference().withReferenceName("pcoiaaagvaec").withName("dqgoo")) + .withParameters(mapOf("fh", "datafrgmpu", "wcxzxvgfxzck", "datakrfe", "jal", "datawqujpugj"))) + .withFrequency(TumblingWindowFrequency.MINUTE) + .withInterval(1684959165) + .withStartTime(OffsetDateTime.parse("2021-12-01T05:51:53Z")) + .withEndTime(OffsetDateTime.parse("2021-11-29T21:19:29Z")) + .withDelay("datadsvycj") + .withMaxConcurrency(1878853669) + .withRetryPolicy(new RetryPolicy().withCount("datapuphgogmggkkjc").withIntervalInSeconds(31605107)) + .withDependsOn( + Arrays.asList(new DependencyReference(), new DependencyReference(), new DependencyReference())); model = BinaryData.fromObject(model).toObject(TumblingWindowTrigger.class); - Assertions.assertEquals("jpvgvbz", model.description()); - Assertions.assertEquals("tx", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("uxmegrix", model.pipeline().pipelineReference().name()); - Assertions.assertEquals(TumblingWindowFrequency.MONTH, model.frequency()); - Assertions.assertEquals(838287368, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-15T18:57:46Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-24T14:06:28Z"), model.endTime()); - Assertions.assertEquals(2006064884, model.maxConcurrency()); - Assertions.assertEquals(1595340480, model.retryPolicy().intervalInSeconds()); + Assertions.assertEquals("zuykly", model.description()); + Assertions.assertEquals("pcoiaaagvaec", model.pipeline().pipelineReference().referenceName()); + Assertions.assertEquals("dqgoo", model.pipeline().pipelineReference().name()); + Assertions.assertEquals(TumblingWindowFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1684959165, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-12-01T05:51:53Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-11-29T21:19:29Z"), model.endTime()); + Assertions.assertEquals(1878853669, model.maxConcurrency()); + Assertions.assertEquals(31605107, model.retryPolicy().intervalInSeconds()); } // Use "Map.of" if available diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java index 258770ae4a6a0..7d965b282674a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java @@ -17,32 +17,33 @@ public final class TumblingWindowTriggerTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TumblingWindowTriggerTypeProperties model = BinaryData.fromString( - "{\"frequency\":\"Hour\",\"interval\":1467815834,\"startTime\":\"2021-01-26T15:42:35Z\",\"endTime\":\"2021-06-18T21:24:42Z\",\"delay\":\"datahxfjlecbbabih\",\"maxConcurrency\":945617288,\"retryPolicy\":{\"count\":\"datarxzatlzwrpj\",\"intervalInSeconds\":526431444},\"dependsOn\":[{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"}]}") + "{\"frequency\":\"Minute\",\"interval\":1052042151,\"startTime\":\"2021-03-29T22:51Z\",\"endTime\":\"2021-04-11T13:06:29Z\",\"delay\":\"dataqmbuv\",\"maxConcurrency\":838737054,\"retryPolicy\":{\"count\":\"datamcymwr\",\"intervalInSeconds\":1972140841},\"dependsOn\":[{\"type\":\"uldgtkedvxh\"}]}") .toObject(TumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1467815834, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-26T15:42:35Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-18T21:24:42Z"), model.endTime()); - Assertions.assertEquals(945617288, model.maxConcurrency()); - Assertions.assertEquals(526431444, model.retryPolicy().intervalInSeconds()); + Assertions.assertEquals(TumblingWindowFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1052042151, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-03-29T22:51Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-04-11T13:06:29Z"), model.endTime()); + Assertions.assertEquals(838737054, model.maxConcurrency()); + Assertions.assertEquals(1972140841, model.retryPolicy().intervalInSeconds()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { TumblingWindowTriggerTypeProperties model - = new TumblingWindowTriggerTypeProperties().withFrequency(TumblingWindowFrequency.HOUR) - .withInterval(1467815834).withStartTime(OffsetDateTime.parse("2021-01-26T15:42:35Z")) - .withEndTime(OffsetDateTime.parse("2021-06-18T21:24:42Z")).withDelay("datahxfjlecbbabih") - .withMaxConcurrency(945617288) - .withRetryPolicy(new RetryPolicy().withCount("datarxzatlzwrpj").withIntervalInSeconds(526431444)) - .withDependsOn( - Arrays.asList(new DependencyReference(), new DependencyReference(), new DependencyReference())); + = new TumblingWindowTriggerTypeProperties().withFrequency(TumblingWindowFrequency.MINUTE) + .withInterval(1052042151) + .withStartTime(OffsetDateTime.parse("2021-03-29T22:51Z")) + .withEndTime(OffsetDateTime.parse("2021-04-11T13:06:29Z")) + .withDelay("dataqmbuv") + .withMaxConcurrency(838737054) + .withRetryPolicy(new 
RetryPolicy().withCount("datamcymwr").withIntervalInSeconds(1972140841)) + .withDependsOn(Arrays.asList(new DependencyReference())); model = BinaryData.fromObject(model).toObject(TumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1467815834, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-26T15:42:35Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-18T21:24:42Z"), model.endTime()); - Assertions.assertEquals(945617288, model.maxConcurrency()); - Assertions.assertEquals(526431444, model.retryPolicy().intervalInSeconds()); + Assertions.assertEquals(TumblingWindowFrequency.MINUTE, model.frequency()); + Assertions.assertEquals(1052042151, model.interval()); + Assertions.assertEquals(OffsetDateTime.parse("2021-03-29T22:51Z"), model.startTime()); + Assertions.assertEquals(OffsetDateTime.parse("2021-04-11T13:06:29Z"), model.endTime()); + Assertions.assertEquals(838737054, model.maxConcurrency()); + Assertions.assertEquals(1972140841, model.retryPolicy().intervalInSeconds()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java index b274876ed5598..177227df4c347 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java @@ -11,16 +11,18 @@ public final class TypeConversionSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { TypeConversionSettings model = BinaryData.fromString( - "{\"allowDataTruncation\":\"datan\",\"treatBooleanAsNumber\":\"datavxdafilaizcdugn\",\"dateTimeFormat\":\"datamljgayka\",\"dateTimeOffsetFormat\":\"datankxoqecjznuqg\",\"timeSpanFormat\":\"datavmuewshhq\",\"culture\":\"datavchliezfbtczzjf\"}") + "{\"allowDataTruncation\":\"datavgxqtkcvnyi\",\"treatBooleanAsNumber\":\"dataexw\",\"dateTimeFormat\":\"dataivgxe\",\"dateTimeOffsetFormat\":\"datau\",\"timeSpanFormat\":\"datayztnsutesq\",\"culture\":\"datalzyhavt\"}") .toObject(TypeConversionSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - TypeConversionSettings model = new TypeConversionSettings().withAllowDataTruncation("datan") - .withTreatBooleanAsNumber("datavxdafilaizcdugn").withDateTimeFormat("datamljgayka") - .withDateTimeOffsetFormat("datankxoqecjznuqg").withTimeSpanFormat("datavmuewshhq") - .withCulture("datavchliezfbtczzjf"); + TypeConversionSettings model = new TypeConversionSettings().withAllowDataTruncation("datavgxqtkcvnyi") + .withTreatBooleanAsNumber("dataexw") + .withDateTimeFormat("dataivgxe") + .withDateTimeOffsetFormat("datau") + .withTimeSpanFormat("datayztnsutesq") + .withCulture("datalzyhavt"); model = BinaryData.fromObject(model).toObject(TypeConversionSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java index 
e87eaa9590ea7..97c6bdd16744e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java @@ -22,119 +22,101 @@ public final class UntilActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { UntilActivity model = BinaryData.fromString( - "{\"type\":\"Until\",\"typeProperties\":{\"expression\":{\"value\":\"oggzppufu\"},\"timeout\":\"dataaiecexy\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hjqdwlxa\",\"description\":\"l\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"aipfqnxjko\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"mhabzjemqvlouuc\":\"datajbpt\"}}],\"userProperties\":[{\"name\":\"bqsj\",\"value\":\"datancgqhpqgivyx\"},{\"name\":\"jimussvur\",\"value\":\"datalwdxnx\"},{\"name\":\"valvkdaql\",\"value\":\"datasoqrhwla\"}],\"\":{\"zis\":\"datatwsxliwpzuce\",\"ubh\":\"datarvtrwswbm\",\"ivusehyvqxjbqfcl\":\"databtthzfgpzy\"}},{\"type\":\"Activity\",\"name\":\"jecajtuo\",\"description\":\"dlzxuakbavpk\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"kfzbqvsgxe\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"dhhqsfht\":\"datagn\"}}],\"userProperties\":[{\"name\":\"axdyxjicikzmvdd\",\"value\":\"datafjmi\"}],\"\":{\"svpfspfdfrymrfpq\":\"datafcqls\",\"w\":\"dataxln\",\"qhzotkowi\":\"datagi\"}},{\"type\":\"Activity\",\"name\":\"uerhzyl\",\"description\":\"ym\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ieemslhztvry\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\"],\"\":{\"gryolbqcftrywdg\":\"datawnewmpwj\"}},{\"activity\":\"skdl\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\"],\"\":{\"djgcuew\":\"datanxvmcxljlpyhdx\",\"qgfqivmsxwev\":\"datanq\",\"oubjnmoid\":\"datajmxvvtuk\",\"cgmfklqswwdbs\":\"datanbfbkwyvw\"}}],\"userProperties\":[{\"name\":\"ysedqrbevobqrwng\",\"value\":\"datayjfquzxmtmsyiby\"}],\"\":{\"jkrosqxvffrnc\":\"datapksas\",\"lzoi\":\"datawvjgyjoklngjs\",\"gakkszz\":\"datawsqdnasjup\"}},{\"type\":\"Activity\",\"name\":\"dtvrgyebvq\",\"description\":\"ikeuqvqiotvfcb\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"fatyqa\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"teap\":\"dataqopglixhapvwacwr\"}},{\"activity\":\"cnknzncoxeop\",\"dependencyConditions\":[\"Completed\"],\"\":{\"vr\":\"dataaxrsyxeqwgaeice\",\"ztssqbclaeciwz\":\"datadcidcxkywy\"}},{\"activity\":\"vttkha\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Failed\"],\"\":{\"jkhqaxpicza\":\"datamqj\",\"dv\":\"datagevsnnqvkufezwgw\",\"dkjusqhr\":\"dataskffqqaobbq\"}},{\"activity\":\"adffdr\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Completed\"],\"\":{\"iembc\":\"datacctkrvnk\",\"ixjkxvz\":\"datatzmldw\",\"orqbmkfo\":\"dataa\"}}],\"userProperties\":[{\"name\":\"aqfqgmwdo\",\"value\":\"datavinvzdnubs\"},{\"name\":\"skgiy\",\"value\":\"datailbiwacxldho\"},{\"name\":\"cdpwxh\",\"value\":\"datacvtbgznpx\"},{\"name\":\"xcshtlqhikmfzdlh\",\"value\":\"datazo\"}],\"\":{\"dn\":\"datauziaztmxwmjaevw\",\"hwdirt\":\"datapfku\",\"aqya\":\"datay\"}}]},\"name\":\"dykxgcfhv\",\"description\":\"ns\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"qfbycra\",\"dependencyConditions\":[\
"Failed\"],\"\":{\"f\":\"datatnujhj\",\"gdv\":\"dataemmjtst\",\"mvavjyqhcowou\":\"datapxhdefyditb\",\"nyv\":\"dataihl\"}},{\"activity\":\"gjb\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Failed\"],\"\":{\"mpzb\":\"datar\",\"dciyidzbpfwlxxwp\":\"dataiakyflryhvph\",\"gstmlhrziggc\":\"dataz\",\"n\":\"datazynimkk\"}},{\"activity\":\"owcutohmxmj\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"qnnef\":\"datahoiimennxvqjakq\"}}],\"userProperties\":[{\"name\":\"uguzesfgg\",\"value\":\"dataeqllrpcqyxqf\"},{\"name\":\"rvmvdqhageho\",\"value\":\"dataqeqtlsipedgtup\"},{\"name\":\"mvxeubngwidgxy\",\"value\":\"datadovlp\"}],\"\":{\"lt\":\"datamfvyhmivy\",\"s\":\"dataakmtvoprg\",\"orxibw\":\"datay\",\"cwfo\":\"datafxkud\"}}") + "{\"type\":\"diujfputc\",\"typeProperties\":{\"expression\":{\"value\":\"yaszuoheuifshs\"},\"timeout\":\"datapl\",\"activities\":[{\"type\":\"kztc\",\"name\":\"qkwg\",\"description\":\"aeby\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"zfqzvkorsrpm\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"iq\":\"dataj\",\"wbdk\":\"dataqtm\",\"hpfrexbg\":\"datauxda\",\"z\":\"datawjmsogzc\"}},{\"activity\":\"j\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"rzvjvlnafpfo\":\"datavdnpkdmqc\",\"hjwcdjxqxf\":\"databqaqbae\"}}],\"userProperties\":[{\"name\":\"urxwtfpeuftpjldl\",\"value\":\"dataforvsypjytgz\"},{\"name\":\"mmbxkbtberyql\",\"value\":\"datazebmikojpggwuwa\"}],\"\":{\"beuugir\":\"datalgmgcnllq\",\"fgqhxdyhoozd\":\"dataqezpsydkgtdwlv\",\"nyleisawvd\":\"datathhecmjgbzhd\",\"yrfmozuoopotudky\":\"datamuytkkfoto\"}},{\"type\":\"iyvh\",\"name\":\"dkbfqkea\",\"description\":\"bkenqcvdrpwck\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"tlbaxiaerpoasy\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Succeeded\"],\"\":{\"ixoudmaniwkwtmqy\":\"datannrd\"}},{\"activity\":\"giydgee\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"p\":\"datawehjqyullep\",\"gobrsnxcayy\":\"dataf\",\"vmmef\":\"datari\"}},{\"activity\":\"pimmwirixczxkxv\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"efygna\":\"dataxzascalw\"}},{\"activity\":\"pie\",\"dependencyConditions\":[\"Completed\"],\"\":{\"aypsvedxphf\":\"datau\",\"qqwxjnkbes\":\"datao\",\"qoujhmdpe\":\"datammitvviqs\"}}],\"userProperties\":[{\"name\":\"qwjqevwtkrjqnciw\",\"value\":\"databjwngrrpdt\"},{\"name\":\"nhctkgllmpku\",\"value\":\"datablucxyhtkyq\"},{\"name\":\"ynvtimpgusroqk\",\"value\":\"datawkvojr\"}],\"\":{\"hkplxbpxzdu\":\"datajtjqhfkwsmgkomqf\"}}]},\"name\":\"h\",\"description\":\"apksjwaglhwnnfg\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"kmwvqtmfq\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Succeeded\"],\"\":{\"ujsvhezhezyhwo\":\"datatshjjbocuug\",\"x\":\"dataayyshf\",\"zeid\":\"datafyzjzeylthdr\",\"divqybvgcebuts\":\"datalredxfcckticw\"}}],\"userProperties\":[{\"name\":\"suhtlzomsqebm\",\"value\":\"dataope\"},{\"name\":\"yfuliatbosnla\",\"value\":\"dataeozgjtuhdgmshuyq\"}],\"\":{\"bzlz\":\"dataprrptoentuveqmtl\",\"hetrqudxzrbg\":\"dataduqlxwrets\",\"iwpaeumely\":\"datatjjiearyzzxk\"}}") .toObject(UntilActivity.class); - Assertions.assertEquals("dykxgcfhv", model.name()); - Assertions.assertEquals("ns", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, 
model.onInactiveMarkAs()); - Assertions.assertEquals("qfbycra", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("uguzesfgg", model.userProperties().get(0).name()); - Assertions.assertEquals("oggzppufu", model.expression().value()); - Assertions.assertEquals("hjqdwlxa", model.activities().get(0).name()); - Assertions.assertEquals("l", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("aipfqnxjko", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("h", model.name()); + Assertions.assertEquals("apksjwaglhwnnfg", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("kmwvqtmfq", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("suhtlzomsqebm", model.userProperties().get(0).name()); + Assertions.assertEquals("yaszuoheuifshs", model.expression().value()); + Assertions.assertEquals("qkwg", model.activities().get(0).name()); + Assertions.assertEquals("aeby", model.activities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("zfqzvkorsrpm", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bqsj", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("urxwtfpeuftpjldl", model.activities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - UntilActivity model = new UntilActivity().withName("dykxgcfhv").withDescription("ns") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("qfbycra") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)).withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gjb") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("owcutohmxmj") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("uguzesfgg").withValue("dataeqllrpcqyxqf"), - new UserProperty().withName("rvmvdqhageho").withValue("dataqeqtlsipedgtup"), - new UserProperty().withName("mvxeubngwidgxy").withValue("datadovlp"))) - .withExpression( - new Expression().withValue("oggzppufu")) - .withTimeout("dataaiecexy") + UntilActivity model = new UntilActivity().withName("h") + .withDescription("apksjwaglhwnnfg") + 
.withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("kmwvqtmfq") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, + DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList(new UserProperty().withName("suhtlzomsqebm").withValue("dataope"), + new UserProperty().withName("yfuliatbosnla").withValue("dataeozgjtuhdgmshuyq"))) + .withExpression(new Expression().withValue("yaszuoheuifshs")) + .withTimeout("datapl") .withActivities(Arrays.asList( - new Activity().withName("hjqdwlxa").withDescription("l").withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("aipfqnxjko") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("bqsj").withValue("datancgqhpqgivyx"), - new UserProperty().withName("jimussvur").withValue("datalwdxnx"), - new UserProperty().withName("valvkdaql").withValue("datasoqrhwla"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("jecajtuo").withDescription("dlzxuakbavpk").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("kfzbqvsgxe") + new Activity().withName("qkwg") + .withDescription("aeby") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("zfqzvkorsrpm") .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("axdyxjicikzmvdd").withValue("datafjmi"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("uerhzyl").withDescription("ym").withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ieemslhztvry") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("skdl") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) + Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("j") + .withDependencyConditions( + Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("ysedqrbevobqrwng").withValue("datayjfquzxmtmsyiby"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("dtvrgyebvq").withDescription("ikeuqvqiotvfcb") - .withState(ActivityState.INACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + Arrays.asList(new UserProperty().withName("urxwtfpeuftpjldl").withValue("dataforvsypjytgz"), 
+ new UserProperty().withName("mmbxkbtberyql").withValue("datazebmikojpggwuwa"))) + .withAdditionalProperties(mapOf("type", "kztc")), + new Activity().withName("dkbfqkea") + .withDescription("bkenqcvdrpwck") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("fatyqa") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) + new ActivityDependency().withActivity("tlbaxiaerpoasy") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cnknzncoxeop") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("giydgee") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, + DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vttkha") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) + new ActivityDependency().withActivity("pimmwirixczxkxv") + .withDependencyConditions( + Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("adffdr") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + new ActivityDependency().withActivity("pie") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) .withUserProperties( - Arrays.asList(new UserProperty().withName("aqfqgmwdo").withValue("datavinvzdnubs"), - new UserProperty().withName("skgiy").withValue("datailbiwacxldho"), - new UserProperty().withName("cdpwxh").withValue("datacvtbgznpx"), - new UserProperty().withName("xcshtlqhikmfzdlh").withValue("datazo"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + Arrays.asList(new UserProperty().withName("qwjqevwtkrjqnciw").withValue("databjwngrrpdt"), + new UserProperty().withName("nhctkgllmpku").withValue("datablucxyhtkyq"), + new UserProperty().withName("ynvtimpgusroqk").withValue("datawkvojr"))) + .withAdditionalProperties(mapOf("type", "iyvh")))); model = BinaryData.fromObject(model).toObject(UntilActivity.class); - Assertions.assertEquals("dykxgcfhv", model.name()); - Assertions.assertEquals("ns", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("qfbycra", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("uguzesfgg", model.userProperties().get(0).name()); - Assertions.assertEquals("oggzppufu", model.expression().value()); - Assertions.assertEquals("hjqdwlxa", model.activities().get(0).name()); - Assertions.assertEquals("l", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("aipfqnxjko", 
model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("h", model.name()); + Assertions.assertEquals("apksjwaglhwnnfg", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); + Assertions.assertEquals("kmwvqtmfq", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("suhtlzomsqebm", model.userProperties().get(0).name()); + Assertions.assertEquals("yaszuoheuifshs", model.expression().value()); + Assertions.assertEquals("qkwg", model.activities().get(0).name()); + Assertions.assertEquals("aeby", model.activities().get(0).description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); + Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); + Assertions.assertEquals("zfqzvkorsrpm", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bqsj", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("urxwtfpeuftpjldl", model.activities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java index 5e3fb692d90c6..b3e25815d6eca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java @@ -22,57 +22,129 @@ public final class UntilActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { UntilActivityTypeProperties model = BinaryData.fromString( - "{\"expression\":{\"value\":\"enmuevq\"},\"timeout\":\"datas\",\"activities\":[{\"type\":\"Activity\",\"name\":\"golbpwegzdion\",\"description\":\"nespkxnhfd\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qxnd\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"afvxxiizkehfk\":\"dataqbicjh\",\"krwihbyufmuinhq\":\"dataoczxmw\",\"ih\":\"dataqfowx\",\"qxk\":\"dataxzdgotoonsduir\"}},{\"activity\":\"aeuhxnpa\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Failed\"],\"\":{\"gobu\":\"dataolfyq\",\"rwvlzbdfmhzgti\":\"datavnaxtbnjmjw\",\"et\":\"dataybimitgx\"}},{\"activity\":\"wloo\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"xytvjezi\":\"databzrrreftwhii\"}}],\"userProperties\":[{\"name\":\"lfnbzekwoajb\",\"value\":\"datawrvckyhncqy\"},{\"name\":\"g\",\"value\":\"datavpxsdtnxg\"},{\"name\":\"wxmqyhtlnnpftay\",\"value\":\"datao\"},{\"name\":\"gxamhmqexyoy\",\"value\":\"datacwzkcreuf\"}],\"\":{\"sxybalsmia\":\"datavelcrwhrp\",\"qzypvc\":\"datauvbo\",\"luhkhiycddon\":\"databyyvxjelsjhgrvy\",\"bkez\":\"dataikujjdoelaw\"}}]}") + 
"{\"expression\":{\"value\":\"hphurzaz\"},\"timeout\":\"datakgmtrnwwww\",\"activities\":[{\"type\":\"rditghbaqumqlfno\",\"name\":\"ize\",\"description\":\"yg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qryaahltt\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"eovsgikvsnfn\":\"datasjlkjvoeuiwyptze\"}},{\"activity\":\"fsfgabdumhpbcix\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"gbelxmulyalupijq\":\"datawyltau\",\"qitmcxqahxtnl\":\"datarfdyawetkr\",\"xqqbk\":\"datarjdceimludqx\"}},{\"activity\":\"dnskivoxrjmgms\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"eebcxeecgfs\":\"dataufhx\",\"yltnfwlt\":\"datadjipayybwxqr\",\"amgijevfjnvu\":\"dataukmdeqrp\",\"xtkvpejtdlqorcyp\":\"datakwjmtehpfn\"}}],\"userProperties\":[{\"name\":\"falg\",\"value\":\"datasgkruclzmjhiq\"},{\"name\":\"ihducvhhayq\",\"value\":\"datalcrshozuje\"}],\"\":{\"u\":\"datafvisc\",\"qiekwf\":\"datadxfcvtzrgcmxbr\",\"otdgvsoypgqoqv\":\"datam\"}},{\"type\":\"dbypzcqlau\",\"name\":\"bkapbgmjodf\",\"description\":\"hlipxkxhj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"jmnsgv\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"piwpi\":\"dataplvglwx\",\"txyjq\":\"dataydxmplxzrofscib\",\"zxzkpumzda\":\"datari\",\"rrpzcvg\":\"databoqeteavphup\"}},{\"activity\":\"hknnvjgc\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"nrbtattzxvfsr\":\"dataofhjonqk\",\"zjvzg\":\"datafjfuvryf\"}},{\"activity\":\"zen\",\"dependencyConditions\":[\"Completed\"],\"\":{\"fkbbchdypc\":\"datadngtylvdumpmx\",\"pteclfjauetzppc\":\"datakmrvgdjbl\",\"vlrmmizh\":\"datafgrtg\",\"plvrqkmpqs\":\"dataxsybnwogvkc\"}},{\"activity\":\"lvinxwtxtetwqk\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Succeeded\"],\"\":{\"rvkneo\":\"datawlr\",\"zvugqwxslisgfx\":\"dataplng\",\"llgrckoxkpjzyc\":\"datayfeqajtzquhqrj\",\"iopecuxgim\":\"dataizjwugr\"}}],\"userProperties\":[{\"name\":\"vylfkecsdfqxsb\",\"value\":\"datadjawuldyjmjvzp\"},{\"name\":\"dhbapfrriwrm\",\"value\":\"datamrhsybvnqaxm\"},{\"name\":\"pkatjyxhvjjvsv\",\"value\":\"datamdlysfroyn\"},{\"name\":\"loamgnpf\",\"value\":\"dataivgd\"}],\"\":{\"gjukqnxy\":\"dataknik\",\"ehrslvofnsuwsu\":\"dataoyclrlepashmfbzk\"}},{\"type\":\"ddohngbbxahs\",\"name\":\"orsandslrndi\",\"description\":\"o\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"a\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"rnhpxzjk\":\"datazfssweznzfdextd\"}}],\"userProperties\":[{\"name\":\"zpcecisnhtdsk\",\"value\":\"datanigohafud\"},{\"name\":\"eowepueq\",\"value\":\"datarcnfhcqgjvlnvfz\"},{\"name\":\"mugxpuge\",\"value\":\"datawgjlxdddvfnqa\"},{\"name\":\"va\",\"value\":\"dataspjdxaytzkdqim\"}],\"\":{\"rssyyred\":\"dataijcullk\"}},{\"type\":\"nyl\",\"name\":\"rvrx\",\"description\":\"p\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ecdo\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"vicaqxs\":\"dataw\",\"drcnoexwar\":\"dataykusfqmgjexiqejv\"}},{\"activity\":\"azfsrvz\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Succeeded\"],\"\":{\"dycspidc\":\"datazwewhobxl\",\"kcuozwowwmulq\":\"dataxjfgxynuxvya\",\"lxtqlbj\":\"dataaeq\"}},{\"activity\":\"zcwfscxkr\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Completed\",\"Completed\"],\"\":{\"dvuvqgu\":\"datamkzgrrgrkovsgeb\",\"doamqkdwagnyah\":\"datal\",\"vfqykni\":\"datarxtpuyuradfiwjou\",\"dfwfzkocdjwjmrb\":\"datak\
"}},{\"activity\":\"htllkpkcqzbvyrv\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"vxrjidxiosyp\":\"datat\",\"wwhbmowmhkn\":\"datavfrbujltgxhgylla\",\"npyob\":\"datakn\"}}],\"userProperties\":[{\"name\":\"yqsds\",\"value\":\"dataewf\"},{\"name\":\"rijd\",\"value\":\"datakmcrtmvtfeyopg\"},{\"name\":\"iwebmcizmggvsxv\",\"value\":\"datawrqywaagzaxqh\"},{\"name\":\"erkyimcfmdhwtlli\",\"value\":\"datacyxcluvj\"}],\"\":{\"hshcjgoobltoar\":\"datajpld\",\"fsmabuur\":\"datacntgqyqwmzzcgbg\",\"hf\":\"datauqwvybxmu\"}}]}") .toObject(UntilActivityTypeProperties.class); - Assertions.assertEquals("enmuevq", model.expression().value()); - Assertions.assertEquals("golbpwegzdion", model.activities().get(0).name()); - Assertions.assertEquals("nespkxnhfd", model.activities().get(0).description()); + Assertions.assertEquals("hphurzaz", model.expression().value()); + Assertions.assertEquals("ize", model.activities().get(0).name()); + Assertions.assertEquals("yg", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("qxnd", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("qryaahltt", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lfnbzekwoajb", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("falg", model.activities().get(0).userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { UntilActivityTypeProperties model - = new UntilActivityTypeProperties().withExpression(new Expression().withValue("enmuevq")) - .withTimeout("datas") - .withActivities(Arrays.asList(new Activity() - .withName("golbpwegzdion").withDescription("nespkxnhfd").withState( - ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("qxnd") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("aeuhxnpa") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wloo") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("lfnbzekwoajb").withValue("datawrvckyhncqy"), - new UserProperty().withName("g").withValue("datavpxsdtnxg"), - new UserProperty().withName("wxmqyhtlnnpftay").withValue("datao"), - new UserProperty().withName("gxamhmqexyoy").withValue("datacwzkcreuf"))) - .withAdditionalProperties(mapOf("type", "Activity")))); + = new UntilActivityTypeProperties().withExpression(new Expression().withValue("hphurzaz")) + .withTimeout("datakgmtrnwwww") + .withActivities( + Arrays + .asList( + new Activity().withName("ize") + .withDescription("yg") + .withState(ActivityState.ACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + .withDependsOn(Arrays.asList( + new 
ActivityDependency().withActivity("qryaahltt") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("fsfgabdumhpbcix") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.FAILED, DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("dnskivoxrjmgms") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("falg").withValue("datasgkruclzmjhiq"), + new UserProperty().withName("ihducvhhayq").withValue("datalcrshozuje"))) + .withAdditionalProperties(mapOf("type", "rditghbaqumqlfno")), + new Activity().withName("bkapbgmjodf") + .withDescription("hlipxkxhj") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn( + Arrays + .asList( + new ActivityDependency().withActivity("jmnsgv") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.FAILED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("hknnvjgc") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, + DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("zen") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("lvinxwtxtetwqk") + .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, + DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays.asList( + new UserProperty().withName("vylfkecsdfqxsb").withValue("datadjawuldyjmjvzp"), + new UserProperty().withName("dhbapfrriwrm").withValue("datamrhsybvnqaxm"), + new UserProperty() + .withName("pkatjyxhvjjvsv") + .withValue("datamdlysfroyn"), + new UserProperty().withName("loamgnpf").withValue("dataivgd"))) + .withAdditionalProperties(mapOf("type", "dbypzcqlau")), + new Activity().withName("orsandslrndi") + .withDescription("o") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) + .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("a") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED, DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties(Arrays + .asList(new UserProperty().withName("zpcecisnhtdsk").withValue("datanigohafud"), + new UserProperty().withName("eowepueq").withValue("datarcnfhcqgjvlnvfz"), + new UserProperty().withName("mugxpuge").withValue("datawgjlxdddvfnqa"), + new UserProperty().withName("va").withValue("dataspjdxaytzkdqim"))) + .withAdditionalProperties(mapOf("type", "ddohngbbxahs")), + new Activity().withName("rvrx") + .withDescription("p") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) + .withDependsOn( + Arrays.asList( + new ActivityDependency().withActivity("ecdo") + .withDependencyConditions(Arrays + .asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("azfsrvz") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, + 
DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("zcwfscxkr") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, + DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, + DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("htllkpkcqzbvyrv") + .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) + .withAdditionalProperties(mapOf()))) + .withUserProperties( + Arrays.asList(new UserProperty().withName("yqsds").withValue("dataewf"), + new UserProperty().withName("rijd").withValue("datakmcrtmvtfeyopg"), + new UserProperty().withName("iwebmcizmggvsxv").withValue("datawrqywaagzaxqh"), + new UserProperty().withName("erkyimcfmdhwtlli").withValue("datacyxcluvj"))) + .withAdditionalProperties(mapOf("type", "nyl")))); model = BinaryData.fromObject(model).toObject(UntilActivityTypeProperties.class); - Assertions.assertEquals("enmuevq", model.expression().value()); - Assertions.assertEquals("golbpwegzdion", model.activities().get(0).name()); - Assertions.assertEquals("nespkxnhfd", model.activities().get(0).description()); + Assertions.assertEquals("hphurzaz", model.expression().value()); + Assertions.assertEquals("ize", model.activities().get(0).name()); + Assertions.assertEquals("yg", model.activities().get(0).description()); Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("qxnd", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, + Assertions.assertEquals("qryaahltt", model.activities().get(0).dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SKIPPED, model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lfnbzekwoajb", model.activities().get(0).userProperties().get(0).name()); + Assertions.assertEquals("falg", model.activities().get(0).userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java index 834e5594e041d..8bae85bc0528b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java @@ -11,16 +11,16 @@ public final class UpdateIntegrationRuntimeNodeRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - UpdateIntegrationRuntimeNodeRequest model = BinaryData.fromString("{\"concurrentJobsLimit\":712721872}") + UpdateIntegrationRuntimeNodeRequest model = BinaryData.fromString("{\"concurrentJobsLimit\":538368150}") .toObject(UpdateIntegrationRuntimeNodeRequest.class); - Assertions.assertEquals(712721872, model.concurrentJobsLimit()); + Assertions.assertEquals(538368150, model.concurrentJobsLimit()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception 
{ UpdateIntegrationRuntimeNodeRequest model - = new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(712721872); + = new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(538368150); model = BinaryData.fromObject(model).toObject(UpdateIntegrationRuntimeNodeRequest.class); - Assertions.assertEquals(712721872, model.concurrentJobsLimit()); + Assertions.assertEquals(538368150, model.concurrentJobsLimit()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java index 5bd6e613e4b15..908afa9b2e098 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java @@ -13,18 +13,19 @@ public final class UpdateIntegrationRuntimeRequestTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { UpdateIntegrationRuntimeRequest model - = BinaryData.fromString("{\"autoUpdate\":\"On\",\"updateDelayOffset\":\"cwscwsvlx\"}") + = BinaryData.fromString("{\"autoUpdate\":\"Off\",\"updateDelayOffset\":\"gmgsxnkjzkde\"}") .toObject(UpdateIntegrationRuntimeRequest.class); - Assertions.assertEquals(IntegrationRuntimeAutoUpdate.ON, model.autoUpdate()); - Assertions.assertEquals("cwscwsvlx", model.updateDelayOffset()); + Assertions.assertEquals(IntegrationRuntimeAutoUpdate.OFF, model.autoUpdate()); + Assertions.assertEquals("gmgsxnkjzkde", model.updateDelayOffset()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - UpdateIntegrationRuntimeRequest model = new UpdateIntegrationRuntimeRequest() - .withAutoUpdate(IntegrationRuntimeAutoUpdate.ON).withUpdateDelayOffset("cwscwsvlx"); + UpdateIntegrationRuntimeRequest model + = new UpdateIntegrationRuntimeRequest().withAutoUpdate(IntegrationRuntimeAutoUpdate.OFF) + .withUpdateDelayOffset("gmgsxnkjzkde"); model = BinaryData.fromObject(model).toObject(UpdateIntegrationRuntimeRequest.class); - Assertions.assertEquals(IntegrationRuntimeAutoUpdate.ON, model.autoUpdate()); - Assertions.assertEquals("cwscwsvlx", model.updateDelayOffset()); + Assertions.assertEquals(IntegrationRuntimeAutoUpdate.OFF, model.autoUpdate()); + Assertions.assertEquals("gmgsxnkjzkde", model.updateDelayOffset()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java index 459ca477ff3d0..73e6eb8c1675f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java @@ -12,25 +12,27 @@ public final class UserAccessPolicyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { UserAccessPolicy model = BinaryData.fromString( - 
"{\"permissions\":\"eputtmrywnuzoqf\",\"accessResourcePath\":\"yqzrnkcqvyxlw\",\"profileName\":\"lsicohoqqnwv\",\"startTime\":\"yav\",\"expireTime\":\"heun\"}") + "{\"permissions\":\"fkbey\",\"accessResourcePath\":\"wrmjmwvvjektc\",\"profileName\":\"enhwlrs\",\"startTime\":\"rzpwvlqdqgbiq\",\"expireTime\":\"ihkaetcktvfc\"}") .toObject(UserAccessPolicy.class); - Assertions.assertEquals("eputtmrywnuzoqf", model.permissions()); - Assertions.assertEquals("yqzrnkcqvyxlw", model.accessResourcePath()); - Assertions.assertEquals("lsicohoqqnwv", model.profileName()); - Assertions.assertEquals("yav", model.startTime()); - Assertions.assertEquals("heun", model.expireTime()); + Assertions.assertEquals("fkbey", model.permissions()); + Assertions.assertEquals("wrmjmwvvjektc", model.accessResourcePath()); + Assertions.assertEquals("enhwlrs", model.profileName()); + Assertions.assertEquals("rzpwvlqdqgbiq", model.startTime()); + Assertions.assertEquals("ihkaetcktvfc", model.expireTime()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - UserAccessPolicy model - = new UserAccessPolicy().withPermissions("eputtmrywnuzoqf").withAccessResourcePath("yqzrnkcqvyxlw") - .withProfileName("lsicohoqqnwv").withStartTime("yav").withExpireTime("heun"); + UserAccessPolicy model = new UserAccessPolicy().withPermissions("fkbey") + .withAccessResourcePath("wrmjmwvvjektc") + .withProfileName("enhwlrs") + .withStartTime("rzpwvlqdqgbiq") + .withExpireTime("ihkaetcktvfc"); model = BinaryData.fromObject(model).toObject(UserAccessPolicy.class); - Assertions.assertEquals("eputtmrywnuzoqf", model.permissions()); - Assertions.assertEquals("yqzrnkcqvyxlw", model.accessResourcePath()); - Assertions.assertEquals("lsicohoqqnwv", model.profileName()); - Assertions.assertEquals("yav", model.startTime()); - Assertions.assertEquals("heun", model.expireTime()); + Assertions.assertEquals("fkbey", model.permissions()); + Assertions.assertEquals("wrmjmwvvjektc", model.accessResourcePath()); + Assertions.assertEquals("enhwlrs", model.profileName()); + Assertions.assertEquals("rzpwvlqdqgbiq", model.startTime()); + Assertions.assertEquals("ihkaetcktvfc", model.expireTime()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java index 8d78910b49039..316280decf0e1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java @@ -11,15 +11,15 @@ public final class UserPropertyTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - UserProperty model = BinaryData.fromString("{\"name\":\"fmluiqtqzfavyvn\",\"value\":\"dataqybaryeua\"}") - .toObject(UserProperty.class); - Assertions.assertEquals("fmluiqtqzfavyvn", model.name()); + UserProperty model + = BinaryData.fromString("{\"name\":\"otcubi\",\"value\":\"datap\"}").toObject(UserProperty.class); + Assertions.assertEquals("otcubi", model.name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - UserProperty model = new UserProperty().withName("fmluiqtqzfavyvn").withValue("dataqybaryeua"); + UserProperty model = new 
UserProperty().withName("otcubi").withValue("datap"); model = BinaryData.fromObject(model).toObject(UserProperty.class); - Assertions.assertEquals("fmluiqtqzfavyvn", model.name()); + Assertions.assertEquals("otcubi", model.name()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java index cf243deabef7c..4925cda9bb660 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java @@ -21,53 +21,54 @@ public final class ValidationActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ValidationActivity model = BinaryData.fromString( - "{\"type\":\"Validation\",\"typeProperties\":{\"timeout\":\"dataotvoszgcyhwj\",\"sleep\":\"dataahqwvvferl\",\"minimumSize\":\"datafz\",\"childItems\":\"dataqsbjbshwxyfsk\",\"dataset\":{\"referenceName\":\"qejkmltwft\",\"parameters\":{\"jjbdg\":\"datamtkxziowutc\",\"vpwzv\":\"datarlohregqvusff\",\"orrnssthn\":\"datammirvmpiwo\",\"rxwabwdbclq\":\"datanzatdmncyl\"}}},\"name\":\"teoepdpxuzpqwfp\",\"description\":\"xomonqqqike\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"oydqgelccccccojn\",\"dependencyConditions\":[\"Skipped\",\"Skipped\"],\"\":{\"h\":\"datavmzpoi\",\"hvxtxuihydwkdvy\":\"datavipnbd\",\"pyquyqydtllpwz\":\"datapzq\"}},{\"activity\":\"yaudellb\",\"dependencyConditions\":[\"Failed\"],\"\":{\"b\":\"datarenedsn\",\"ptoktr\":\"datarusknpyf\"}},{\"activity\":\"wnqfdgcrf\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"pebwqzvi\":\"dataszvungkk\",\"pidb\":\"datatyeqeasiadscjha\",\"lcowb\":\"dataqvi\",\"w\":\"datapvmndqmzcgqedono\"}}],\"userProperties\":[{\"name\":\"vq\",\"value\":\"datae\"},{\"name\":\"iy\",\"value\":\"dataeaahnkntldddk\"},{\"name\":\"pvusigw\",\"value\":\"datanq\"}],\"\":{\"gigcozksoodq\":\"datarji\",\"ke\":\"dataouw\",\"yapxnqvudfi\":\"datalbmw\"}}") + "{\"type\":\"k\",\"typeProperties\":{\"timeout\":\"datarnfavqefiwwhbkxz\",\"sleep\":\"datayovlhm\",\"minimumSize\":\"dataobiagwuefmyiw\",\"childItems\":\"datatau\",\"dataset\":{\"referenceName\":\"msyfjno\",\"parameters\":{\"fyarl\":\"databcezd\",\"erql\":\"datallg\"}}},\"name\":\"kvkmfkmchc\",\"description\":\"gjvyosmxovyfdbah\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"s\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Completed\"],\"\":{\"zouayvejwqeyp\":\"datadvn\",\"hiyus\":\"dataoawnk\",\"mrnrhsv\":\"datahmjlkknwsja\",\"ivkzgcqy\":\"datajnlerm\"}},{\"activity\":\"vfekjvclbkkj\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Succeeded\",\"Completed\"],\"\":{\"vpmwnmuzcjnkaw\":\"dataftqah\"}},{\"activity\":\"yhnnwtknpbzktk\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"rebbonjhmhfseykp\":\"datazutwpsnldjj\",\"gzadpwhldxbvry\":\"datagpqnesutnsm\"}}],\"userProperties\":[{\"name\":\"zs\",\"value\":\"datamugz\"},{\"name\":\"s\",\"value\":\"datazke\"}],\"\":{\"abrhiao\":\"dataetvcxabzwehvsmt\",\"ziquirozqusdz\":\"dataldtkqoajpxtkraf\"}}") .toObject(ValidationActivity.class); - 
Assertions.assertEquals("teoepdpxuzpqwfp", model.name()); - Assertions.assertEquals("xomonqqqike", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("kvkmfkmchc", model.name()); + Assertions.assertEquals("gjvyosmxovyfdbah", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("oydqgelccccccojn", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vq", model.userProperties().get(0).name()); - Assertions.assertEquals("qejkmltwft", model.dataset().referenceName()); + Assertions.assertEquals("s", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zs", model.userProperties().get(0).name()); + Assertions.assertEquals("msyfjno", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ValidationActivity model = new ValidationActivity() - .withName("teoepdpxuzpqwfp").withDescription("xomonqqqike").withState( - ActivityState.INACTIVE) + ValidationActivity model = new ValidationActivity().withName("kvkmfkmchc") + .withDescription("gjvyosmxovyfdbah") + .withState(ActivityState.ACTIVE) .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("oydqgelccccccojn") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency() - .withActivity("yaudellb").withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wnqfdgcrf") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("vq").withValue("datae"), - new UserProperty().withName("iy").withValue("dataeaahnkntldddk"), - new UserProperty().withName("pvusigw").withValue("datanq"))) - .withTimeout("dataotvoszgcyhwj").withSleep("dataahqwvvferl").withMinimumSize("datafz") - .withChildItems("dataqsbjbshwxyfsk").withDataset( - new DatasetReference().withReferenceName("qejkmltwft").withParameters(mapOf("jjbdg", "datamtkxziowutc", - "vpwzv", "datarlohregqvusff", "orrnssthn", "datammirvmpiwo", "rxwabwdbclq", "datanzatdmncyl"))); + .withDependsOn(Arrays.asList( + new ActivityDependency().withActivity("s") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, + DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("vfekjvclbkkj") + .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, + DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("yhnnwtknpbzktk") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, + DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) + .withAdditionalProperties(mapOf()))) 
+ .withUserProperties(Arrays.asList(new UserProperty().withName("zs").withValue("datamugz"), + new UserProperty().withName("s").withValue("datazke"))) + .withTimeout("datarnfavqefiwwhbkxz") + .withSleep("datayovlhm") + .withMinimumSize("dataobiagwuefmyiw") + .withChildItems("datatau") + .withDataset(new DatasetReference().withReferenceName("msyfjno") + .withParameters(mapOf("fyarl", "databcezd", "erql", "datallg"))); model = BinaryData.fromObject(model).toObject(ValidationActivity.class); - Assertions.assertEquals("teoepdpxuzpqwfp", model.name()); - Assertions.assertEquals("xomonqqqike", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); + Assertions.assertEquals("kvkmfkmchc", model.name()); + Assertions.assertEquals("gjvyosmxovyfdbah", model.description()); + Assertions.assertEquals(ActivityState.ACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("oydqgelccccccojn", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vq", model.userProperties().get(0).name()); - Assertions.assertEquals("qejkmltwft", model.dataset().referenceName()); + Assertions.assertEquals("s", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("zs", model.userProperties().get(0).name()); + Assertions.assertEquals("msyfjno", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java index 47d0f53cd94d2..d102f22f0ecf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java @@ -15,19 +15,22 @@ public final class ValidationActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ValidationActivityTypeProperties model = BinaryData.fromString( - "{\"timeout\":\"dataxlc\",\"sleep\":\"dataqhtgtadtootkgxx\",\"minimumSize\":\"dataenlqwxskltzzp\",\"childItems\":\"datawgtmpytomftubh\",\"dataset\":{\"referenceName\":\"bwgbvpyjpai\",\"parameters\":{\"llihwpsrdaoixgqt\":\"datadazxfz\",\"ix\":\"datasjnlekotqhd\"}}}") + "{\"timeout\":\"datahhjdfyusiupdmb\",\"sleep\":\"dataumpwgteroaenv\",\"minimumSize\":\"datauzjkjx\",\"childItems\":\"dataaqzrbvogfmpdlm\",\"dataset\":{\"referenceName\":\"ynl\",\"parameters\":{\"r\":\"datalbwyqoypo\",\"jerokbdkwvjond\":\"dataajxmgxsp\"}}}") .toObject(ValidationActivityTypeProperties.class); - Assertions.assertEquals("bwgbvpyjpai", model.dataset().referenceName()); + Assertions.assertEquals("ynl", model.dataset().referenceName()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ValidationActivityTypeProperties model = new ValidationActivityTypeProperties().withTimeout("dataxlc") - 
.withSleep("dataqhtgtadtootkgxx").withMinimumSize("dataenlqwxskltzzp").withChildItems("datawgtmpytomftubh") - .withDataset(new DatasetReference().withReferenceName("bwgbvpyjpai") - .withParameters(mapOf("llihwpsrdaoixgqt", "datadazxfz", "ix", "datasjnlekotqhd"))); + ValidationActivityTypeProperties model + = new ValidationActivityTypeProperties().withTimeout("datahhjdfyusiupdmb") + .withSleep("dataumpwgteroaenv") + .withMinimumSize("datauzjkjx") + .withChildItems("dataaqzrbvogfmpdlm") + .withDataset(new DatasetReference().withReferenceName("ynl") + .withParameters(mapOf("r", "datalbwyqoypo", "jerokbdkwvjond", "dataajxmgxsp"))); model = BinaryData.fromObject(model).toObject(ValidationActivityTypeProperties.class); - Assertions.assertEquals("bwgbvpyjpai", model.dataset().referenceName()); + Assertions.assertEquals("ynl", model.dataset().referenceName()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java index 42ee762f43dd4..c5f7248720550 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java @@ -12,17 +12,16 @@ public final class VariableSpecificationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - VariableSpecification model - = BinaryData.fromString("{\"type\":\"Array\",\"defaultValue\":\"dataqabqgzslesjcb\"}") - .toObject(VariableSpecification.class); - Assertions.assertEquals(VariableType.ARRAY, model.type()); + VariableSpecification model = BinaryData.fromString("{\"type\":\"String\",\"defaultValue\":\"dataw\"}") + .toObject(VariableSpecification.class); + Assertions.assertEquals(VariableType.STRING, model.type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { VariableSpecification model - = new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("dataqabqgzslesjcb"); + = new VariableSpecification().withType(VariableType.STRING).withDefaultValue("dataw"); model = BinaryData.fromObject(model).toObject(VariableSpecification.class); - Assertions.assertEquals(VariableType.ARRAY, model.type()); + Assertions.assertEquals(VariableType.STRING, model.type()); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java index d11a1c866d662..f1e5900559fb8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java @@ -10,16 +10,16 @@ public final class VerticaDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - VerticaDatasetTypeProperties model = BinaryData - .fromString( - 
"{\"tableName\":\"datawxhflgdun\",\"table\":\"dataypxsazbxsnx\",\"schema\":\"datasznfstmprvgra\"}") + VerticaDatasetTypeProperties model = BinaryData.fromString( + "{\"tableName\":\"datagwhzbbdwrje\",\"table\":\"dataofeiiewibdtplj\",\"schema\":\"datamajokbxxcdkhxjwt\"}") .toObject(VerticaDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - VerticaDatasetTypeProperties model = new VerticaDatasetTypeProperties().withTableName("datawxhflgdun") - .withTable("dataypxsazbxsnx").withSchema("datasznfstmprvgra"); + VerticaDatasetTypeProperties model = new VerticaDatasetTypeProperties().withTableName("datagwhzbbdwrje") + .withTable("dataofeiiewibdtplj") + .withSchema("datamajokbxxcdkhxjwt"); model = BinaryData.fromObject(model).toObject(VerticaDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java index 57f547e8f2ac7..aa0191b96d428 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java @@ -11,16 +11,19 @@ public final class VerticaSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { VerticaSource model = BinaryData.fromString( - "{\"type\":\"VerticaSource\",\"query\":\"dataddqxhegcolhqz\",\"queryTimeout\":\"datalqrunqwcrk\",\"additionalColumns\":\"databyxxyfnipy\",\"sourceRetryCount\":\"datajgfbsfsv\",\"sourceRetryWait\":\"datagejypok\",\"maxConcurrentConnections\":\"dataptnwpwskck\",\"disableMetricsCollection\":\"dataymf\",\"\":{\"pemxcdreqaqvsp\":\"datagvqioqrebwarljpl\",\"rbsc\":\"datayvearwt\"}}") + "{\"type\":\"yfaprhf\",\"query\":\"dataxxaoyiskyoasxakb\",\"queryTimeout\":\"datagccldxuweweeegs\",\"additionalColumns\":\"datas\",\"sourceRetryCount\":\"datahbcrib\",\"sourceRetryWait\":\"dataszu\",\"maxConcurrentConnections\":\"dataaqlyw\",\"disableMetricsCollection\":\"datak\",\"\":{\"qu\":\"datahbknragpnm\",\"oujhijldu\":\"datafxhfgdrzegmlueb\",\"arufjfordzwbsk\":\"datavxk\"}}") .toObject(VerticaSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - VerticaSource model = new VerticaSource().withSourceRetryCount("datajgfbsfsv") - .withSourceRetryWait("datagejypok").withMaxConcurrentConnections("dataptnwpwskck") - .withDisableMetricsCollection("dataymf").withQueryTimeout("datalqrunqwcrk") - .withAdditionalColumns("databyxxyfnipy").withQuery("dataddqxhegcolhqz"); + VerticaSource model = new VerticaSource().withSourceRetryCount("datahbcrib") + .withSourceRetryWait("dataszu") + .withMaxConcurrentConnections("dataaqlyw") + .withDisableMetricsCollection("datak") + .withQueryTimeout("datagccldxuweweeegs") + .withAdditionalColumns("datas") + .withQuery("dataxxaoyiskyoasxakb"); model = BinaryData.fromObject(model).toObject(VerticaSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java index ac82eaf9ad06a..5a532b30170bb 100644 
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java @@ -19,34 +19,36 @@ public final class VerticaTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { VerticaTableDataset model = BinaryData.fromString( - "{\"type\":\"VerticaTable\",\"typeProperties\":{\"tableName\":\"dataxbofpr\",\"table\":\"dataiva\",\"schema\":\"datasbfzl\"},\"description\":\"jr\",\"structure\":\"datasfv\",\"schema\":\"datahqxtm\",\"linkedServiceName\":{\"referenceName\":\"lmfcleuovelvsp\",\"parameters\":{\"jtoudode\":\"datajtez\",\"sr\":\"datawmv\",\"emt\":\"dataciexu\"}},\"parameters\":{\"x\":{\"type\":\"Bool\",\"defaultValue\":\"dataymmcgskscb\"},\"wa\":{\"type\":\"SecureString\",\"defaultValue\":\"dataxicjojxolknsh\"},\"nchzz\":{\"type\":\"Int\",\"defaultValue\":\"databhmbglmnlbnat\"}},\"annotations\":[\"dataxortd\",\"datazvhbujk\",\"datahophqwo\"],\"folder\":{\"name\":\"ccqtwsrbf\"},\"\":{\"dzfbv\":\"dataii\",\"jtshlwvrsksdzmh\":\"dataxrvnhhmfsnqp\",\"pwfbwoetxiz\":\"datatsy\"}}") + "{\"type\":\"vmlkwkzlinv\",\"typeProperties\":{\"tableName\":\"datamjvegpdxtsa\",\"table\":\"datatcoojybolqox\",\"schema\":\"datatsl\"},\"description\":\"t\",\"structure\":\"dataxs\",\"schema\":\"dataekfxcs\",\"linkedServiceName\":{\"referenceName\":\"mzdo\",\"parameters\":{\"eohpl\":\"datakdpc\",\"edaxkuyorfjidqo\":\"datagcnbvmhvqp\",\"vfyihuzsbpwn\":\"dataaweog\",\"zgkooagrlwpame\":\"datafjcypazwiimd\"}},\"parameters\":{\"atfamrna\":{\"type\":\"Object\",\"defaultValue\":\"dataadewhuwxkyx\"},\"zwhomydxgtuqbv\":{\"type\":\"Int\",\"defaultValue\":\"datalxccprkiyf\"}},\"annotations\":[\"datazihirqvvketyd\",\"dataaqoktssgvqxer\",\"datarmh\",\"dataraqgb\"],\"folder\":{\"name\":\"vihylrxsiyzsyium\"},\"\":{\"ycfvernnkq\":\"datal\",\"oy\":\"datakayqivbigdrqgzet\",\"nmuxppwpcfmgr\":\"datat\"}}") .toObject(VerticaTableDataset.class); - Assertions.assertEquals("jr", model.description()); - Assertions.assertEquals("lmfcleuovelvsp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("x").type()); - Assertions.assertEquals("ccqtwsrbf", model.folder().name()); + Assertions.assertEquals("t", model.description()); + Assertions.assertEquals("mzdo", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("atfamrna").type()); + Assertions.assertEquals("vihylrxsiyzsyium", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - VerticaTableDataset model - = new VerticaTableDataset().withDescription("jr").withStructure("datasfv").withSchema("datahqxtm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lmfcleuovelvsp") - .withParameters(mapOf("jtoudode", "datajtez", "sr", "datawmv", "emt", "dataciexu"))) - .withParameters(mapOf("x", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataymmcgskscb"), "wa", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataxicjojxolknsh"), - "nchzz", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databhmbglmnlbnat"))) - .withAnnotations(Arrays.asList("dataxortd", "datazvhbujk", "datahophqwo")) - .withFolder(new 
DatasetFolder().withName("ccqtwsrbf")).withTableName("dataxbofpr").withTable("dataiva") - .withSchemaTypePropertiesSchema("datasbfzl"); + VerticaTableDataset model = new VerticaTableDataset().withDescription("t") + .withStructure("dataxs") + .withSchema("dataekfxcs") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mzdo") + .withParameters(mapOf("eohpl", "datakdpc", "edaxkuyorfjidqo", "datagcnbvmhvqp", "vfyihuzsbpwn", + "dataaweog", "zgkooagrlwpame", "datafjcypazwiimd"))) + .withParameters(mapOf("atfamrna", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataadewhuwxkyx"), + "zwhomydxgtuqbv", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalxccprkiyf"))) + .withAnnotations(Arrays.asList("datazihirqvvketyd", "dataaqoktssgvqxer", "datarmh", "dataraqgb")) + .withFolder(new DatasetFolder().withName("vihylrxsiyzsyium")) + .withTableName("datamjvegpdxtsa") + .withTable("datatcoojybolqox") + .withSchemaTypePropertiesSchema("datatsl"); model = BinaryData.fromObject(model).toObject(VerticaTableDataset.class); - Assertions.assertEquals("jr", model.description()); - Assertions.assertEquals("lmfcleuovelvsp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("x").type()); - Assertions.assertEquals("ccqtwsrbf", model.folder().name()); + Assertions.assertEquals("t", model.description()); + Assertions.assertEquals("mzdo", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("atfamrna").type()); + Assertions.assertEquals("vihylrxsiyzsyium", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java index 038407f4bcba5..27d7ae7e8f528 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java @@ -20,40 +20,42 @@ public final class WaitActivityTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WaitActivity model = BinaryData.fromString( - "{\"type\":\"Wait\",\"typeProperties\":{\"waitTimeInSeconds\":\"datalcz\"},\"name\":\"lkmtrrcbu\",\"description\":\"auxkgklqucxew\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"mhkqzvarq\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Failed\"],\"\":{\"xhmdorxb\":\"dataimvnvxhxza\"}},{\"activity\":\"aprksoeq\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"aktgtwvzps\":\"datarcyexb\",\"gjidrag\":\"datagho\",\"yimbqdsu\":\"datacwcdbtop\",\"xwr\":\"dataazkouvvgcwsimhj\"}}],\"userProperties\":[{\"name\":\"ofwopzqxpk\",\"value\":\"datanxjmlys\"}],\"\":{\"lfahryuz\":\"databuxjhquzirhcghn\",\"ptpq\":\"dataeuegrdit\",\"xzfy\":\"dataajggmmiwoisql\"}}") + 
"{\"type\":\"ui\",\"typeProperties\":{\"waitTimeInSeconds\":\"datawos\"},\"name\":\"ihpydywwjsqdchb\",\"description\":\"iifuyvll\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"eorvchsarffyd\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"hcftzobwpnfzlo\":\"datae\",\"vsbg\":\"datainw\",\"mwfeoutz\":\"dataywfyo\"}},{\"activity\":\"lnhgymtddk\",\"dependencyConditions\":[\"Completed\"],\"\":{\"wkrwpishcrxcep\":\"datanqijlocyiu\",\"nwtgyyyskujnzxho\":\"datavpii\"}}],\"userProperties\":[{\"name\":\"yjbenzw\",\"value\":\"datanhvs\"},{\"name\":\"pythqgz\",\"value\":\"dataplacz\"}],\"\":{\"mqyg\":\"datadh\",\"c\":\"dataefsnlob\",\"k\":\"datajeaxjgokvlixwebj\",\"j\":\"datafiizgkvtsmcn\"}}") .toObject(WaitActivity.class); - Assertions.assertEquals("lkmtrrcbu", model.name()); - Assertions.assertEquals("auxkgklqucxew", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("ihpydywwjsqdchb", model.name()); + Assertions.assertEquals("iifuyvll", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("mhkqzvarq", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ofwopzqxpk", model.userProperties().get(0).name()); + Assertions.assertEquals("eorvchsarffyd", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("yjbenzw", model.userProperties().get(0).name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WaitActivity model = new WaitActivity().withName("lkmtrrcbu").withDescription("auxkgklqucxew") - .withState(ActivityState.ACTIVE).withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) + WaitActivity model = new WaitActivity().withName("ihpydywwjsqdchb") + .withDescription("iifuyvll") + .withState(ActivityState.INACTIVE) + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mhkqzvarq") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("aprksoeq") + new ActivityDependency().withActivity("eorvchsarffyd") .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) + DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) + .withAdditionalProperties(mapOf()), + new ActivityDependency().withActivity("lnhgymtddk") + .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ofwopzqxpk").withValue("datanxjmlys"))) - .withWaitTimeInSeconds("datalcz"); + .withUserProperties(Arrays.asList(new UserProperty().withName("yjbenzw").withValue("datanhvs"), + new UserProperty().withName("pythqgz").withValue("dataplacz"))) + .withWaitTimeInSeconds("datawos"); model = BinaryData.fromObject(model).toObject(WaitActivity.class); - Assertions.assertEquals("lkmtrrcbu", model.name()); - Assertions.assertEquals("auxkgklqucxew", 
model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); + Assertions.assertEquals("ihpydywwjsqdchb", model.name()); + Assertions.assertEquals("iifuyvll", model.description()); + Assertions.assertEquals(ActivityState.INACTIVE, model.state()); Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("mhkqzvarq", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ofwopzqxpk", model.userProperties().get(0).name()); + Assertions.assertEquals("eorvchsarffyd", model.dependsOn().get(0).activity()); + Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); + Assertions.assertEquals("yjbenzw", model.userProperties().get(0).name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java index 22a3e4f28f94c..c58ee11a282bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java @@ -10,13 +10,13 @@ public final class WaitActivityTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - WaitActivityTypeProperties model - = BinaryData.fromString("{\"waitTimeInSeconds\":\"datan\"}").toObject(WaitActivityTypeProperties.class); + WaitActivityTypeProperties model = BinaryData.fromString("{\"waitTimeInSeconds\":\"dataovhcelw\"}") + .toObject(WaitActivityTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WaitActivityTypeProperties model = new WaitActivityTypeProperties().withWaitTimeInSeconds("datan"); + WaitActivityTypeProperties model = new WaitActivityTypeProperties().withWaitTimeInSeconds("dataovhcelw"); model = BinaryData.fromObject(model).toObject(WaitActivityTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java index 3c52e7a4cf335..5404a184b2548 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java @@ -17,27 +17,31 @@ public final class WarehouseSinkTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WarehouseSink model = BinaryData.fromString( - 
"{\"type\":\"WarehouseSink\",\"preCopyScript\":\"dataioqwmhcpujygnt\",\"allowCopyCommand\":\"datae\",\"copyCommandSettings\":{\"defaultValues\":[{\"columnName\":\"datath\",\"defaultValue\":\"datawy\"},{\"columnName\":\"dataxprsocfxlrzjjffl\",\"defaultValue\":\"datamuljfaulwlz\"},{\"columnName\":\"dataygnepj\",\"defaultValue\":\"dataxqdrphiyxjq\"}],\"additionalOptions\":{\"l\":\"pz\",\"wvio\":\"chpfzsfutaapb\"}},\"tableOption\":\"datayhsorcavkfhyoig\",\"writeBehavior\":\"dataedfterat\",\"writeBatchSize\":\"datakg\",\"writeBatchTimeout\":\"datarmujizd\",\"sinkRetryCount\":\"dataepfjdiwz\",\"sinkRetryWait\":\"datamumuc\",\"maxConcurrentConnections\":\"dataq\",\"disableMetricsCollection\":\"datascva\",\"\":{\"avgoullxpaylkras\":\"datagelnjgftqk\",\"skkziebmwyod\":\"datab\",\"qtxpf\":\"datamplgdxdtxbrdbw\",\"kdoukqsc\":\"datafrfvhbbnoevkkr\"}}") + "{\"type\":\"vofo\",\"preCopyScript\":\"dataxwtnrroh\",\"allowCopyCommand\":\"datagzczjwizrulrkw\",\"copyCommandSettings\":{\"defaultValues\":[{\"columnName\":\"dataggcpqmk\",\"defaultValue\":\"databenaahdjn\"},{\"columnName\":\"datachtvpeirhst\",\"defaultValue\":\"databv\"}],\"additionalOptions\":{\"nvsu\":\"hpphjimoecqpqk\",\"tyjgxu\":\"izxdlrjspx\",\"llcdqvunvnggqacf\":\"fejiurldsft\"}},\"tableOption\":\"datauwqbendzruus\",\"writeBehavior\":\"datasttjdioevifzq\",\"writeBatchSize\":\"datapphwv\",\"writeBatchTimeout\":\"datauzpiooacj\",\"sinkRetryCount\":\"dataofsiritpqqpynrln\",\"sinkRetryWait\":\"dataxevizzcjnfyubctw\",\"maxConcurrentConnections\":\"datan\",\"disableMetricsCollection\":\"datanumpna\",\"\":{\"adhnbo\":\"datakleieafpvbsllyor\",\"bvvwdftrq\":\"dataeucctppbgzfmuyls\",\"skck\":\"dataobusurxvjdxlbs\",\"mefbnc\":\"datafxtknywx\"}}") .toObject(WarehouseSink.class); - Assertions.assertEquals("pz", model.copyCommandSettings().additionalOptions().get("l")); + Assertions.assertEquals("hpphjimoecqpqk", model.copyCommandSettings().additionalOptions().get("nvsu")); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WarehouseSink model = new WarehouseSink().withWriteBatchSize("datakg").withWriteBatchTimeout("datarmujizd") - .withSinkRetryCount("dataepfjdiwz").withSinkRetryWait("datamumuc").withMaxConcurrentConnections("dataq") - .withDisableMetricsCollection("datascva").withPreCopyScript("dataioqwmhcpujygnt") - .withAllowCopyCommand("datae") + WarehouseSink model = new WarehouseSink().withWriteBatchSize("datapphwv") + .withWriteBatchTimeout("datauzpiooacj") + .withSinkRetryCount("dataofsiritpqqpynrln") + .withSinkRetryWait("dataxevizzcjnfyubctw") + .withMaxConcurrentConnections("datan") + .withDisableMetricsCollection("datanumpna") + .withPreCopyScript("dataxwtnrroh") + .withAllowCopyCommand("datagzczjwizrulrkw") .withCopyCommandSettings(new DWCopyCommandSettings() .withDefaultValues(Arrays.asList( - new DWCopyCommandDefaultValue().withColumnName("datath").withDefaultValue("datawy"), - new DWCopyCommandDefaultValue().withColumnName("dataxprsocfxlrzjjffl") - .withDefaultValue("datamuljfaulwlz"), - new DWCopyCommandDefaultValue().withColumnName("dataygnepj").withDefaultValue("dataxqdrphiyxjq"))) - .withAdditionalOptions(mapOf("l", "pz", "wvio", "chpfzsfutaapb"))) - .withTableOption("datayhsorcavkfhyoig").withWriteBehavior("dataedfterat"); + new DWCopyCommandDefaultValue().withColumnName("dataggcpqmk").withDefaultValue("databenaahdjn"), + new DWCopyCommandDefaultValue().withColumnName("datachtvpeirhst").withDefaultValue("databv"))) + .withAdditionalOptions( + mapOf("nvsu", "hpphjimoecqpqk", "tyjgxu", "izxdlrjspx", 
"llcdqvunvnggqacf", "fejiurldsft"))) + .withTableOption("datauwqbendzruus") + .withWriteBehavior("datasttjdioevifzq"); model = BinaryData.fromObject(model).toObject(WarehouseSink.class); - Assertions.assertEquals("pz", model.copyCommandSettings().additionalOptions().get("l")); + Assertions.assertEquals("hpphjimoecqpqk", model.copyCommandSettings().additionalOptions().get("nvsu")); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java index b7f1f789703ce..07138e18af024 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java @@ -12,20 +12,26 @@ public final class WarehouseSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WarehouseSource model = BinaryData.fromString( - "{\"type\":\"WarehouseSource\",\"sqlReaderQuery\":\"dataycvcksz\",\"sqlReaderStoredProcedureName\":\"datagguucpytsxnuj\",\"storedProcedureParameters\":\"datafth\",\"isolationLevel\":\"dataiieoxlbcccc\",\"partitionOption\":\"datauabde\",\"partitionSettings\":{\"partitionColumnName\":\"databgcdxqgsteek\",\"partitionUpperBound\":\"dataksvvyvoib\",\"partitionLowerBound\":\"datauqwljmzp\"},\"queryTimeout\":\"datakrwvvhcgtct\",\"additionalColumns\":\"datadcgobkcebzrt\",\"sourceRetryCount\":\"datatmtjsklkwtnqqiqc\",\"sourceRetryWait\":\"datafxldqtma\",\"maxConcurrentConnections\":\"dataejnemrfqjhc\",\"disableMetricsCollection\":\"datawlezbfgullq\",\"\":{\"gksrorxejf\":\"datajyxcmqc\",\"ray\":\"dataarphltlf\",\"wbkxdhavegy\":\"dataxzdujpuhbaog\",\"pdatvndvwwejvqpw\":\"dataqsmlbzi\"}}") + "{\"type\":\"dkj\",\"sqlReaderQuery\":\"datafkinidyjffpuuyk\",\"sqlReaderStoredProcedureName\":\"databpn\",\"storedProcedureParameters\":\"datamlctnnsjc\",\"isolationLevel\":\"dataijvaxu\",\"partitionOption\":\"datazzp\",\"partitionSettings\":{\"partitionColumnName\":\"dataaaxglxhbnqye\",\"partitionUpperBound\":\"datanlen\",\"partitionLowerBound\":\"dataykebtvnedcclpbh\"},\"queryTimeout\":\"dataiehooxqkcayy\",\"additionalColumns\":\"datanllkyiqjtxvxgrf\",\"sourceRetryCount\":\"datavhivvoczsry\",\"sourceRetryWait\":\"datavii\",\"maxConcurrentConnections\":\"datajqpsbbxke\",\"disableMetricsCollection\":\"datamqnuyu\",\"\":{\"ekhfdlbcucwfc\":\"datan\",\"tkv\":\"dataugtcccydldavozmi\"}}") .toObject(WarehouseSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WarehouseSource model = new WarehouseSource().withSourceRetryCount("datatmtjsklkwtnqqiqc") - .withSourceRetryWait("datafxldqtma").withMaxConcurrentConnections("dataejnemrfqjhc") - .withDisableMetricsCollection("datawlezbfgullq").withQueryTimeout("datakrwvvhcgtct") - .withAdditionalColumns("datadcgobkcebzrt").withSqlReaderQuery("dataycvcksz") - .withSqlReaderStoredProcedureName("datagguucpytsxnuj").withStoredProcedureParameters("datafth") - .withIsolationLevel("dataiieoxlbcccc").withPartitionOption("datauabde") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("databgcdxqgsteek") - .withPartitionUpperBound("dataksvvyvoib").withPartitionLowerBound("datauqwljmzp")); + WarehouseSource model = new 
WarehouseSource().withSourceRetryCount("datavhivvoczsry") + .withSourceRetryWait("datavii") + .withMaxConcurrentConnections("datajqpsbbxke") + .withDisableMetricsCollection("datamqnuyu") + .withQueryTimeout("dataiehooxqkcayy") + .withAdditionalColumns("datanllkyiqjtxvxgrf") + .withSqlReaderQuery("datafkinidyjffpuuyk") + .withSqlReaderStoredProcedureName("databpn") + .withStoredProcedureParameters("datamlctnnsjc") + .withIsolationLevel("dataijvaxu") + .withPartitionOption("datazzp") + .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("dataaaxglxhbnqye") + .withPartitionUpperBound("datanlen") + .withPartitionLowerBound("dataykebtvnedcclpbh")); model = BinaryData.fromObject(model).toObject(WarehouseSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java index 95ece9a72afe9..9becba9acfc02 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java @@ -19,33 +19,34 @@ public final class WarehouseTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WarehouseTableDataset model = BinaryData.fromString( - "{\"type\":\"WarehouseTable\",\"typeProperties\":{\"schema\":\"datauzntbpcadd\",\"table\":\"dataqrxip\"},\"description\":\"rplf\",\"structure\":\"datavmjjfz\",\"schema\":\"dataxlbiqqebarxknf\",\"linkedServiceName\":{\"referenceName\":\"bsymagbahdbtjmku\",\"parameters\":{\"lo\":\"datarklbizrxhuqfvpa\"}},\"parameters\":{\"atucujtju\":{\"type\":\"SecureString\",\"defaultValue\":\"dataxgqtquirgopg\"},\"chquoqhqrcsk\":{\"type\":\"Object\",\"defaultValue\":\"datajxu\"},\"agvyjcdpncvfyeqy\":{\"type\":\"Float\",\"defaultValue\":\"datafhlrvuv\"},\"ivnmev\":{\"type\":\"Object\",\"defaultValue\":\"dataijcsapqhipajs\"}},\"annotations\":[\"datacuwrfgpjfv\"],\"folder\":{\"name\":\"seodvlmdzgv\"},\"\":{\"crsm\":\"datazzugctygbbmumljv\",\"umnru\":\"dataojmxwc\",\"keqjftvltjop\":\"dataq\"}}") + "{\"type\":\"cht\",\"typeProperties\":{\"schema\":\"dataveidzwnkbjqpzmod\",\"table\":\"dataqux\"},\"description\":\"apewz\",\"structure\":\"datalbxmynslcvynavwt\",\"schema\":\"datasmczroddcaqimodn\",\"linkedServiceName\":{\"referenceName\":\"jmjxkhbucmz\",\"parameters\":{\"godjfyplavb\":\"datakfjacktavce\",\"bnzot\":\"datasecedsoqwexi\",\"jqdfadgywyla\":\"dataikf\"}},\"parameters\":{\"tdgj\":{\"type\":\"Object\",\"defaultValue\":\"datavohy\"}},\"annotations\":[\"datatkogfggylyzolrv\",\"datas\",\"dataseqjteoa\"],\"folder\":{\"name\":\"mg\"},\"\":{\"tarirdzdgvqofl\":\"datarjybpvsoba\",\"mzqsx\":\"dataukegougxpyp\"}}") .toObject(WarehouseTableDataset.class); - Assertions.assertEquals("rplf", model.description()); - Assertions.assertEquals("bsymagbahdbtjmku", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("atucujtju").type()); - Assertions.assertEquals("seodvlmdzgv", model.folder().name()); + Assertions.assertEquals("apewz", model.description()); + Assertions.assertEquals("jmjxkhbucmz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, 
model.parameters().get("tdgj").type()); + Assertions.assertEquals("mg", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WarehouseTableDataset model = new WarehouseTableDataset().withDescription("rplf").withStructure("datavmjjfz") - .withSchema("dataxlbiqqebarxknf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bsymagbahdbtjmku") - .withParameters(mapOf("lo", "datarklbizrxhuqfvpa"))) - .withParameters(mapOf("atucujtju", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataxgqtquirgopg"), - "chquoqhqrcsk", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datajxu"), - "agvyjcdpncvfyeqy", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datafhlrvuv"), "ivnmev", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataijcsapqhipajs"))) - .withAnnotations(Arrays.asList("datacuwrfgpjfv")).withFolder(new DatasetFolder().withName("seodvlmdzgv")) - .withSchemaTypePropertiesSchema("datauzntbpcadd").withTable("dataqrxip"); + WarehouseTableDataset model + = new WarehouseTableDataset().withDescription("apewz") + .withStructure("datalbxmynslcvynavwt") + .withSchema("datasmczroddcaqimodn") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jmjxkhbucmz") + .withParameters(mapOf("godjfyplavb", "datakfjacktavce", "bnzot", "datasecedsoqwexi", "jqdfadgywyla", + "dataikf"))) + .withParameters(mapOf("tdgj", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavohy"))) + .withAnnotations(Arrays.asList("datatkogfggylyzolrv", "datas", "dataseqjteoa")) + .withFolder(new DatasetFolder().withName("mg")) + .withSchemaTypePropertiesSchema("dataveidzwnkbjqpzmod") + .withTable("dataqux"); model = BinaryData.fromObject(model).toObject(WarehouseTableDataset.class); - Assertions.assertEquals("rplf", model.description()); - Assertions.assertEquals("bsymagbahdbtjmku", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("atucujtju").type()); - Assertions.assertEquals("seodvlmdzgv", model.folder().name()); + Assertions.assertEquals("apewz", model.description()); + Assertions.assertEquals("jmjxkhbucmz", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("tdgj").type()); + Assertions.assertEquals("mg", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java index c63bd9ce90158..19517aa9ce75a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java @@ -11,14 +11,15 @@ public final class WarehouseTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WarehouseTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datavpkbz\",\"table\":\"datanowpajfhxsmu\"}") + = 
BinaryData.fromString("{\"schema\":\"datamnxrxkulytivviyq\",\"table\":\"databxxyfozbgodywxj\"}") .toObject(WarehouseTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { WarehouseTableDatasetTypeProperties model - = new WarehouseTableDatasetTypeProperties().withSchema("datavpkbz").withTable("datanowpajfhxsmu"); + = new WarehouseTableDatasetTypeProperties().withSchema("datamnxrxkulytivviyq") + .withTable("databxxyfozbgodywxj"); model = BinaryData.fromObject(model).toObject(WarehouseTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java index 03e20f15516d7..1b060f29b2537 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java @@ -11,13 +11,13 @@ public final class WebAnonymousAuthenticationTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WebAnonymousAuthentication model - = BinaryData.fromString("{\"authenticationType\":\"Anonymous\",\"url\":\"datahjkxg\"}") + = BinaryData.fromString("{\"authenticationType\":\"Anonymous\",\"url\":\"datazuzxoeouf\"}") .toObject(WebAnonymousAuthentication.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WebAnonymousAuthentication model = new WebAnonymousAuthentication().withUrl("datahjkxg"); + WebAnonymousAuthentication model = new WebAnonymousAuthentication().withUrl("datazuzxoeouf"); model = BinaryData.fromObject(model).toObject(WebAnonymousAuthentication.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java index 10cbe8860a9cf..8e95774d44bb3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java @@ -8,8 +8,8 @@ import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.ParameterType; +import com.azure.resourcemanager.datafactory.models.WebAnonymousAuthentication; import com.azure.resourcemanager.datafactory.models.WebLinkedService; -import com.azure.resourcemanager.datafactory.models.WebLinkedServiceTypeProperties; import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -19,36 +19,32 @@ public final class WebLinkedServiceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WebLinkedService model = BinaryData.fromString( - 
"{\"type\":\"Web\",\"typeProperties\":{\"authenticationType\":\"WebLinkedServiceTypeProperties\",\"url\":\"dataalkqlvbkz\"},\"connectVia\":{\"referenceName\":\"yrnww\",\"parameters\":{\"crigbjbelnqalbs\":\"datainiyoizuww\",\"bwxuypcuriwll\":\"dataqxajsiueaibqjbi\",\"lwjxrymiysgh\":\"datatclveqdqtzsh\",\"vdulmka\":\"datacmqdlkkwbdek\"}},\"description\":\"n\",\"parameters\":{\"fvoz\":{\"type\":\"Float\",\"defaultValue\":\"datatpialrqhwcxxccf\"},\"nosclujywwumbus\":{\"type\":\"SecureString\",\"defaultValue\":\"datavrexitpzri\"},\"uahokiclrmmudv\":{\"type\":\"Array\",\"defaultValue\":\"databncljkhmso\"},\"tlizdoys\":{\"type\":\"Array\",\"defaultValue\":\"databscidkwznw\"}},\"annotations\":[\"databogdj\",\"datauybcp\",\"datadvuotkvkbpmk\",\"datapbnkcwauylkbdsk\"],\"\":{\"zylemp\":\"datafrtlukaf\",\"npw\":\"datahyuxxeike\"}}") + "{\"type\":\"kd\",\"typeProperties\":{\"authenticationType\":\"Anonymous\",\"url\":\"datamffcttkprxypxt\"},\"connectVia\":{\"referenceName\":\"bafiqxomevketa\",\"parameters\":{\"toai\":\"datancxtnbut\",\"hzqgbaqvqeyla\":\"dataign\",\"yfnbxw\":\"datakobkrg\",\"szhxothfyifjufzl\":\"datalh\"}},\"description\":\"jsmtghm\",\"parameters\":{\"zprycixjgyamo\":{\"type\":\"SecureString\",\"defaultValue\":\"datacqsxyrywd\"},\"wqvckhmoudmca\":{\"type\":\"String\",\"defaultValue\":\"datawwykfytjkztdc\"}},\"annotations\":[\"datajfuvmjtxwazyvibn\",\"dataebffkgfiyd\",\"datarjmwaa\"],\"\":{\"jnaotavwmrfqt\":\"datakhptyhziqeo\",\"cyrwvoohmcw\":\"datagdfecju\"}}") .toObject(WebLinkedService.class); - Assertions.assertEquals("yrnww", model.connectVia().referenceName()); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fvoz").type()); + Assertions.assertEquals("bafiqxomevketa", model.connectVia().referenceName()); + Assertions.assertEquals("jsmtghm", model.description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("zprycixjgyamo").type()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WebLinkedService model - = new WebLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("yrnww") - .withParameters(mapOf("crigbjbelnqalbs", "datainiyoizuww", "bwxuypcuriwll", "dataqxajsiueaibqjbi", - "lwjxrymiysgh", "datatclveqdqtzsh", "vdulmka", "datacmqdlkkwbdek"))) - .withDescription("n") - .withParameters(mapOf("fvoz", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatpialrqhwcxxccf"), - "nosclujywwumbus", + WebLinkedService model = new WebLinkedService() + .withConnectVia(new IntegrationRuntimeReference().withReferenceName("bafiqxomevketa") + .withParameters(mapOf("toai", "datancxtnbut", "hzqgbaqvqeyla", "dataign", "yfnbxw", "datakobkrg", + "szhxothfyifjufzl", "datalh"))) + .withDescription("jsmtghm") + .withParameters( + mapOf("zprycixjgyamo", new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datavrexitpzri"), - "uahokiclrmmudv", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("databncljkhmso"), - "tlizdoys", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("databscidkwznw"))) - .withAnnotations(Arrays.asList("databogdj", "datauybcp", "datadvuotkvkbpmk", "datapbnkcwauylkbdsk")) - .withTypeProperties(new WebLinkedServiceTypeProperties().withUrl("dataalkqlvbkz")); + .withDefaultValue("datacqsxyrywd"), + "wqvckhmoudmca", + new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datawwykfytjkztdc"))) + .withAnnotations(Arrays.asList("datajfuvmjtxwazyvibn", "dataebffkgfiyd", "datarjmwaa")) + .withTypeProperties(new WebAnonymousAuthentication().withUrl("datamffcttkprxypxt")); model = BinaryData.fromObject(model).toObject(WebLinkedService.class); - Assertions.assertEquals("yrnww", model.connectVia().referenceName()); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("fvoz").type()); + Assertions.assertEquals("bafiqxomevketa", model.connectVia().referenceName()); + Assertions.assertEquals("jsmtghm", model.description()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("zprycixjgyamo").type()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java index fe47b057a2fe5..22ee24e2a01a4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java @@ -5,19 +5,20 @@ package com.azure.resourcemanager.datafactory.generated; import com.azure.core.util.BinaryData; +import com.azure.resourcemanager.datafactory.models.WebBasicAuthentication; import com.azure.resourcemanager.datafactory.models.WebLinkedServiceTypeProperties; public final class WebLinkedServiceTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { - WebLinkedServiceTypeProperties model = BinaryData - .fromString("{\"authenticationType\":\"WebLinkedServiceTypeProperties\",\"url\":\"datanhqafuvvys\"}") - .toObject(WebLinkedServiceTypeProperties.class); + WebLinkedServiceTypeProperties model + = BinaryData.fromString("{\"authenticationType\":\"Basic\",\"url\":\"dataczypslfqgfwonb\"}") + .toObject(WebLinkedServiceTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WebLinkedServiceTypeProperties model = new WebLinkedServiceTypeProperties().withUrl("datanhqafuvvys"); + WebLinkedServiceTypeProperties model = new WebBasicAuthentication().withUrl("dataczypslfqgfwonb"); model = BinaryData.fromObject(model).toObject(WebLinkedServiceTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java index 761da82b38e94..134f31b43ad54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java @@ -11,15 +11,17 @@ public final class WebSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WebSource model = BinaryData.fromString( - 
"{\"type\":\"WebSource\",\"additionalColumns\":\"datahboplavgfbvro\",\"sourceRetryCount\":\"datauexqweyslwlppoh\",\"sourceRetryWait\":\"datafgalexy\",\"maxConcurrentConnections\":\"datagkadtwd\",\"disableMetricsCollection\":\"databjx\",\"\":{\"jkwltnsnhuvmok\":\"dataxcjdobsgvz\",\"dnlodkqrqnkptixa\":\"datahsclpnb\",\"zmhoplqtzgt\":\"dataoyzgaevrygggcc\"}}") + "{\"type\":\"ugjalmzpfyl\",\"additionalColumns\":\"datadbeanigozjrcx\",\"sourceRetryCount\":\"datavwwvznp\",\"sourceRetryWait\":\"datacizropzgjleecffb\",\"maxConcurrentConnections\":\"datakvb\",\"disableMetricsCollection\":\"datastqwnpegoupdq\",\"\":{\"xo\":\"datavdfaqcql\"}}") .toObject(WebSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WebSource model = new WebSource().withSourceRetryCount("datauexqweyslwlppoh").withSourceRetryWait("datafgalexy") - .withMaxConcurrentConnections("datagkadtwd").withDisableMetricsCollection("databjx") - .withAdditionalColumns("datahboplavgfbvro"); + WebSource model = new WebSource().withSourceRetryCount("datavwwvznp") + .withSourceRetryWait("datacizropzgjleecffb") + .withMaxConcurrentConnections("datakvb") + .withDisableMetricsCollection("datastqwnpegoupdq") + .withAdditionalColumns("datadbeanigozjrcx"); model = BinaryData.fromObject(model).toObject(WebSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java index 59c9a280cc296..a73daf50cc9d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java @@ -19,32 +19,37 @@ public final class WebTableDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WebTableDataset model = BinaryData.fromString( - "{\"type\":\"WebTable\",\"typeProperties\":{\"index\":\"datavapeakfdmc\",\"path\":\"datal\"},\"description\":\"lxkyoddoq\",\"structure\":\"datanqtrkicwhqyr\",\"schema\":\"datamndkrwwmurhvif\",\"linkedServiceName\":{\"referenceName\":\"eqfsrnackitl\",\"parameters\":{\"juniln\":\"databylpzjeldaq\",\"tnpkbvzpk\":\"datajhwcbrdsyp\",\"ldxuczlhvbqyczn\":\"datadngvnqdjgsbtwg\"}},\"parameters\":{\"g\":{\"type\":\"String\",\"defaultValue\":\"dataiuvcqoqkqwucqs\"},\"myvwp\":{\"type\":\"Object\",\"defaultValue\":\"datawtvmijccpkkjl\"}},\"annotations\":[\"dataaih\",\"datannlb\",\"dataxjppcbqetfzfppv\",\"datalzayjwdun\"],\"folder\":{\"name\":\"prklatwiuujxsuj\"},\"\":{\"cymgbfmdquyyaes\":\"datagxeegxbnjnczepd\",\"kihai\":\"datajxnavpyxqbkxdtb\",\"ozcgoeozlib\":\"datazkefkzlxv\"}}") + 
"{\"type\":\"vsimalbmti\",\"typeProperties\":{\"index\":\"dataensjfl\",\"path\":\"dataftvvqtmvif\"},\"description\":\"gosnxajptcdfmzxa\",\"structure\":\"datalhm\",\"schema\":\"datacnnlsbnucqxhp\",\"linkedServiceName\":{\"referenceName\":\"q\",\"parameters\":{\"oxovlz\":\"datavejoy\",\"rykkxakruuptiicg\":\"datakleldkqdlqqhnt\"}},\"parameters\":{\"ek\":{\"type\":\"Int\",\"defaultValue\":\"dataxccnpxiemacmzt\"},\"rtb\":{\"type\":\"Object\",\"defaultValue\":\"datanbrysgktfmocnqbb\"},\"ohocqxugjxugdcr\":{\"type\":\"String\",\"defaultValue\":\"dataghfuifwxudy\"},\"bdmddg\":{\"type\":\"Float\",\"defaultValue\":\"dataplvhmhurosdjl\"}},\"annotations\":[\"datauyaorservpv\",\"datasorsbegcl\",\"dataex\"],\"folder\":{\"name\":\"qzyhzydyvtuqvi\"},\"\":{\"jx\":\"datansskydigt\",\"cwuzs\":\"dataocsvjekejchxzj\"}}") .toObject(WebTableDataset.class); - Assertions.assertEquals("lxkyoddoq", model.description()); - Assertions.assertEquals("eqfsrnackitl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("g").type()); - Assertions.assertEquals("prklatwiuujxsuj", model.folder().name()); + Assertions.assertEquals("gosnxajptcdfmzxa", model.description()); + Assertions.assertEquals("q", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("ek").type()); + Assertions.assertEquals("qzyhzydyvtuqvi", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WebTableDataset model = new WebTableDataset().withDescription("lxkyoddoq").withStructure("datanqtrkicwhqyr") - .withSchema("datamndkrwwmurhvif") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("eqfsrnackitl") - .withParameters(mapOf("juniln", "databylpzjeldaq", "tnpkbvzpk", "datajhwcbrdsyp", "ldxuczlhvbqyczn", - "datadngvnqdjgsbtwg"))) - .withParameters(mapOf("g", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataiuvcqoqkqwucqs"), - "myvwp", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawtvmijccpkkjl"))) - .withAnnotations(Arrays.asList("dataaih", "datannlb", "dataxjppcbqetfzfppv", "datalzayjwdun")) - .withFolder(new DatasetFolder().withName("prklatwiuujxsuj")).withIndex("datavapeakfdmc").withPath("datal"); + WebTableDataset model = new WebTableDataset().withDescription("gosnxajptcdfmzxa") + .withStructure("datalhm") + .withSchema("datacnnlsbnucqxhp") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("q") + .withParameters(mapOf("oxovlz", "datavejoy", "rykkxakruuptiicg", "datakleldkqdlqqhnt"))) + .withParameters(mapOf("ek", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataxccnpxiemacmzt"), "rtb", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datanbrysgktfmocnqbb"), + "ohocqxugjxugdcr", + new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataghfuifwxudy"), + "bdmddg", + new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataplvhmhurosdjl"))) + .withAnnotations(Arrays.asList("datauyaorservpv", "datasorsbegcl", "dataex")) + .withFolder(new DatasetFolder().withName("qzyhzydyvtuqvi")) + .withIndex("dataensjfl") + .withPath("dataftvvqtmvif"); model = BinaryData.fromObject(model).toObject(WebTableDataset.class); - Assertions.assertEquals("lxkyoddoq", model.description()); - Assertions.assertEquals("eqfsrnackitl", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.STRING, model.parameters().get("g").type()); - Assertions.assertEquals("prklatwiuujxsuj", model.folder().name()); + Assertions.assertEquals("gosnxajptcdfmzxa", model.description()); + Assertions.assertEquals("q", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("ek").type()); + Assertions.assertEquals("qzyhzydyvtuqvi", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java index 31fd776282132..cea5b1a4a598e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java @@ -11,14 +11,14 @@ public final class WebTableDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WebTableDatasetTypeProperties model - = BinaryData.fromString("{\"index\":\"databnunzuysajvvqlho\",\"path\":\"dataon\"}") + = BinaryData.fromString("{\"index\":\"datayfyixecmasjnfgng\",\"path\":\"dataojeeyvfxbfckmo\"}") .toObject(WebTableDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { WebTableDatasetTypeProperties model - = new WebTableDatasetTypeProperties().withIndex("databnunzuysajvvqlho").withPath("dataon"); + = new WebTableDatasetTypeProperties().withIndex("datayfyixecmasjnfgng").withPath("dataojeeyvfxbfckmo"); model = BinaryData.fromObject(model).toObject(WebTableDatasetTypeProperties.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java index 13573235880dd..a75e6b976221e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java @@ -21,55 +21,59 @@ public final class WranglingDataFlowTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { WranglingDataFlow model = BinaryData.fromString( - 
"{\"type\":\"WranglingDataFlow\",\"typeProperties\":{\"sources\":[{\"script\":\"dawsxmrsz\",\"schemaLinkedService\":{\"referenceName\":\"nimx\",\"parameters\":{\"mnb\":\"dataerxrzutylcurza\",\"bjmbnvynfaooeac\":\"dataqaeht\"}},\"name\":\"edcgl\",\"description\":\"akd\",\"dataset\":{\"referenceName\":\"dahzllrqm\",\"parameters\":{\"oiduyqypff\":\"databyx\",\"yhbrjjta\":\"datanoiicsu\",\"sxxhdodp\":\"dataxrdsjrholuqwg\"}},\"linkedService\":{\"referenceName\":\"yblvtbdmvsbyi\",\"parameters\":{\"jfb\":\"datalqpvekmk\",\"gdusxurs\":\"datatlo\",\"iqrizfwihvaan\":\"dataivuxcjkcoqwczs\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"nhjrfdmfd\",\"datasetParameters\":\"datab\",\"parameters\":{\"d\":\"dataxjfwt\",\"fedyuep\":\"datakkauigvmuafmc\",\"eocfkumcfjxok\":\"datavpltidajjvy\",\"svfnkwm\":\"dataelsy\"},\"\":{\"ugjqyckgtxkrdt\":\"datajekrknfd\",\"jdkl\":\"datalcr\",\"svobchkxfp\":\"datatcsubmzoo\",\"nkkw\":\"datahdyslbklglm\"}}}],\"script\":\"qshwyqxridt\",\"documentLocale\":\"saqjmkgx\"},\"description\":\"queu\",\"annotations\":[\"dataztpziizevjykof\",\"dataezefkhkqtwqlepjj\",\"datakca\",\"datafwzcntogffjwaj\"],\"folder\":{\"name\":\"wzvaqkifmxaw\"}}") + "{\"type\":\"lzy\",\"typeProperties\":{\"sources\":[{\"script\":\"gygnhrkombc\",\"schemaLinkedService\":{\"referenceName\":\"ajdopggor\",\"parameters\":{\"fyrlmwkptskwxj\":\"dataqtrotpvclp\",\"bputmgvmuyakml\":\"datavhxccbmkakmkoo\",\"flzqjimejtgzjxx\":\"dataktfowzkroyrdur\"}},\"name\":\"fejlzuqloiw\",\"description\":\"yyzivrmitcd\",\"dataset\":{\"referenceName\":\"hchwhrktjleif\",\"parameters\":{\"fnsmycjowly\":\"dataiplh\",\"zbuw\":\"datayzmudsqcmhnx\"}},\"linkedService\":{\"referenceName\":\"machbkv\",\"parameters\":{\"plehmumk\":\"databjrmvgo\",\"dpr\":\"datadllc\",\"goxsstci\":\"datanhkgq\",\"rrynjcwmhly\":\"datarak\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ukxrke\",\"datasetParameters\":\"datapequlrlzaudgjtf\",\"parameters\":{\"dwnhczbutoucgjti\":\"dataakkuc\",\"qqwwvgwks\":\"datajwayhi\",\"abv\":\"datavlizedvb\",\"wkhojqttbspvkhg\":\"datasrgekzyqxadyfhb\"},\"\":{\"sf\":\"datajsgyzstujrzxrk\",\"qwrldaxur\":\"datarlduyehiiittugy\"}}}],\"script\":\"azcsozjvx\",\"documentLocale\":\"ciggbnvtxof\"},\"description\":\"hoeamoeobdo\",\"annotations\":[\"datafpnimtwuuhaueg\",\"datakwmnfeub\"],\"folder\":{\"name\":\"yrkwfugiph\"}}") .toObject(WranglingDataFlow.class); - Assertions.assertEquals("queu", model.description()); - Assertions.assertEquals("wzvaqkifmxaw", model.folder().name()); - Assertions.assertEquals("edcgl", model.sources().get(0).name()); - Assertions.assertEquals("akd", model.sources().get(0).description()); - Assertions.assertEquals("dahzllrqm", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yblvtbdmvsbyi", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("hoeamoeobdo", model.description()); + Assertions.assertEquals("yrkwfugiph", model.folder().name()); + Assertions.assertEquals("fejlzuqloiw", model.sources().get(0).name()); + Assertions.assertEquals("yyzivrmitcd", model.sources().get(0).description()); + Assertions.assertEquals("hchwhrktjleif", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("machbkv", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("nhjrfdmfd", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("nimx", 
model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("dawsxmrsz", model.sources().get(0).script()); - Assertions.assertEquals("qshwyqxridt", model.script()); - Assertions.assertEquals("saqjmkgx", model.documentLocale()); + Assertions.assertEquals("ukxrke", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ajdopggor", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("gygnhrkombc", model.sources().get(0).script()); + Assertions.assertEquals("azcsozjvx", model.script()); + Assertions.assertEquals("ciggbnvtxof", model.documentLocale()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - WranglingDataFlow model = new WranglingDataFlow().withDescription("queu") - .withAnnotations( - Arrays.asList("dataztpziizevjykof", "dataezefkhkqtwqlepjj", "datakca", "datafwzcntogffjwaj")) - .withFolder(new DataFlowFolder().withName("wzvaqkifmxaw")) - .withSources(Arrays.asList(new PowerQuerySource().withName("edcgl").withDescription("akd") - .withDataset(new DatasetReference().withReferenceName("dahzllrqm").withParameters( - mapOf("oiduyqypff", "databyx", "yhbrjjta", "datanoiicsu", "sxxhdodp", "dataxrdsjrholuqwg"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yblvtbdmvsbyi").withParameters( - mapOf("jfb", "datalqpvekmk", "gdusxurs", "datatlo", "iqrizfwihvaan", "dataivuxcjkcoqwczs"))) + WranglingDataFlow model = new WranglingDataFlow().withDescription("hoeamoeobdo") + .withAnnotations(Arrays.asList("datafpnimtwuuhaueg", "datakwmnfeub")) + .withFolder(new DataFlowFolder().withName("yrkwfugiph")) + .withSources(Arrays.asList(new PowerQuerySource().withName("fejlzuqloiw") + .withDescription("yyzivrmitcd") + .withDataset(new DatasetReference().withReferenceName("hchwhrktjleif") + .withParameters(mapOf("fnsmycjowly", "dataiplh", "zbuw", "datayzmudsqcmhnx"))) + .withLinkedService(new LinkedServiceReference().withReferenceName("machbkv") + .withParameters(mapOf("plehmumk", "databjrmvgo", "dpr", "datadllc", "goxsstci", "datanhkgq", + "rrynjcwmhly", "datarak"))) .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("nhjrfdmfd").withDatasetParameters("datab") - .withParameters(mapOf("d", "dataxjfwt", "fedyuep", "datakkauigvmuafmc", "eocfkumcfjxok", - "datavpltidajjvy", "svfnkwm", "dataelsy")) + .withReferenceName("ukxrke") + .withDatasetParameters("datapequlrlzaudgjtf") + .withParameters(mapOf("dwnhczbutoucgjti", "dataakkuc", "qqwwvgwks", "datajwayhi", "abv", + "datavlizedvb", "wkhojqttbspvkhg", "datasrgekzyqxadyfhb")) .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("nimx") - .withParameters(mapOf("mnb", "dataerxrzutylcurza", "bjmbnvynfaooeac", "dataqaeht"))) - .withScript("dawsxmrsz"))) - .withScript("qshwyqxridt").withDocumentLocale("saqjmkgx"); + .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ajdopggor") + .withParameters(mapOf("fyrlmwkptskwxj", "dataqtrotpvclp", "bputmgvmuyakml", "datavhxccbmkakmkoo", + "flzqjimejtgzjxx", "dataktfowzkroyrdur"))) + .withScript("gygnhrkombc"))) + .withScript("azcsozjvx") + .withDocumentLocale("ciggbnvtxof"); model = BinaryData.fromObject(model).toObject(WranglingDataFlow.class); - Assertions.assertEquals("queu", model.description()); - Assertions.assertEquals("wzvaqkifmxaw", model.folder().name()); - Assertions.assertEquals("edcgl", model.sources().get(0).name()); - 
Assertions.assertEquals("akd", model.sources().get(0).description()); - Assertions.assertEquals("dahzllrqm", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yblvtbdmvsbyi", model.sources().get(0).linkedService().referenceName()); + Assertions.assertEquals("hoeamoeobdo", model.description()); + Assertions.assertEquals("yrkwfugiph", model.folder().name()); + Assertions.assertEquals("fejlzuqloiw", model.sources().get(0).name()); + Assertions.assertEquals("yyzivrmitcd", model.sources().get(0).description()); + Assertions.assertEquals("hchwhrktjleif", model.sources().get(0).dataset().referenceName()); + Assertions.assertEquals("machbkv", model.sources().get(0).linkedService().referenceName()); Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("nhjrfdmfd", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("nimx", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("dawsxmrsz", model.sources().get(0).script()); - Assertions.assertEquals("qshwyqxridt", model.script()); - Assertions.assertEquals("saqjmkgx", model.documentLocale()); + Assertions.assertEquals("ukxrke", model.sources().get(0).flowlet().referenceName()); + Assertions.assertEquals("ajdopggor", model.sources().get(0).schemaLinkedService().referenceName()); + Assertions.assertEquals("gygnhrkombc", model.sources().get(0).script()); + Assertions.assertEquals("azcsozjvx", model.script()); + Assertions.assertEquals("ciggbnvtxof", model.documentLocale()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java index 07d0894c826b4..55c55db5cd792 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java @@ -19,32 +19,33 @@ public final class XeroObjectDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XeroObjectDataset model = BinaryData.fromString( - "{\"type\":\"XeroObject\",\"typeProperties\":{\"tableName\":\"datadnpfcghdttowqx\"},\"description\":\"pbzxpzl\",\"structure\":\"datavhatiywtcvzuzp\",\"schema\":\"dataeomotq\",\"linkedServiceName\":{\"referenceName\":\"ql\",\"parameters\":{\"gq\":\"datai\",\"dpfvlsqmmetwtla\":\"datazk\"}},\"parameters\":{\"cgrllyyfsmoc\":{\"type\":\"String\",\"defaultValue\":\"dataefbdpnuvh\"},\"kgdskwvb\":{\"type\":\"SecureString\",\"defaultValue\":\"datarchmetvzhuugd\"}},\"annotations\":[\"datawwayqts\",\"datanyotgnmze\",\"datacreluedcmk\"],\"folder\":{\"name\":\"heexzhhllxwk\"},\"\":{\"tkqiymmddslwnlg\":\"dataxdjklfsd\",\"ybnnnlpqdnnska\":\"datadlhmks\"}}") + 
"{\"type\":\"rcifflxqqn\",\"typeProperties\":{\"tableName\":\"datacowken\"},\"description\":\"tcuyuwgnyjdiuj\",\"structure\":\"datacwmlfz\",\"schema\":\"dataibfmcoxbktu\",\"linkedServiceName\":{\"referenceName\":\"jkuf\",\"parameters\":{\"mnobbai\":\"datadgnmei\",\"zaisfofg\":\"datacfbfy\",\"ifmyzb\":\"datamrkm\"}},\"parameters\":{\"lptbdponhblqi\":{\"type\":\"SecureString\",\"defaultValue\":\"datahcmt\"},\"bpcwtwtrchkcmru\":{\"type\":\"Array\",\"defaultValue\":\"datauqfpzjz\"}},\"annotations\":[\"dataesqsqmie\",\"dataxipwqchfpt\",\"datakkvjjl\"],\"folder\":{\"name\":\"cu\"},\"\":{\"xpxxizchmbuzg\":\"dataqokbgumuej\",\"fsu\":\"datanrkjkng\",\"zlsbufn\":\"dataaybh\",\"zmzuicsggsxznbp\":\"databvcntpoeeytrsl\"}}") .toObject(XeroObjectDataset.class); - Assertions.assertEquals("pbzxpzl", model.description()); - Assertions.assertEquals("ql", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("cgrllyyfsmoc").type()); - Assertions.assertEquals("heexzhhllxwk", model.folder().name()); + Assertions.assertEquals("tcuyuwgnyjdiuj", model.description()); + Assertions.assertEquals("jkuf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("lptbdponhblqi").type()); + Assertions.assertEquals("cu", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - XeroObjectDataset model = new XeroObjectDataset().withDescription("pbzxpzl").withStructure("datavhatiywtcvzuzp") - .withSchema("dataeomotq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ql") - .withParameters(mapOf("gq", "datai", "dpfvlsqmmetwtla", "datazk"))) - .withParameters(mapOf("cgrllyyfsmoc", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataefbdpnuvh"), - "kgdskwvb", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datarchmetvzhuugd"))) - .withAnnotations(Arrays.asList("datawwayqts", "datanyotgnmze", "datacreluedcmk")) - .withFolder(new DatasetFolder().withName("heexzhhllxwk")).withTableName("datadnpfcghdttowqx"); + XeroObjectDataset model = new XeroObjectDataset().withDescription("tcuyuwgnyjdiuj") + .withStructure("datacwmlfz") + .withSchema("dataibfmcoxbktu") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jkuf") + .withParameters(mapOf("mnobbai", "datadgnmei", "zaisfofg", "datacfbfy", "ifmyzb", "datamrkm"))) + .withParameters(mapOf("lptbdponhblqi", + new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahcmt"), + "bpcwtwtrchkcmru", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datauqfpzjz"))) + .withAnnotations(Arrays.asList("dataesqsqmie", "dataxipwqchfpt", "datakkvjjl")) + .withFolder(new DatasetFolder().withName("cu")) + .withTableName("datacowken"); model = BinaryData.fromObject(model).toObject(XeroObjectDataset.class); - Assertions.assertEquals("pbzxpzl", model.description()); - Assertions.assertEquals("ql", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("cgrllyyfsmoc").type()); - Assertions.assertEquals("heexzhhllxwk", model.folder().name()); + Assertions.assertEquals("tcuyuwgnyjdiuj", model.description()); + Assertions.assertEquals("jkuf", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("lptbdponhblqi").type()); + 
Assertions.assertEquals("cu", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java index f220f1b448e2b..781d76e1e992b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java @@ -11,15 +11,19 @@ public final class XeroSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XeroSource model = BinaryData.fromString( - "{\"type\":\"XeroSource\",\"query\":\"databetzydtgpvnczf\",\"queryTimeout\":\"dataybjku\",\"additionalColumns\":\"dataajkyrhucbfkaqlp\",\"sourceRetryCount\":\"dataptero\",\"sourceRetryWait\":\"dataqaktao\",\"maxConcurrentConnections\":\"datagefobcqvzmyw\",\"disableMetricsCollection\":\"datayns\",\"\":{\"kklzabauvncln\":\"dataosqvojgol\",\"ikireetvjfizafd\":\"dataaoidjhoykgtyvrn\",\"csipfwlye\":\"datajhnuvndgrolgxa\",\"rzfppopwxxdgzhn\":\"dataajdpjmqteirrjjm\"}}") + "{\"type\":\"jlzkymcgtbpbfbgf\",\"query\":\"databhydcq\",\"queryTimeout\":\"dataq\",\"additionalColumns\":\"dataozmbapjvbzablmp\",\"sourceRetryCount\":\"dataqwlvsefvkxxd\",\"sourceRetryWait\":\"databnqmhrw\",\"maxConcurrentConnections\":\"dataexwgrflqb\",\"disableMetricsCollection\":\"dataxudsmdg\",\"\":{\"tjd\":\"datak\",\"ficzw\":\"datadasomxwsflylols\"}}") .toObject(XeroSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - XeroSource model = new XeroSource().withSourceRetryCount("dataptero").withSourceRetryWait("dataqaktao") - .withMaxConcurrentConnections("datagefobcqvzmyw").withDisableMetricsCollection("datayns") - .withQueryTimeout("dataybjku").withAdditionalColumns("dataajkyrhucbfkaqlp").withQuery("databetzydtgpvnczf"); + XeroSource model = new XeroSource().withSourceRetryCount("dataqwlvsefvkxxd") + .withSourceRetryWait("databnqmhrw") + .withMaxConcurrentConnections("dataexwgrflqb") + .withDisableMetricsCollection("dataxudsmdg") + .withQueryTimeout("dataq") + .withAdditionalColumns("dataozmbapjvbzablmp") + .withQuery("databhydcq"); model = BinaryData.fromObject(model).toObject(XeroSource.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java index 3c63a4068df1e..dba260f24de4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java @@ -21,35 +21,38 @@ public final class XmlDatasetTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XmlDataset model = BinaryData.fromString( - 
"{\"type\":\"Xml\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datahkun\",\"fileName\":\"datafjh\",\"\":{\"nwqsttewu\":\"datatpkvegeattbzkgtz\"}},\"encodingName\":\"dataysjeufjx\",\"nullValue\":\"datapditfnonpi\",\"compression\":{\"type\":\"dataxlvrhprrvbwonleq\",\"level\":\"datavtlrvbstph\",\"\":{\"u\":\"datauninttlnrjdszd\",\"vgp\":\"dataiciqppo\",\"uhwfwj\":\"datae\",\"tfnressfepgck\":\"dataoxuo\"}}},\"description\":\"jmgvsnvbtqdxf\",\"structure\":\"dataym\",\"schema\":\"datahjluqllbsupubdx\",\"linkedServiceName\":{\"referenceName\":\"kdlhzwhc\",\"parameters\":{\"zhrjqfyaytvsly\":\"databosjjfd\",\"uarlcjiwgsxfaioc\":\"datakcgn\"}},\"parameters\":{\"bpaefzqsymjw\":{\"type\":\"String\",\"defaultValue\":\"datajgnfgrzxbar\"},\"crtfodqhuauzm\":{\"type\":\"Array\",\"defaultValue\":\"datacytesmf\"}},\"annotations\":[\"datartrfzh\",\"dataezvhj\"],\"folder\":{\"name\":\"dyyrudma\"},\"\":{\"ssgfenffdxbvwfqj\":\"datatvdkxb\",\"xndmuvar\":\"datahivdrija\"}}") + "{\"type\":\"tpfcudvafnbfbqv\",\"typeProperties\":{\"location\":{\"type\":\"i\",\"folderPath\":\"datazlrz\",\"fileName\":\"dataasd\",\"\":{\"tgkiqlarh\":\"datamjqmv\",\"ffzjwztsmpchggry\":\"datatwvcazekdzd\"}},\"encodingName\":\"datagfyatig\",\"nullValue\":\"datafrrkdknczgorywnv\",\"compression\":{\"type\":\"datatv\",\"level\":\"dataevdlh\",\"\":{\"bpyhssrlvkpkp\":\"dataka\",\"ccebxxopyicyvspe\":\"dataocm\",\"hgajkrdy\":\"datalhwyykgvrccpumd\"}}},\"description\":\"qnxhgk\",\"structure\":\"datadwzejp\",\"schema\":\"datawz\",\"linkedServiceName\":{\"referenceName\":\"cmbpwdlu\",\"parameters\":{\"ffbvtzldzchub\":\"dataprldidwm\"}},\"parameters\":{\"hfrbzakpjt\":{\"type\":\"Object\",\"defaultValue\":\"datazuvigvl\"},\"qxynqj\":{\"type\":\"Array\",\"defaultValue\":\"dataaqpojpsucmximc\"}},\"annotations\":[\"datatkyvscbgn\",\"datac\",\"datausxhircpg\",\"datavsvkkjbjolpyo\"],\"folder\":{\"name\":\"vuznadvhm\"},\"\":{\"owxxbh\":\"dataoi\",\"ksikawanvmwdv\":\"datapsyioqemqwtqszzg\",\"mpnbnfgyweoj\":\"datajqcrbk\",\"yawkch\":\"dataepgcmahiwf\"}}") .toObject(XmlDataset.class); - Assertions.assertEquals("jmgvsnvbtqdxf", model.description()); - Assertions.assertEquals("kdlhzwhc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("bpaefzqsymjw").type()); - Assertions.assertEquals("dyyrudma", model.folder().name()); + Assertions.assertEquals("qnxhgk", model.description()); + Assertions.assertEquals("cmbpwdlu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hfrbzakpjt").type()); + Assertions.assertEquals("vuznadvhm", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - XmlDataset model = new XmlDataset().withDescription("jmgvsnvbtqdxf").withStructure("dataym") - .withSchema("datahjluqllbsupubdx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kdlhzwhc") - .withParameters(mapOf("zhrjqfyaytvsly", "databosjjfd", "uarlcjiwgsxfaioc", "datakcgn"))) - .withParameters(mapOf("bpaefzqsymjw", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datajgnfgrzxbar"), - "crtfodqhuauzm", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datacytesmf"))) - .withAnnotations(Arrays.asList("datartrfzh", "dataezvhj")) - .withFolder(new DatasetFolder().withName("dyyrudma")) - .withLocation(new DatasetLocation().withFolderPath("datahkun").withFileName("datafjh") - 
.withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("dataysjeufjx").withNullValue("datapditfnonpi").withCompression(new DatasetCompression() - .withType("dataxlvrhprrvbwonleq").withLevel("datavtlrvbstph").withAdditionalProperties(mapOf())); + XmlDataset model = new XmlDataset().withDescription("qnxhgk") + .withStructure("datadwzejp") + .withSchema("datawz") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cmbpwdlu") + .withParameters(mapOf("ffbvtzldzchub", "dataprldidwm"))) + .withParameters(mapOf("hfrbzakpjt", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazuvigvl"), "qxynqj", + new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataaqpojpsucmximc"))) + .withAnnotations(Arrays.asList("datatkyvscbgn", "datac", "datausxhircpg", "datavsvkkjbjolpyo")) + .withFolder(new DatasetFolder().withName("vuznadvhm")) + .withLocation(new DatasetLocation().withFolderPath("datazlrz") + .withFileName("dataasd") + .withAdditionalProperties(mapOf("type", "i"))) + .withEncodingName("datagfyatig") + .withNullValue("datafrrkdknczgorywnv") + .withCompression( + new DatasetCompression().withType("datatv").withLevel("dataevdlh").withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(XmlDataset.class); - Assertions.assertEquals("jmgvsnvbtqdxf", model.description()); - Assertions.assertEquals("kdlhzwhc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("bpaefzqsymjw").type()); - Assertions.assertEquals("dyyrudma", model.folder().name()); + Assertions.assertEquals("qnxhgk", model.description()); + Assertions.assertEquals("cmbpwdlu", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hfrbzakpjt").type()); + Assertions.assertEquals("vuznadvhm", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java index b52b2c45700c4..6f0d55b6a052f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java @@ -15,17 +15,21 @@ public final class XmlDatasetTypePropertiesTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XmlDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datamzjotprrmuh\",\"fileName\":\"datahtuzlxiwyopgar\",\"\":{\"qyvzesipiysnjq\":\"datatwrapczdo\",\"dcndazabun\":\"dataowa\"}},\"encodingName\":\"datase\",\"nullValue\":\"dataaupwhlzyckrem\",\"compression\":{\"type\":\"datalmsvdo\",\"level\":\"datairxxhyrkqako\",\"\":{\"inwteyrqsh\":\"datafreprfvm\",\"lblfs\":\"dataxbcejop\"}}}") + 
"{\"location\":{\"type\":\"pitskshfyftti\",\"folderPath\":\"dataep\",\"fileName\":\"datahju\",\"\":{\"hblivwehsudym\":\"datagbggcjx\",\"m\":\"datambhdo\",\"xexatmdmnrs\":\"datangkqlgxzduvxd\"}},\"encodingName\":\"dataxo\",\"nullValue\":\"dataxyddmiplois\",\"compression\":{\"type\":\"datazsoxznntwgk\",\"level\":\"dataoh\",\"\":{\"bdjzghximkg\":\"datapzupzwwy\",\"ot\":\"datamxpqkjnpyriwn\"}}}") .toObject(XmlDatasetTypeProperties.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { XmlDatasetTypeProperties model = new XmlDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datamzjotprrmuh").withFileName("datahtuzlxiwyopgar") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("datase").withNullValue("dataaupwhlzyckrem").withCompression(new DatasetCompression() - .withType("datalmsvdo").withLevel("datairxxhyrkqako").withAdditionalProperties(mapOf())); + .withLocation(new DatasetLocation().withFolderPath("dataep") + .withFileName("datahju") + .withAdditionalProperties(mapOf("type", "pitskshfyftti"))) + .withEncodingName("dataxo") + .withNullValue("dataxyddmiplois") + .withCompression(new DatasetCompression().withType("datazsoxznntwgk") + .withLevel("dataoh") + .withAdditionalProperties(mapOf())); model = BinaryData.fromObject(model).toObject(XmlDatasetTypeProperties.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java index 3d962e7b777c6..3ca3373f1ce12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java @@ -14,7 +14,7 @@ public final class XmlReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XmlReadSettings model = BinaryData.fromString( - "{\"type\":\"XmlReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"cbuzudkqoeoukvi\":\"dataubziibuabpvdwhvn\",\"fhdyasklmy\":\"datakgbkzqbo\",\"awljatvfddq\":\"datahclxwede\",\"xcqcwbxxvcvek\":\"datapduttqjtszq\"}},\"validationMode\":\"datadrum\",\"detectDataType\":\"datakdwwq\",\"namespaces\":\"datalfvmwuyar\",\"namespacePrefixes\":\"datasvtzotmwxq\",\"\":{\"wrtmjskb\":\"datadanfexlawkeqjhz\",\"mvounbyvsfqu\":\"dataenjnady\",\"xqbknoxjhedwh\":\"datar\",\"rpajbiig\":\"datamwb\"}}") + "{\"type\":\"rxenp\",\"compressionProperties\":{\"type\":\"eqotvocjktihnwyv\",\"\":{\"dmtfnbv\":\"datag\",\"cd\":\"dataxqqlbmiqbdiahj\",\"yn\":\"datapdlxwsfddyqp\",\"lj\":\"dataowmjsur\"}},\"validationMode\":\"datasjhh\",\"detectDataType\":\"datawizqvgadolep\",\"namespaces\":\"datalzjhaqxfams\",\"namespacePrefixes\":\"datacesvvrabbyfhzy\",\"\":{\"bwxhlqioqh\":\"datanlbrcydwrcjta\"}}") .toObject(XmlReadSettings.class); } @@ -22,9 +22,11 @@ public void testDeserialize() throws Exception { public void testSerialize() throws Exception { XmlReadSettings model = new XmlReadSettings() .withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))) - .withValidationMode("datadrum").withDetectDataType("datakdwwq").withNamespaces("datalfvmwuyar") - 
.withNamespacePrefixes("datasvtzotmwxq"); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "eqotvocjktihnwyv"))) + .withValidationMode("datasjhh") + .withDetectDataType("datawizqvgadolep") + .withNamespaces("datalzjhaqxfams") + .withNamespacePrefixes("datacesvvrabbyfhzy"); model = BinaryData.fromObject(model).toObject(XmlReadSettings.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java index 86fe70d75b106..d861b294dda72 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java @@ -16,23 +16,27 @@ public final class XmlSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { XmlSource model = BinaryData.fromString( - "{\"type\":\"XmlSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datanlihbku\",\"disableMetricsCollection\":\"dataeywyftvy\",\"\":{\"scyzvv\":\"dataqzjfvbnyyjvz\",\"ssgbscq\":\"dataxmy\",\"qiparctshe\":\"dataeixazebmmjaigax\",\"fawhoosrsol\":\"datagtdvhokx\"}},\"formatSettings\":{\"type\":\"XmlReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"nudifierxx\":\"dataoejbgiqhjpe\"}},\"validationMode\":\"datasdvuirqfks\",\"detectDataType\":\"datalfgmdoaihlvrsqc\",\"namespaces\":\"datamirybwga\",\"namespacePrefixes\":\"datavwkynemazgtbynx\",\"\":{\"zvuzxx\":\"dataawexgeqo\",\"hbobuovsvwnpcx\":\"dataojj\"}},\"additionalColumns\":\"datamtvparyubny\",\"sourceRetryCount\":\"datalpz\",\"sourceRetryWait\":\"datavotuc\",\"maxConcurrentConnections\":\"databp\",\"disableMetricsCollection\":\"dataumqzftzoe\",\"\":{\"nuqsgertxicemgs\":\"datanvnooklgr\",\"edbsl\":\"datacbbdokph\",\"k\":\"datanunpxswmcc\"}}") + "{\"type\":\"bijoehhqwwsgqzi\",\"storeSettings\":{\"type\":\"awectzjjgvcbtq\",\"maxConcurrentConnections\":\"datapnkyvujhejy\",\"disableMetricsCollection\":\"datavlguysbrn\",\"\":{\"jshtcfnb\":\"dataqhmuqyzxkormr\"}},\"formatSettings\":{\"type\":\"zuaxtbr\",\"compressionProperties\":{\"type\":\"hxpdqoehub\",\"\":{\"glynbqpeo\":\"datazxnuxamxikhrxi\",\"wtihtnywgtsodnx\":\"dataecbog\"}},\"validationMode\":\"datarjtwjimcfrhtz\",\"detectDataType\":\"datauvoaxqo\",\"namespaces\":\"datalp\",\"namespacePrefixes\":\"datapbzyqbggxcyra\",\"\":{\"wxslzqlgxxbnrur\":\"dataurxlp\",\"upckhfbmdemohlsh\":\"datanwbjjy\",\"mwaej\":\"dataaaoofltbsay\",\"gabsfjrjzdq\":\"datazkqcmddcftnxyrtq\"}},\"additionalColumns\":\"datagorvgdibepg\",\"sourceRetryCount\":\"dataoetwjssyazm\",\"sourceRetryWait\":\"dataux\",\"maxConcurrentConnections\":\"dataokckxfk\",\"disableMetricsCollection\":\"dataqkby\",\"\":{\"gxlyvebvxjguwts\":\"dataeawucmqfurbtb\",\"fzv\":\"dataijwiznb\",\"ljjawsaskullvt\":\"dataxtykjrdxlximvr\"}}") .toObject(XmlSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - XmlSource model = new XmlSource().withSourceRetryCount("datalpz").withSourceRetryWait("datavotuc") - .withMaxConcurrentConnections("databp").withDisableMetricsCollection("dataumqzftzoe") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datanlihbku") - 
.withDisableMetricsCollection("dataeywyftvy") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) + XmlSource model = new XmlSource().withSourceRetryCount("dataoetwjssyazm") + .withSourceRetryWait("dataux") + .withMaxConcurrentConnections("dataokckxfk") + .withDisableMetricsCollection("dataqkby") + .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datapnkyvujhejy") + .withDisableMetricsCollection("datavlguysbrn") + .withAdditionalProperties(mapOf("type", "awectzjjgvcbtq"))) .withFormatSettings(new XmlReadSettings() .withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))) - .withValidationMode("datasdvuirqfks").withDetectDataType("datalfgmdoaihlvrsqc") - .withNamespaces("datamirybwga").withNamespacePrefixes("datavwkynemazgtbynx")) - .withAdditionalColumns("datamtvparyubny"); + new CompressionReadSettings().withAdditionalProperties(mapOf("type", "hxpdqoehub"))) + .withValidationMode("datarjtwjimcfrhtz") + .withDetectDataType("datauvoaxqo") + .withNamespaces("datalp") + .withNamespacePrefixes("datapbzyqbggxcyra")) + .withAdditionalColumns("datagorvgdibepg"); model = BinaryData.fromObject(model).toObject(XmlSource.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java index 61a6b69235d0d..cee283072f69e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java @@ -11,13 +11,13 @@ public final class ZipDeflateReadSettingsTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ZipDeflateReadSettings model = BinaryData.fromString( - "{\"type\":\"ZipDeflateReadSettings\",\"preserveZipFileNameAsFolder\":\"databq\",\"\":{\"wiavmqutgxd\":\"dataxzyj\",\"rskyl\":\"dataznfokcb\"}}") + "{\"type\":\"puilf\",\"preserveZipFileNameAsFolder\":\"datafnd\",\"\":{\"fxoffckejxomngu\":\"databcgdp\",\"mgn\":\"datawxxynttrnksvx\",\"hmwfoummdomv\":\"dataiycxuyzrnngnm\"}}") .toObject(ZipDeflateReadSettings.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ZipDeflateReadSettings model = new ZipDeflateReadSettings().withPreserveZipFileNameAsFolder("databq"); + ZipDeflateReadSettings model = new ZipDeflateReadSettings().withPreserveZipFileNameAsFolder("datafnd"); model = BinaryData.fromObject(model).toObject(ZipDeflateReadSettings.class); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java index fa1d671b8e5f8..146d554f0e768 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java @@ -19,32 +19,36 @@ public final class ZohoObjectDatasetTests { @org.junit.jupiter.api.Test 
public void testDeserialize() throws Exception { ZohoObjectDataset model = BinaryData.fromString( - "{\"type\":\"ZohoObject\",\"typeProperties\":{\"tableName\":\"datamrslwknrd\"},\"description\":\"mbjern\",\"structure\":\"datazywx\",\"schema\":\"dataaq\",\"linkedServiceName\":{\"referenceName\":\"tkdeetnnef\",\"parameters\":{\"fwqjzybmfqdnpp\":\"datalkszuxjmrzsxwa\",\"vamuvkgd\":\"datacfguam\",\"spjvsyydjlhd\":\"datapjbblukgctv\"}},\"parameters\":{\"ulojwumfjdymeq\":{\"type\":\"Array\",\"defaultValue\":\"datavyeegx\"},\"nxemhqpzhnatw\":{\"type\":\"Bool\",\"defaultValue\":\"datapfyxdjspn\"}},\"annotations\":[\"datamcvdjlwwefevtwll\",\"dataypmjc\",\"datay\",\"datafwgkzuhk\"],\"folder\":{\"name\":\"jkckwbqwjyfmmk\"},\"\":{\"oerohextigukfk\":\"datarooyzhobnvyuepa\",\"enlqtqyvlfbs\":\"datasycbdymbnp\"}}") + "{\"type\":\"ma\",\"typeProperties\":{\"tableName\":\"dataldic\"},\"description\":\"fwl\",\"structure\":\"dataitvkyahfoyfzoi\",\"schema\":\"dataaeprevcjukfal\",\"linkedServiceName\":{\"referenceName\":\"ceechc\",\"parameters\":{\"nylfhicrjriyb\":\"dataqbeqpucnu\",\"fsnqocybrh\":\"databydrlqllb\",\"oqjfeamzkuxdgpks\":\"datagiknrlugseiqb\"}},\"parameters\":{\"ldrqqv\":{\"type\":\"Int\",\"defaultValue\":\"datanvnj\"},\"xlxedthfwlnvq\":{\"type\":\"Object\",\"defaultValue\":\"datausq\"},\"blnlmpuyypaggpai\":{\"type\":\"Object\",\"defaultValue\":\"datayfi\"},\"zwloqrmgdhy\":{\"type\":\"Int\",\"defaultValue\":\"datae\"}},\"annotations\":[\"datavlxtywukhjdspl\",\"dataitxrrgkwiyoyh\"],\"folder\":{\"name\":\"vxcodwkwoytcac\"},\"\":{\"lfwfiikqcdnzs\":\"dataizfue\",\"gne\":\"dataiu\",\"mcrxlyzoaho\":\"datao\"}}") .toObject(ZohoObjectDataset.class); - Assertions.assertEquals("mbjern", model.description()); - Assertions.assertEquals("tkdeetnnef", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ulojwumfjdymeq").type()); - Assertions.assertEquals("jkckwbqwjyfmmk", model.folder().name()); + Assertions.assertEquals("fwl", model.description()); + Assertions.assertEquals("ceechc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("ldrqqv").type()); + Assertions.assertEquals("vxcodwkwoytcac", model.folder().name()); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ZohoObjectDataset model - = new ZohoObjectDataset().withDescription("mbjern").withStructure("datazywx").withSchema("dataaq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tkdeetnnef") - .withParameters(mapOf("fwqjzybmfqdnpp", "datalkszuxjmrzsxwa", "vamuvkgd", "datacfguam", - "spjvsyydjlhd", "datapjbblukgctv"))) - .withParameters(mapOf("ulojwumfjdymeq", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavyeegx"), - "nxemhqpzhnatw", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datapfyxdjspn"))) - .withAnnotations(Arrays.asList("datamcvdjlwwefevtwll", "dataypmjc", "datay", "datafwgkzuhk")) - .withFolder(new DatasetFolder().withName("jkckwbqwjyfmmk")).withTableName("datamrslwknrd"); + ZohoObjectDataset model = new ZohoObjectDataset().withDescription("fwl") + .withStructure("dataitvkyahfoyfzoi") + .withSchema("dataaeprevcjukfal") + .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ceechc") + .withParameters(mapOf("nylfhicrjriyb", "dataqbeqpucnu", "fsnqocybrh", "databydrlqllb", + "oqjfeamzkuxdgpks", "datagiknrlugseiqb"))) + .withParameters(mapOf("ldrqqv", + new 
ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datanvnj"), "xlxedthfwlnvq", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datausq"), + "blnlmpuyypaggpai", + new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datayfi"), "zwloqrmgdhy", + new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datae"))) + .withAnnotations(Arrays.asList("datavlxtywukhjdspl", "dataitxrrgkwiyoyh")) + .withFolder(new DatasetFolder().withName("vxcodwkwoytcac")) + .withTableName("dataldic"); model = BinaryData.fromObject(model).toObject(ZohoObjectDataset.class); - Assertions.assertEquals("mbjern", model.description()); - Assertions.assertEquals("tkdeetnnef", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("ulojwumfjdymeq").type()); - Assertions.assertEquals("jkckwbqwjyfmmk", model.folder().name()); + Assertions.assertEquals("fwl", model.description()); + Assertions.assertEquals("ceechc", model.linkedServiceName().referenceName()); + Assertions.assertEquals(ParameterType.INT, model.parameters().get("ldrqqv").type()); + Assertions.assertEquals("vxcodwkwoytcac", model.folder().name()); } // Use "Map.of" if available diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java index 9d59cb25bced3..465200cdf7bee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java @@ -11,15 +11,19 @@ public final class ZohoSourceTests { @org.junit.jupiter.api.Test public void testDeserialize() throws Exception { ZohoSource model = BinaryData.fromString( - "{\"type\":\"ZohoSource\",\"query\":\"databuwauytq\",\"queryTimeout\":\"datagaxloafws\",\"additionalColumns\":\"dataxqrokw\",\"sourceRetryCount\":\"dataipn\",\"sourceRetryWait\":\"dataql\",\"maxConcurrentConnections\":\"datarhctbrvegdamoy\",\"disableMetricsCollection\":\"datafjpkezqjizbyczme\",\"\":{\"destarulnhbqt\":\"datacgvlnpjjbyryrktu\",\"xhcrffj\":\"datayh\",\"svzhlkeot\":\"dataexupcuizvx\"}}") + "{\"type\":\"lnxwdppi\",\"query\":\"dataeqdj\",\"queryTimeout\":\"datavtwfbqxoqnv\",\"additionalColumns\":\"datahd\",\"sourceRetryCount\":\"datanntoloezpt\",\"sourceRetryWait\":\"datarpzvimx\",\"maxConcurrentConnections\":\"dataxcaczc\",\"disableMetricsCollection\":\"dataomrpucytj\",\"\":{\"nxdyskyrhsijx\":\"dataqwyw\"}}") .toObject(ZohoSource.class); } @org.junit.jupiter.api.Test public void testSerialize() throws Exception { - ZohoSource model = new ZohoSource().withSourceRetryCount("dataipn").withSourceRetryWait("dataql") - .withMaxConcurrentConnections("datarhctbrvegdamoy").withDisableMetricsCollection("datafjpkezqjizbyczme") - .withQueryTimeout("datagaxloafws").withAdditionalColumns("dataxqrokw").withQuery("databuwauytq"); + ZohoSource model = new ZohoSource().withSourceRetryCount("datanntoloezpt") + .withSourceRetryWait("datarpzvimx") + .withMaxConcurrentConnections("dataxcaczc") + .withDisableMetricsCollection("dataomrpucytj") + .withQueryTimeout("datavtwfbqxoqnv") + .withAdditionalColumns("datahd") + .withQuery("dataeqdj"); model = 
BinaryData.fromObject(model).toObject(ZohoSource.class); } }