diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index f09edb728..82cc21f8f 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c3a3e3055fe11cb9683f398a665c225a03563ff1
\ No newline at end of file
+universe:/home/parth.bansal/vn0/universe
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 38a03117f..08fb422c6 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -65,6 +65,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabase.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabaseDatabasePermission.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java linguist-generated=true
@@ -76,6 +78,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSql
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java linguist-generated=true
@@ -84,6 +89,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.ja
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java linguist-generated=true
@@ -97,6 +104,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetCustomTemplateRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java linguist-generated=true
@@ -139,8 +147,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudge
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java linguist-generated=true
@@ -152,12 +162,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetCo
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java linguist-generated=true
@@ -173,10 +186,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudge
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java linguist-generated=true
@@ -192,13 +210,23 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStor
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java linguist-generated=true
@@ -239,8 +267,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true
@@ -310,6 +342,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java linguist-generated=true
@@ -322,6 +356,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAs
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java linguist-generated=true
@@ -581,6 +616,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTa
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true
@@ -700,6 +737,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttribut
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAvailability.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java linguist-generated=true
@@ -750,6 +788,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStat
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java linguist-generated=true
@@ -761,7 +800,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEv
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java linguist-generated=true
@@ -795,6 +838,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterP
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java linguist-generated=true
@@ -860,6 +904,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListCluster
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java linguist-generated=true
@@ -872,9 +918,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrd
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java linguist-generated=true
@@ -888,6 +936,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamil
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java linguist-generated=true
@@ -907,11 +956,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true
@@ -920,18 +974,25 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboar
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationMessageRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedbackRating.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true
@@ -954,7 +1015,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSta
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSuggestedQuestionsAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true
@@ -977,8 +1040,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true
@@ -986,19 +1058,30 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRef.java linguist-generated=true
@@ -1007,27 +1090,47 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseIn
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true
@@ -1036,6 +1139,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Provisioni
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true
@@ -1047,9 +1151,49 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true
@@ -1231,9 +1375,43 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermis
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetWorkspaceAccessDetailLocalRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetWorkspaceAccessDetailRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/Group.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/PrincipalType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveGroupProxyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveGroupRequest.java linguist-generated=true
@@ -1246,6 +1424,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveUserRe
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveUserResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ServicePrincipal.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/State.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/User.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UserName.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceAccessDetail.java linguist-generated=true
@@ -1348,6 +1534,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsReques
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java linguist-generated=true
@@ -1706,6 +1894,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersion
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java linguist-generated=true
@@ -1903,6 +2093,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccoun
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true
@@ -1969,6 +2162,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresC
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresSlotConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true
@@ -1986,10 +2181,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java linguist-generated=true
@@ -2007,7 +2204,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Creden
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingComputeMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingStorageMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java linguist-generated=true
@@ -2021,7 +2220,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Encryp
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpCommonNetworkConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java linguist-generated=true
@@ -2035,6 +2234,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpc
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfigConnectivityType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyAccessConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java linguist-generated=true
@@ -2064,11 +2264,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEnd
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceNetwork.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true
@@ -2188,6 +2390,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndp
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotifications.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotificationsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java linguist-generated=true
@@ -2359,6 +2563,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTo
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpointSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java linguist-generated=true
@@ -2434,6 +2639,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressT
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java linguist-generated=true
@@ -2897,15 +3103,25 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesServi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagPoliciesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagPoliciesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPolicy.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/Value.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java linguist-generated=true
@@ -2952,6 +3168,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java linguist-generated=true
@@ -2982,6 +3200,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRep
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportOutputs.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java linguist-generated=true
@@ -3050,3 +3269,34 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Workspace
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java linguist-generated=true
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 5461ba07e..7ddbd1230 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -16,6 +16,8 @@
import com.databricks.sdk.service.billing.LogDeliveryService;
import com.databricks.sdk.service.billing.UsageDashboardsAPI;
import com.databricks.sdk.service.billing.UsageDashboardsService;
+import com.databricks.sdk.service.billing.UsagePolicyAPI;
+import com.databricks.sdk.service.billing.UsagePolicyService;
import com.databricks.sdk.service.catalog.AccountMetastoreAssignmentsAPI;
import com.databricks.sdk.service.catalog.AccountMetastoreAssignmentsService;
import com.databricks.sdk.service.catalog.AccountMetastoresAPI;
@@ -115,6 +117,7 @@ public class AccountClient {
private StorageAPI storageAPI;
private AccountStorageCredentialsAPI storageCredentialsAPI;
private UsageDashboardsAPI usageDashboardsAPI;
+ private UsagePolicyAPI usagePolicyAPI;
private AccountUsersV2API usersV2API;
private VpcEndpointsAPI vpcEndpointsAPI;
private WorkspaceAssignmentAPI workspaceAssignmentAPI;
@@ -160,6 +163,7 @@ public AccountClient(DatabricksConfig config) {
storageAPI = new StorageAPI(apiClient);
storageCredentialsAPI = new AccountStorageCredentialsAPI(apiClient);
usageDashboardsAPI = new UsageDashboardsAPI(apiClient);
+ usagePolicyAPI = new UsagePolicyAPI(apiClient);
usersV2API = new AccountUsersV2API(apiClient);
vpcEndpointsAPI = new VpcEndpointsAPI(apiClient);
workspaceAssignmentAPI = new WorkspaceAssignmentAPI(apiClient);
@@ -591,6 +595,11 @@ public UsageDashboardsAPI usageDashboards() {
return usageDashboardsAPI;
}
+ /** A service serves REST API about Usage policies */
+ public UsagePolicyAPI usagePolicy() {
+ return usagePolicyAPI;
+ }
+
/**
* User identities recognized by Databricks and represented by email addresses.
*
@@ -1009,6 +1018,17 @@ public AccountClient withUsageDashboardsAPI(UsageDashboardsAPI usageDashboards)
return this;
}
+ /** Replace the default UsagePolicyService with a custom implementation. */
+ public AccountClient withUsagePolicyImpl(UsagePolicyService usagePolicy) {
+ return this.withUsagePolicyAPI(new UsagePolicyAPI(usagePolicy));
+ }
+
+ /** Replace the default UsagePolicyAPI with a custom implementation. */
+ public AccountClient withUsagePolicyAPI(UsagePolicyAPI usagePolicy) {
+ this.usagePolicyAPI = usagePolicy;
+ return this;
+ }
+
/** Replace the default AccountUsersV2Service with a custom implementation. */
public AccountClient withUsersV2Impl(AccountUsersV2Service accountUsersV2) {
return this.withUsersV2API(new AccountUsersV2API(accountUsersV2));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index bac85f5a9..e8c91ce19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -101,8 +101,14 @@
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI;
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService;
import com.databricks.sdk.service.dashboards.LakeviewService;
+import com.databricks.sdk.service.dashboards.QueryExecutionAPI;
+import com.databricks.sdk.service.dashboards.QueryExecutionService;
import com.databricks.sdk.service.database.DatabaseAPI;
+import com.databricks.sdk.service.database.DatabaseProjectAPI;
+import com.databricks.sdk.service.database.DatabaseProjectService;
import com.databricks.sdk.service.database.DatabaseService;
+import com.databricks.sdk.service.dataquality.DataQualityAPI;
+import com.databricks.sdk.service.dataquality.DataQualityService;
import com.databricks.sdk.service.files.DbfsService;
import com.databricks.sdk.service.files.FilesAPI;
import com.databricks.sdk.service.files.FilesService;
@@ -236,6 +242,8 @@
import com.databricks.sdk.service.sql.StatementExecutionService;
import com.databricks.sdk.service.sql.WarehousesAPI;
import com.databricks.sdk.service.sql.WarehousesService;
+import com.databricks.sdk.service.tags.TagAssignmentsAPI;
+import com.databricks.sdk.service.tags.TagAssignmentsService;
import com.databricks.sdk.service.tags.TagPoliciesAPI;
import com.databricks.sdk.service.tags.TagPoliciesService;
import com.databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI;
@@ -286,8 +294,10 @@ public class WorkspaceClient {
private CurrentUserAPI currentUserAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
private DashboardsAPI dashboardsAPI;
+ private DataQualityAPI dataQualityAPI;
private DataSourcesAPI dataSourcesAPI;
private DatabaseAPI databaseAPI;
+ private DatabaseProjectAPI databaseProjectAPI;
private DbfsExt dbfsAPI;
private DbsqlPermissionsAPI dbsqlPermissionsAPI;
private EntityTagAssignmentsAPI entityTagAssignmentsAPI;
@@ -337,6 +347,7 @@ public class WorkspaceClient {
private QualityMonitorsAPI qualityMonitorsAPI;
private QueriesAPI queriesAPI;
private QueriesLegacyAPI queriesLegacyAPI;
+ private QueryExecutionAPI queryExecutionAPI;
private QueryHistoryAPI queryHistoryAPI;
private QueryVisualizationsAPI queryVisualizationsAPI;
private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI;
@@ -361,6 +372,7 @@ public class WorkspaceClient {
private SystemSchemasAPI systemSchemasAPI;
private TableConstraintsAPI tableConstraintsAPI;
private TablesAPI tablesAPI;
+ private TagAssignmentsAPI tagAssignmentsAPI;
private TagPoliciesAPI tagPoliciesAPI;
private TemporaryPathCredentialsAPI temporaryPathCredentialsAPI;
private TemporaryTableCredentialsAPI temporaryTableCredentialsAPI;
@@ -416,8 +428,10 @@ public WorkspaceClient(DatabricksConfig config) {
currentUserAPI = new CurrentUserAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
dashboardsAPI = new DashboardsAPI(apiClient);
+ dataQualityAPI = new DataQualityAPI(apiClient);
dataSourcesAPI = new DataSourcesAPI(apiClient);
databaseAPI = new DatabaseAPI(apiClient);
+ databaseProjectAPI = new DatabaseProjectAPI(apiClient);
dbfsAPI = new DbfsExt(apiClient);
dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient);
entityTagAssignmentsAPI = new EntityTagAssignmentsAPI(apiClient);
@@ -467,6 +481,7 @@ public WorkspaceClient(DatabricksConfig config) {
qualityMonitorsAPI = new QualityMonitorsAPI(apiClient);
queriesAPI = new QueriesAPI(apiClient);
queriesLegacyAPI = new QueriesLegacyAPI(apiClient);
+ queryExecutionAPI = new QueryExecutionAPI(apiClient);
queryHistoryAPI = new QueryHistoryAPI(apiClient);
queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient);
queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient);
@@ -492,6 +507,7 @@ public WorkspaceClient(DatabricksConfig config) {
systemSchemasAPI = new SystemSchemasAPI(apiClient);
tableConstraintsAPI = new TableConstraintsAPI(apiClient);
tablesAPI = new TablesAPI(apiClient);
+ tagAssignmentsAPI = new TagAssignmentsAPI(apiClient);
tagPoliciesAPI = new TagPoliciesAPI(apiClient);
temporaryPathCredentialsAPI = new TemporaryPathCredentialsAPI(apiClient);
temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient);
@@ -801,6 +817,11 @@ public DashboardsAPI dashboards() {
return dashboardsAPI;
}
+ /** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */
+ public DataQualityAPI dataQuality() {
+ return dataQualityAPI;
+ }
+
/**
* This API is provided to assist you in making new query objects. When creating a query object,
* you may optionally specify a `data_source_id` for the SQL warehouse against which it will run.
@@ -824,6 +845,11 @@ public DatabaseAPI database() {
return databaseAPI;
}
+ /** Database Projects provide access to a database via REST API or direct SQL. */
+ public DatabaseProjectAPI databaseProject() {
+ return databaseProjectAPI;
+ }
+
/**
* DBFS API makes it simple to interact with various data sources without having to include a
* users credentials every time to read a file.
@@ -1436,6 +1462,11 @@ public QueriesLegacyAPI queriesLegacy() {
return queriesLegacyAPI;
}
+ /** Query execution APIs for AI / BI Dashboards */
+ public QueryExecutionAPI queryExecution() {
+ return queryExecutionAPI;
+ }
+
/**
* A service responsible for storing and retrieving the list of queries run against SQL endpoints
* and serverless compute.
@@ -1555,8 +1586,8 @@ public RedashConfigAPI redashConfig() {
* version metadata (comments, aliases) create a new model version, or update permissions on the
* registered model, users must be owners of the registered model.
*
- *
Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging,
- * grants) that specify a securable type, use "FUNCTION" as the securable type.
+ *
Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants)
+ * that specify a securable type, use FUNCTION as the securable type.
*/
public RegisteredModelsAPI registeredModels() {
return registeredModelsAPI;
@@ -1727,16 +1758,16 @@ public SharesAPI shares() {
* has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode,
* or it can be set to `CANCEL`, which cancels the statement.
*
- *
In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call
- * waits up to 30 seconds; if the statement execution finishes within this time, the result data
- * is returned directly in the response. If the execution takes longer than 30 seconds, the
- * execution is canceled and the call returns with a `CANCELED` state. - Asynchronous mode -
- * `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call doesn't wait for the statement to
- * finish but returns directly with a statement ID. The status of the statement execution can be
- * polled by issuing :method:statementexecution/getStatement with the statement ID. Once the
+ *
In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The
+ * call waits up to 30 seconds; if the statement execution finishes within this time, the result
+ * data is returned directly in the response. If the execution takes longer than 30 seconds, the
+ * execution is canceled and the call returns with a `CANCELED` state. - **Asynchronous mode**
+ * (`wait_timeout=0s` and `on_wait_timeout` is ignored): The call doesn't wait for the statement
+ * to finish but returns directly with a statement ID. The status of the statement execution can
+ * be polled by issuing :method:statementexecution/getStatement with the statement ID. Once the
* execution has succeeded, this call also returns the result and metadata in the response. -
- * Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for
- * up to 10 seconds; if the statement execution finishes within this time, the result data is
+ * **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits
+ * for up to 10 seconds; if the statement execution finishes within this time, the result data is
* returned directly in the response. If the execution takes longer than 10 seconds, a statement
* ID is returned. The statement ID can be used to fetch status and results in the same way as in
* the asynchronous mode.
@@ -1853,6 +1884,11 @@ public TablesAPI tables() {
return tablesAPI;
}
+ /** Manage tag assignments on workspace-scoped objects. */
+ public TagAssignmentsAPI tagAssignments() {
+ return tagAssignmentsAPI;
+ }
+
/**
* The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions
* for tag policies can be managed using the [Account Access Control Proxy API].
@@ -2409,6 +2445,17 @@ public WorkspaceClient withDashboardsAPI(DashboardsAPI dashboards) {
return this;
}
+ /** Replace the default DataQualityService with a custom implementation. */
+ public WorkspaceClient withDataQualityImpl(DataQualityService dataQuality) {
+ return this.withDataQualityAPI(new DataQualityAPI(dataQuality));
+ }
+
+ /** Replace the default DataQualityAPI with a custom implementation. */
+ public WorkspaceClient withDataQualityAPI(DataQualityAPI dataQuality) {
+ this.dataQualityAPI = dataQuality;
+ return this;
+ }
+
/** Replace the default DataSourcesService with a custom implementation. */
public WorkspaceClient withDataSourcesImpl(DataSourcesService dataSources) {
return this.withDataSourcesAPI(new DataSourcesAPI(dataSources));
@@ -2431,6 +2478,17 @@ public WorkspaceClient withDatabaseAPI(DatabaseAPI database) {
return this;
}
+ /** Replace the default DatabaseProjectService with a custom implementation. */
+ public WorkspaceClient withDatabaseProjectImpl(DatabaseProjectService databaseProject) {
+ return this.withDatabaseProjectAPI(new DatabaseProjectAPI(databaseProject));
+ }
+
+ /** Replace the default DatabaseProjectAPI with a custom implementation. */
+ public WorkspaceClient withDatabaseProjectAPI(DatabaseProjectAPI databaseProject) {
+ this.databaseProjectAPI = databaseProject;
+ return this;
+ }
+
/** Replace the default DbfsService with a custom implementation. */
public WorkspaceClient withDbfsImpl(DbfsService dbfs) {
return this.withDbfsAPI(new DbfsExt(dbfs));
@@ -2993,6 +3051,17 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) {
return this;
}
+ /** Replace the default QueryExecutionService with a custom implementation. */
+ public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) {
+ return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution));
+ }
+
+ /** Replace the default QueryExecutionAPI with a custom implementation. */
+ public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) {
+ this.queryExecutionAPI = queryExecution;
+ return this;
+ }
+
/** Replace the default QueryHistoryService with a custom implementation. */
public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) {
return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory));
@@ -3272,6 +3341,17 @@ public WorkspaceClient withTablesAPI(TablesAPI tables) {
return this;
}
+ /** Replace the default TagAssignmentsService with a custom implementation. */
+ public WorkspaceClient withTagAssignmentsImpl(TagAssignmentsService tagAssignments) {
+ return this.withTagAssignmentsAPI(new TagAssignmentsAPI(tagAssignments));
+ }
+
+ /** Replace the default TagAssignmentsAPI with a custom implementation. */
+ public WorkspaceClient withTagAssignmentsAPI(TagAssignmentsAPI tagAssignments) {
+ this.tagAssignmentsAPI = tagAssignments;
+ return this;
+ }
+
/** Replace the default TagPoliciesService with a custom implementation. */
public WorkspaceClient withTagPoliciesImpl(TagPoliciesService tagPolicies) {
return this.withTagPoliciesAPI(new TagPoliciesAPI(tagPolicies));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
index 97b6f3b19..d7ef3d38a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
@@ -25,6 +25,10 @@ public class App {
@JsonProperty("budget_policy_id")
private String budgetPolicyId;
+ /** */
+ @JsonProperty("compute_size")
+ private ComputeSize computeSize;
+
/** */
@JsonProperty("compute_status")
private ComputeStatus computeStatus;
@@ -52,6 +56,10 @@ public class App {
@JsonProperty("effective_budget_policy_id")
private String effectiveBudgetPolicyId;
+ /** */
+ @JsonProperty("effective_usage_policy_id")
+ private String effectiveUsagePolicyId;
+
/** The effective api scopes granted to the user access token. */
@JsonProperty("effective_user_api_scopes")
private Collection effectiveUserApiScopes;
@@ -110,6 +118,10 @@ public class App {
@JsonProperty("url")
private String url;
+ /** */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
/** */
@JsonProperty("user_api_scopes")
private Collection userApiScopes;
@@ -141,6 +153,15 @@ public String getBudgetPolicyId() {
return budgetPolicyId;
}
  /** Sets the compute size for the app (serialized as {@code compute_size}); returns {@code this} for chaining. */
  public App setComputeSize(ComputeSize computeSize) {
    this.computeSize = computeSize;
    return this;
  }

  public ComputeSize getComputeSize() {
    return computeSize;
  }
+
public App setComputeStatus(ComputeStatus computeStatus) {
this.computeStatus = computeStatus;
return this;
@@ -195,6 +216,15 @@ public String getEffectiveBudgetPolicyId() {
return effectiveBudgetPolicyId;
}
  /**
   * Sets the effective usage policy id (serialized as {@code effective_usage_policy_id}); returns
   * {@code this} for chaining. Presumably the server-resolved policy as opposed to the requested
   * {@code usage_policy_id} — TODO confirm against API docs.
   */
  public App setEffectiveUsagePolicyId(String effectiveUsagePolicyId) {
    this.effectiveUsagePolicyId = effectiveUsagePolicyId;
    return this;
  }

  public String getEffectiveUsagePolicyId() {
    return effectiveUsagePolicyId;
  }
+
public App setEffectiveUserApiScopes(Collection effectiveUserApiScopes) {
this.effectiveUserApiScopes = effectiveUserApiScopes;
return this;
@@ -312,6 +342,15 @@ public String getUrl() {
return url;
}
  /** Sets the usage policy id (serialized as {@code usage_policy_id}); returns {@code this} for chaining. */
  public App setUsagePolicyId(String usagePolicyId) {
    this.usagePolicyId = usagePolicyId;
    return this;
  }

  public String getUsagePolicyId() {
    return usagePolicyId;
  }
+
public App setUserApiScopes(Collection userApiScopes) {
this.userApiScopes = userApiScopes;
return this;
@@ -329,12 +368,14 @@ public boolean equals(Object o) {
return Objects.equals(activeDeployment, that.activeDeployment)
&& Objects.equals(appStatus, that.appStatus)
&& Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(computeSize, that.computeSize)
&& Objects.equals(computeStatus, that.computeStatus)
&& Objects.equals(createTime, that.createTime)
&& Objects.equals(creator, that.creator)
&& Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath)
&& Objects.equals(description, that.description)
&& Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
+ && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId)
&& Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes)
&& Objects.equals(id, that.id)
&& Objects.equals(name, that.name)
@@ -348,6 +389,7 @@ public boolean equals(Object o) {
&& Objects.equals(updateTime, that.updateTime)
&& Objects.equals(updater, that.updater)
&& Objects.equals(url, that.url)
+ && Objects.equals(usagePolicyId, that.usagePolicyId)
&& Objects.equals(userApiScopes, that.userApiScopes);
}
@@ -357,12 +399,14 @@ public int hashCode() {
activeDeployment,
appStatus,
budgetPolicyId,
+ computeSize,
computeStatus,
createTime,
creator,
defaultSourceCodePath,
description,
effectiveBudgetPolicyId,
+ effectiveUsagePolicyId,
effectiveUserApiScopes,
id,
name,
@@ -376,6 +420,7 @@ public int hashCode() {
updateTime,
updater,
url,
+ usagePolicyId,
userApiScopes);
}
@@ -385,12 +430,14 @@ public String toString() {
.add("activeDeployment", activeDeployment)
.add("appStatus", appStatus)
.add("budgetPolicyId", budgetPolicyId)
+ .add("computeSize", computeSize)
.add("computeStatus", computeStatus)
.add("createTime", createTime)
.add("creator", creator)
.add("defaultSourceCodePath", defaultSourceCodePath)
.add("description", description)
.add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
+ .add("effectiveUsagePolicyId", effectiveUsagePolicyId)
.add("effectiveUserApiScopes", effectiveUserApiScopes)
.add("id", id)
.add("name", name)
@@ -404,6 +451,7 @@ public String toString() {
.add("updateTime", updateTime)
.add("updater", updater)
.add("url", url)
+ .add("usagePolicyId", usagePolicyId)
.add("userApiScopes", userApiScopes)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java
index 1e8acf263..2761c1651 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java
@@ -17,6 +17,10 @@ public class AppResource {
@JsonProperty("description")
private String description;
+ /** */
+ @JsonProperty("genie_space")
+ private AppResourceGenieSpace genieSpace;
+
/** */
@JsonProperty("job")
private AppResourceJob job;
@@ -59,6 +63,15 @@ public String getDescription() {
return description;
}
  /** Sets the Genie space resource (serialized as {@code genie_space}); returns {@code this} for chaining. */
  public AppResource setGenieSpace(AppResourceGenieSpace genieSpace) {
    this.genieSpace = genieSpace;
    return this;
  }

  public AppResourceGenieSpace getGenieSpace() {
    return genieSpace;
  }
+
public AppResource setJob(AppResourceJob job) {
this.job = job;
return this;
@@ -120,6 +133,7 @@ public boolean equals(Object o) {
AppResource that = (AppResource) o;
return Objects.equals(database, that.database)
&& Objects.equals(description, that.description)
+ && Objects.equals(genieSpace, that.genieSpace)
&& Objects.equals(job, that.job)
&& Objects.equals(name, that.name)
&& Objects.equals(secret, that.secret)
@@ -131,7 +145,15 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- database, description, job, name, secret, servingEndpoint, sqlWarehouse, ucSecurable);
+ database,
+ description,
+ genieSpace,
+ job,
+ name,
+ secret,
+ servingEndpoint,
+ sqlWarehouse,
+ ucSecurable);
}
@Override
@@ -139,6 +161,7 @@ public String toString() {
return new ToStringer(AppResource.class)
.add("database", database)
.add("description", description)
+ .add("genieSpace", genieSpace)
.add("job", job)
.add("name", name)
.add("secret", secret)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java
new file mode 100755
index 000000000..00045cdbd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AppResourceGenieSpace {
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ /** */
+ @JsonProperty("permission")
+ private AppResourceGenieSpaceGenieSpacePermission permission;
+
+ /** */
+ @JsonProperty("space_id")
+ private String spaceId;
+
+ public AppResourceGenieSpace setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public AppResourceGenieSpace setPermission(AppResourceGenieSpaceGenieSpacePermission permission) {
+ this.permission = permission;
+ return this;
+ }
+
+ public AppResourceGenieSpaceGenieSpacePermission getPermission() {
+ return permission;
+ }
+
+ public AppResourceGenieSpace setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AppResourceGenieSpace that = (AppResourceGenieSpace) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(permission, that.permission)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, permission, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AppResourceGenieSpace.class)
+ .add("name", name)
+ .add("permission", permission)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java
new file mode 100755
index 000000000..c93785cc3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+
@Generated
public enum AppResourceGenieSpaceGenieSpacePermission {
  // Permission levels an app can be granted on a Genie space resource.
  CAN_EDIT,
  CAN_MANAGE,
  CAN_RUN,
  CAN_VIEW,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java
new file mode 100755
index 000000000..b34c390e7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java
@@ -0,0 +1,136 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class AppUpdate {
+ /** */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
+ /** */
+ @JsonProperty("compute_size")
+ private ComputeSize computeSize;
+
+ /** */
+ @JsonProperty("description")
+ private String description;
+
+ /** */
+ @JsonProperty("resources")
+ private Collection resources;
+
+ /** */
+ @JsonProperty("status")
+ private AppUpdateUpdateStatus status;
+
+ /** */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
+ /** */
+ @JsonProperty("user_api_scopes")
+ private Collection userApiScopes;
+
+ public AppUpdate setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
+ public AppUpdate setComputeSize(ComputeSize computeSize) {
+ this.computeSize = computeSize;
+ return this;
+ }
+
+ public ComputeSize getComputeSize() {
+ return computeSize;
+ }
+
+ public AppUpdate setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public AppUpdate setResources(Collection resources) {
+ this.resources = resources;
+ return this;
+ }
+
+ public Collection getResources() {
+ return resources;
+ }
+
+ public AppUpdate setStatus(AppUpdateUpdateStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public AppUpdateUpdateStatus getStatus() {
+ return status;
+ }
+
+ public AppUpdate setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
+ public AppUpdate setUserApiScopes(Collection userApiScopes) {
+ this.userApiScopes = userApiScopes;
+ return this;
+ }
+
+ public Collection getUserApiScopes() {
+ return userApiScopes;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AppUpdate that = (AppUpdate) o;
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(computeSize, that.computeSize)
+ && Objects.equals(description, that.description)
+ && Objects.equals(resources, that.resources)
+ && Objects.equals(status, that.status)
+ && Objects.equals(usagePolicyId, that.usagePolicyId)
+ && Objects.equals(userApiScopes, that.userApiScopes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ budgetPolicyId, computeSize, description, resources, status, usagePolicyId, userApiScopes);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AppUpdate.class)
+ .add("budgetPolicyId", budgetPolicyId)
+ .add("computeSize", computeSize)
+ .add("description", description)
+ .add("resources", resources)
+ .add("status", status)
+ .add("usagePolicyId", usagePolicyId)
+ .add("userApiScopes", userApiScopes)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java
new file mode 100755
index 000000000..d666a314b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AppUpdateUpdateStatus {
+ /** */
+ @JsonProperty("message")
+ private String message;
+
+ /** */
+ @JsonProperty("state")
+ private AppUpdateUpdateStatusUpdateState state;
+
+ public AppUpdateUpdateStatus setMessage(String message) {
+ this.message = message;
+ return this;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public AppUpdateUpdateStatus setState(AppUpdateUpdateStatusUpdateState state) {
+ this.state = state;
+ return this;
+ }
+
+ public AppUpdateUpdateStatusUpdateState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AppUpdateUpdateStatus that = (AppUpdateUpdateStatus) o;
+ return Objects.equals(message, that.message) && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(message, state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AppUpdateUpdateStatus.class)
+ .add("message", message)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java
new file mode 100755
index 000000000..d86091f69
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+
@Generated
public enum AppUpdateUpdateStatusUpdateState {
  // Lifecycle states of an asynchronous app update; SUCCEEDED/FAILED are terminal
  // (see AppsAPI.waitGetUpdateAppSucceeded, which polls against these two).
  FAILED,
  IN_PROGRESS,
  NOT_UPDATED,
  SUCCEEDED,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
index 1394408c5..dee0d4745 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
@@ -81,6 +81,55 @@ public App waitGetAppActive(String name, Duration timeout, Consumer callbac
throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
}
+ public AppUpdate waitGetUpdateAppSucceeded(String appName) throws TimeoutException {
+ return waitGetUpdateAppSucceeded(appName, Duration.ofMinutes(20), null);
+ }
+
+ public AppUpdate waitGetUpdateAppSucceeded(
+ String appName, Duration timeout, Consumer callback) throws TimeoutException {
+ long deadline = System.currentTimeMillis() + timeout.toMillis();
+ java.util.List targetStates =
+ Arrays.asList(AppUpdateUpdateStatusUpdateState.SUCCEEDED);
+ java.util.List failureStates =
+ Arrays.asList(AppUpdateUpdateStatusUpdateState.FAILED);
+ String statusMessage = "polling...";
+ int attempt = 1;
+ while (System.currentTimeMillis() < deadline) {
+ AppUpdate poll = getUpdate(new GetAppUpdateRequest().setAppName(appName));
+ AppUpdateUpdateStatusUpdateState status = poll.getStatus().getState();
+ statusMessage = String.format("current status: %s", status);
+ if (poll.getStatus() != null) {
+ statusMessage = poll.getStatus().getMessage();
+ }
+ if (targetStates.contains(status)) {
+ return poll;
+ }
+ if (callback != null) {
+ callback.accept(poll);
+ }
+ if (failureStates.contains(status)) {
+ String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage);
+ throw new IllegalStateException(msg);
+ }
+
+ String prefix = String.format("appName=%s", appName);
+ int sleep = attempt;
+ if (sleep > 10) {
+ // sleep 10s max per attempt
+ sleep = 10;
+ }
+ LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep);
+ try {
+ Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new DatabricksException("Current thread was interrupted", e);
+ }
+ attempt++;
+ }
+ throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
+ }
+
public AppDeployment waitGetDeploymentAppSucceeded(String appName, String deploymentId)
throws TimeoutException {
return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null);
@@ -186,6 +235,17 @@ public Wait create(CreateAppRequest request) {
(timeout, callback) -> waitGetAppActive(response.getName(), timeout, callback), response);
}
+ /**
+ * Creates an app update and starts the update process. The update process is asynchronous and the
+ * status of the update can be checked with the GetAppUpdate method.
+ */
+ public Wait createUpdate(AsyncUpdateAppRequest request) {
+ AppUpdate response = impl.createUpdate(request);
+ return new Wait<>(
+ (timeout, callback) -> waitGetUpdateAppSucceeded(request.getAppName(), timeout, callback),
+ response);
+ }
+
public App delete(String name) {
return delete(new DeleteAppRequest().setName(name));
}
@@ -242,6 +302,15 @@ public AppPermissions getPermissions(GetAppPermissionsRequest request) {
return impl.getPermissions(request);
}
  /** Convenience overload: gets the status of the latest update for the named app. */
  public AppUpdate getUpdate(String appName) {
    return getUpdate(new GetAppUpdateRequest().setAppName(appName));
  }

  /** Gets the status of an app update. */
  public AppUpdate getUpdate(GetAppUpdateRequest request) {
    return impl.getUpdate(request);
  }
+
/** Lists all apps in the workspace. */
public Iterable list(ListAppsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
index 0e6ec9ff6..047d71e54 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
@@ -30,6 +30,20 @@ public App create(CreateAppRequest request) {
}
}
  @Override
  public AppUpdate createUpdate(AsyncUpdateAppRequest request) {
    // POST /api/2.0/apps/{app_name}/update — starts the asynchronous update and returns its status.
    String path = String.format("/api/2.0/apps/%s/update", request.getAppName());
    try {
      Request req = new Request("POST", path, apiClient.serialize(request));
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      req.withHeader("Content-Type", "application/json");
      return apiClient.execute(req, AppUpdate.class);
    } catch (IOException e) {
      // Serialization/transport failures surface as the SDK's unchecked exception type.
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
+
@Override
public App delete(DeleteAppRequest request) {
String path = String.format("/api/2.0/apps/%s", request.getName());
@@ -112,6 +126,19 @@ public AppPermissions getPermissions(GetAppPermissionsRequest request) {
}
}
  @Override
  public AppUpdate getUpdate(GetAppUpdateRequest request) {
    // GET /api/2.0/apps/{app_name}/update — fetches the status of the app's latest update.
    String path = String.format("/api/2.0/apps/%s/update", request.getAppName());
    try {
      Request req = new Request("GET", path);
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      return apiClient.execute(req, AppUpdate.class);
    } catch (IOException e) {
      // Transport failures surface as the SDK's unchecked exception type.
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
+
@Override
public ListAppsResponse list(ListAppsRequest request) {
String path = "/api/2.0/apps";
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
index 9e5b895bf..31742387d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
@@ -16,6 +16,12 @@ public interface AppsService {
/** Creates a new app. */
App create(CreateAppRequest createAppRequest);
+ /**
+ * Creates an app update and starts the update process. The update process is asynchronous and the
+ * status of the update can be checked with the GetAppUpdate method.
+ */
+ AppUpdate createUpdate(AsyncUpdateAppRequest asyncUpdateAppRequest);
+
/** Deletes an app. */
App delete(DeleteAppRequest deleteAppRequest);
@@ -35,6 +41,9 @@ GetAppPermissionLevelsResponse getPermissionLevels(
/** Gets the permissions of an app. Apps can inherit permissions from their root object. */
AppPermissions getPermissions(GetAppPermissionsRequest getAppPermissionsRequest);
+ /** Gets the status of an app update. */
+ AppUpdate getUpdate(GetAppUpdateRequest getAppUpdateRequest);
+
/** Lists all apps in the workspace. */
ListAppsResponse list(ListAppsRequest listAppsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java
new file mode 100755
index 000000000..136f919eb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java
@@ -0,0 +1,84 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AsyncUpdateAppRequest {
+ /** */
+ @JsonProperty("app")
+ private App app;
+
+ /** */
+ @JsonIgnore private String appName;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("update_mask")
+ private String updateMask;
+
+ public AsyncUpdateAppRequest setApp(App app) {
+ this.app = app;
+ return this;
+ }
+
+ public App getApp() {
+ return app;
+ }
+
+ public AsyncUpdateAppRequest setAppName(String appName) {
+ this.appName = appName;
+ return this;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ public AsyncUpdateAppRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AsyncUpdateAppRequest that = (AsyncUpdateAppRequest) o;
+ return Objects.equals(app, that.app)
+ && Objects.equals(appName, that.appName)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(app, appName, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AsyncUpdateAppRequest.class)
+ .add("app", app)
+ .add("appName", appName)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java
new file mode 100755
index 000000000..ff5b63350
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+
@Generated
public enum ComputeSize {
  // Compute sizes selectable for an app (App.compute_size / AppUpdate.compute_size).
  // NOTE(review): LIQUID presumably denotes elastic/autoscaling compute — confirm against API docs.
  LARGE,
  LIQUID,
  MEDIUM,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java
new file mode 100755
index 000000000..152df04b8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetAppUpdateRequest {
+ /** The name of the app. */
+ @JsonIgnore private String appName;
+
+ public GetAppUpdateRequest setAppName(String appName) {
+ this.appName = appName;
+ return this;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAppUpdateRequest that = (GetAppUpdateRequest) o;
+ return Objects.equals(appName, that.appName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(appName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAppUpdateRequest.class).add("appName", appName).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java
new file mode 100755
index 000000000..be656f0d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A request to create a UsagePolicy. */
+@Generated
+public class CreateUsagePolicyRequest {
+ /** The policy to create. `policy_id` needs to be empty as it will be generated */
+ @JsonProperty("policy")
+ private UsagePolicy policy;
+
+ /** A unique identifier for this request. Restricted to 36 ASCII characters. */
+ @JsonProperty("request_id")
+ private String requestId;
+
+ public CreateUsagePolicyRequest setPolicy(UsagePolicy policy) {
+ this.policy = policy;
+ return this;
+ }
+
+ public UsagePolicy getPolicy() {
+ return policy;
+ }
+
+ public CreateUsagePolicyRequest setRequestId(String requestId) {
+ this.requestId = requestId;
+ return this;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateUsagePolicyRequest that = (CreateUsagePolicyRequest) o;
+ return Objects.equals(policy, that.policy) && Objects.equals(requestId, that.requestId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policy, requestId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateUsagePolicyRequest.class)
+ .add("policy", policy)
+ .add("requestId", requestId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java
new file mode 100755
index 000000000..0d52acd6f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteUsagePolicyRequest {
+ /** The Id of the policy. */
+ @JsonIgnore private String policyId;
+
+ public DeleteUsagePolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteUsagePolicyRequest that = (DeleteUsagePolicyRequest) o;
+ return Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteUsagePolicyRequest.class).add("policyId", policyId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java
new file mode 100755
index 000000000..db044183d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetUsagePolicyRequest {
+ /** The Id of the policy. */
+ @JsonIgnore private String policyId;
+
+ public GetUsagePolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetUsagePolicyRequest that = (GetUsagePolicyRequest) o;
+ return Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetUsagePolicyRequest.class).add("policyId", policyId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java
new file mode 100755
index 000000000..81936b444
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java
@@ -0,0 +1,94 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListUsagePoliciesRequest {
+ /** A filter to apply to the list of policies. */
+ @JsonIgnore
+ @QueryParam("filter_by")
+ private Filter filterBy;
+
+ /** The maximum number of usage policies to return. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** A page token, received from a previous `ListUsagePolicies` call. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The sort specification. */
+ @JsonIgnore
+ @QueryParam("sort_spec")
+ private SortSpec sortSpec;
+
+ public ListUsagePoliciesRequest setFilterBy(Filter filterBy) {
+ this.filterBy = filterBy;
+ return this;
+ }
+
+ public Filter getFilterBy() {
+ return filterBy;
+ }
+
+ public ListUsagePoliciesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListUsagePoliciesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListUsagePoliciesRequest setSortSpec(SortSpec sortSpec) {
+ this.sortSpec = sortSpec;
+ return this;
+ }
+
+ public SortSpec getSortSpec() {
+ return sortSpec;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListUsagePoliciesRequest that = (ListUsagePoliciesRequest) o;
+ return Objects.equals(filterBy, that.filterBy)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(sortSpec, that.sortSpec);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(filterBy, pageSize, pageToken, sortSpec);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListUsagePoliciesRequest.class)
+ .add("filterBy", filterBy)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("sortSpec", sortSpec)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java
new file mode 100755
index 000000000..6fb7a443c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A list of usage policies. */
+@Generated
+public class ListUsagePoliciesResponse {
+ /** A token that can be sent as `page_token` to retrieve the next page. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("policies")
+ private Collection<UsagePolicy> policies;
+
+ /** A token that can be sent as `page_token` to retrieve the previous page. */
+ @JsonProperty("previous_page_token")
+ private String previousPageToken;
+
+ public ListUsagePoliciesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListUsagePoliciesResponse setPolicies(Collection<UsagePolicy> policies) {
+ this.policies = policies;
+ return this;
+ }
+
+ public Collection<UsagePolicy> getPolicies() {
+ return policies;
+ }
+
+ public ListUsagePoliciesResponse setPreviousPageToken(String previousPageToken) {
+ this.previousPageToken = previousPageToken;
+ return this;
+ }
+
+ public String getPreviousPageToken() {
+ return previousPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListUsagePoliciesResponse that = (ListUsagePoliciesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(policies, that.policies)
+ && Objects.equals(previousPageToken, that.previousPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, policies, previousPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListUsagePoliciesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("policies", policies)
+ .add("previousPageToken", previousPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
index baafb73c5..d213220e6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
@@ -27,6 +27,11 @@ public class UpdateBudgetPolicyRequest {
/** The Id of the policy. This field is generated by Databricks and globally unique. */
@JsonIgnore private String policyId;
+ /** Field mask specifying which fields to update. When not provided, all fields are updated. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
public UpdateBudgetPolicyRequest setLimitConfig(LimitConfig limitConfig) {
this.limitConfig = limitConfig;
return this;
@@ -54,6 +59,15 @@ public String getPolicyId() {
return policyId;
}
+ public UpdateBudgetPolicyRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -61,12 +75,13 @@ public boolean equals(Object o) {
UpdateBudgetPolicyRequest that = (UpdateBudgetPolicyRequest) o;
return Objects.equals(limitConfig, that.limitConfig)
&& Objects.equals(policy, that.policy)
- && Objects.equals(policyId, that.policyId);
+ && Objects.equals(policyId, that.policyId)
+ && Objects.equals(updateMask, that.updateMask);
}
@Override
public int hashCode() {
- return Objects.hash(limitConfig, policy, policyId);
+ return Objects.hash(limitConfig, policy, policyId, updateMask);
}
@Override
@@ -75,6 +90,7 @@ public String toString() {
.add("limitConfig", limitConfig)
.add("policy", policy)
.add("policyId", policyId)
+ .add("updateMask", updateMask)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java
new file mode 100755
index 000000000..b8a7f1824
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateUsagePolicyRequest {
+ /** DEPRECATED. This is redundant field as LimitConfig is part of the UsagePolicy */
+ @JsonIgnore
+ @QueryParam("limit_config")
+ private LimitConfig limitConfig;
+
+ /** The policy to update. `creator_user_id` cannot be specified in the request. */
+ @JsonProperty("policy")
+ private UsagePolicy policy;
+
+ /** The Id of the policy. This field is generated by Databricks and globally unique. */
+ @JsonIgnore private String policyId;
+
+ public UpdateUsagePolicyRequest setLimitConfig(LimitConfig limitConfig) {
+ this.limitConfig = limitConfig;
+ return this;
+ }
+
+ public LimitConfig getLimitConfig() {
+ return limitConfig;
+ }
+
+ public UpdateUsagePolicyRequest setPolicy(UsagePolicy policy) {
+ this.policy = policy;
+ return this;
+ }
+
+ public UsagePolicy getPolicy() {
+ return policy;
+ }
+
+ public UpdateUsagePolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateUsagePolicyRequest that = (UpdateUsagePolicyRequest) o;
+ return Objects.equals(limitConfig, that.limitConfig)
+ && Objects.equals(policy, that.policy)
+ && Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(limitConfig, policy, policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateUsagePolicyRequest.class)
+ .add("limitConfig", limitConfig)
+ .add("policy", policy)
+ .add("policyId", policyId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java
new file mode 100755
index 000000000..8481284e1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java
@@ -0,0 +1,92 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Contains the UsagePolicy details (same structure as BudgetPolicy) */
+@Generated
+public class UsagePolicy {
+ /** List of workspaces that this usage policy will be exclusively bound to. */
+ @JsonProperty("binding_workspace_ids")
+ private Collection<Long> bindingWorkspaceIds;
+
+ /** A list of tags defined by the customer. At most 20 entries are allowed per policy. */
+ @JsonProperty("custom_tags")
+ private Collection<com.databricks.sdk.service.compute.CustomPolicyTag> customTags;
+
+ /** The Id of the policy. This field is generated by Databricks and globally unique. */
+ @JsonProperty("policy_id")
+ private String policyId;
+
+ /** The name of the policy. */
+ @JsonProperty("policy_name")
+ private String policyName;
+
+ public UsagePolicy setBindingWorkspaceIds(Collection<Long> bindingWorkspaceIds) {
+ this.bindingWorkspaceIds = bindingWorkspaceIds;
+ return this;
+ }
+
+ public Collection<Long> getBindingWorkspaceIds() {
+ return bindingWorkspaceIds;
+ }
+
+ public UsagePolicy setCustomTags(
+ Collection<com.databricks.sdk.service.compute.CustomPolicyTag> customTags) {
+ this.customTags = customTags;
+ return this;
+ }
+
+ public Collection<com.databricks.sdk.service.compute.CustomPolicyTag> getCustomTags() {
+ return customTags;
+ }
+
+ public UsagePolicy setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ public UsagePolicy setPolicyName(String policyName) {
+ this.policyName = policyName;
+ return this;
+ }
+
+ public String getPolicyName() {
+ return policyName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UsagePolicy that = (UsagePolicy) o;
+ return Objects.equals(bindingWorkspaceIds, that.bindingWorkspaceIds)
+ && Objects.equals(customTags, that.customTags)
+ && Objects.equals(policyId, that.policyId)
+ && Objects.equals(policyName, that.policyName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(bindingWorkspaceIds, customTags, policyId, policyName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UsagePolicy.class)
+ .add("bindingWorkspaceIds", bindingWorkspaceIds)
+ .add("customTags", customTags)
+ .add("policyId", policyId)
+ .add("policyName", policyName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java
new file mode 100755
index 000000000..b0ec7d9f0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A service serves REST API about Usage policies */
+@Generated
+public class UsagePolicyAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(UsagePolicyAPI.class);
+
+ private final UsagePolicyService impl;
+
+ /** Regular-use constructor */
+ public UsagePolicyAPI(ApiClient apiClient) {
+ impl = new UsagePolicyImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public UsagePolicyAPI(UsagePolicyService mock) {
+ impl = mock;
+ }
+
+ /** Creates a new usage policy. */
+ public UsagePolicy create(CreateUsagePolicyRequest request) {
+ return impl.create(request);
+ }
+
+ public void delete(String policyId) {
+ delete(new DeleteUsagePolicyRequest().setPolicyId(policyId));
+ }
+
+ /** Deletes a usage policy */
+ public void delete(DeleteUsagePolicyRequest request) {
+ impl.delete(request);
+ }
+
+ public UsagePolicy get(String policyId) {
+ return get(new GetUsagePolicyRequest().setPolicyId(policyId));
+ }
+
+ /** Retrieves a usage policy by its ID. */
+ public UsagePolicy get(GetUsagePolicyRequest request) {
+ return impl.get(request);
+ }
+
+ /**
+ * Lists all usage policies. Policies are returned in the alphabetically ascending order of their
+ * names.
+ */
+ public Iterable<UsagePolicy> list(ListUsagePoliciesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListUsagePoliciesResponse::getPolicies,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** Updates a usage policy */
+ public UsagePolicy update(UpdateUsagePolicyRequest request) {
+ return impl.update(request);
+ }
+
+ public UsagePolicyService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java
new file mode 100755
index 000000000..75072e20c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java
@@ -0,0 +1,96 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of UsagePolicy */
+@Generated
+class UsagePolicyImpl implements UsagePolicyService {
+ private final ApiClient apiClient;
+
+ public UsagePolicyImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public UsagePolicy create(CreateUsagePolicyRequest request) {
+ String path =
+ String.format("/api/2.1/accounts/%s/usage-policies", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, UsagePolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void delete(DeleteUsagePolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/usage-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public UsagePolicy get(GetUsagePolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/usage-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, UsagePolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListUsagePoliciesResponse list(ListUsagePoliciesRequest request) {
+ String path =
+ String.format("/api/2.1/accounts/%s/usage-policies", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListUsagePoliciesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public UsagePolicy update(UpdateUsagePolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/usage-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, UsagePolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java
new file mode 100755
index 000000000..d9edca42f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java
@@ -0,0 +1,32 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * A service serves REST API about Usage policies
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface UsagePolicyService {
+ /** Creates a new usage policy. */
+ UsagePolicy create(CreateUsagePolicyRequest createUsagePolicyRequest);
+
+ /** Deletes a usage policy */
+ void delete(DeleteUsagePolicyRequest deleteUsagePolicyRequest);
+
+ /** Retrieves a usage policy by its ID. */
+ UsagePolicy get(GetUsagePolicyRequest getUsagePolicyRequest);
+
+ /**
+ * Lists all usage policies. Policies are returned in the alphabetically ascending order of their
+ * names.
+ */
+ ListUsagePoliciesResponse list(ListUsagePoliciesRequest listUsagePoliciesRequest);
+
+ /** Updates a usage policy */
+ UsagePolicy update(UpdateUsagePolicyRequest updateUsagePolicyRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
index 61feaf2f3..6e1cd7c9e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
@@ -25,20 +25,22 @@ public AccountMetastoreAssignmentsAPI(AccountMetastoreAssignmentsService mock) {
}
/** Creates an assignment to a metastore for a workspace */
- public void create(AccountsCreateMetastoreAssignment request) {
- impl.create(request);
+ public AccountsCreateMetastoreAssignmentResponse create(
+ AccountsCreateMetastoreAssignment request) {
+ return impl.create(request);
}
- public void delete(long workspaceId, String metastoreId) {
- delete(
+ public AccountsDeleteMetastoreAssignmentResponse delete(long workspaceId, String metastoreId) {
+ return delete(
new DeleteAccountMetastoreAssignmentRequest()
.setWorkspaceId(workspaceId)
.setMetastoreId(metastoreId));
}
/** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */
- public void delete(DeleteAccountMetastoreAssignmentRequest request) {
- impl.delete(request);
+ public AccountsDeleteMetastoreAssignmentResponse delete(
+ DeleteAccountMetastoreAssignmentRequest request) {
+ return impl.delete(request);
}
public AccountsMetastoreAssignment get(long workspaceId) {
@@ -47,7 +49,7 @@ public AccountsMetastoreAssignment get(long workspaceId) {
/**
* Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is
- * assigned a metastore, the mappig will be returned. If no metastore is assigned to the
+ * assigned a metastore, the mapping will be returned. If no metastore is assigned to the
* workspace, the assignment will not be found and a 404 returned.
*/
public AccountsMetastoreAssignment get(GetAccountMetastoreAssignmentRequest request) {
@@ -71,8 +73,9 @@ public Iterable list(ListAccountMetastoreAssignmentsRequest request) {
* Updates an assignment to a metastore for a workspace. Currently, only the default catalog may
* be updated.
*/
- public void update(AccountsUpdateMetastoreAssignment request) {
- impl.update(request);
+ public AccountsUpdateMetastoreAssignmentResponse update(
+ AccountsUpdateMetastoreAssignment request) {
+ return impl.update(request);
}
public AccountMetastoreAssignmentsService impl() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java
index f0418c91e..d9696ba4d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java
@@ -17,7 +17,8 @@ public AccountMetastoreAssignmentsImpl(ApiClient apiClient) {
}
@Override
- public void create(AccountsCreateMetastoreAssignment request) {
+ public AccountsCreateMetastoreAssignmentResponse create(
+ AccountsCreateMetastoreAssignment request) {
String path =
String.format(
"/api/2.0/accounts/%s/workspaces/%s/metastores/%s",
@@ -27,14 +28,15 @@ public void create(AccountsCreateMetastoreAssignment request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, Void.class);
+ return apiClient.execute(req, AccountsCreateMetastoreAssignmentResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public void delete(DeleteAccountMetastoreAssignmentRequest request) {
+ public AccountsDeleteMetastoreAssignmentResponse delete(
+ DeleteAccountMetastoreAssignmentRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/workspaces/%s/metastores/%s",
@@ -43,7 +45,7 @@ public void delete(DeleteAccountMetastoreAssignmentRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, Void.class);
+ return apiClient.execute(req, AccountsDeleteMetastoreAssignmentResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -83,7 +85,8 @@ public ListAccountMetastoreAssignmentsResponse list(
}
@Override
- public void update(AccountsUpdateMetastoreAssignment request) {
+ public AccountsUpdateMetastoreAssignmentResponse update(
+ AccountsUpdateMetastoreAssignment request) {
String path =
String.format(
"/api/2.0/accounts/%s/workspaces/%s/metastores/%s",
@@ -93,7 +96,7 @@ public void update(AccountsUpdateMetastoreAssignment request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, Void.class);
+ return apiClient.execute(req, AccountsUpdateMetastoreAssignmentResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
index e1b89bd09..6ad225224 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
@@ -13,14 +13,16 @@
@Generated
public interface AccountMetastoreAssignmentsService {
/** Creates an assignment to a metastore for a workspace */
- void create(AccountsCreateMetastoreAssignment accountsCreateMetastoreAssignment);
+ AccountsCreateMetastoreAssignmentResponse create(
+ AccountsCreateMetastoreAssignment accountsCreateMetastoreAssignment);
/** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */
- void delete(DeleteAccountMetastoreAssignmentRequest deleteAccountMetastoreAssignmentRequest);
+ AccountsDeleteMetastoreAssignmentResponse delete(
+ DeleteAccountMetastoreAssignmentRequest deleteAccountMetastoreAssignmentRequest);
/**
* Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is
- * assigned a metastore, the mappig will be returned. If no metastore is assigned to the
+ * assigned a metastore, the mapping will be returned. If no metastore is assigned to the
* workspace, the assignment will not be found and a 404 returned.
*/
AccountsMetastoreAssignment get(
@@ -34,5 +36,6 @@ ListAccountMetastoreAssignmentsResponse list(
* Updates an assignment to a metastore for a workspace. Currently, only the default catalog may
* be updated.
*/
- void update(AccountsUpdateMetastoreAssignment accountsUpdateMetastoreAssignment);
+ AccountsUpdateMetastoreAssignmentResponse update(
+ AccountsUpdateMetastoreAssignment accountsUpdateMetastoreAssignment);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
index 601b17d68..52fd325f0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
@@ -28,36 +28,39 @@ public AccountMetastoresAPI(AccountMetastoresService mock) {
}
/** Creates a Unity Catalog metastore. */
- public AccountsMetastoreInfo create(AccountsCreateMetastore request) {
+ public AccountsCreateMetastoreResponse create(AccountsCreateMetastore request) {
return impl.create(request);
}
- public void delete(String metastoreId) {
- delete(new DeleteAccountMetastoreRequest().setMetastoreId(metastoreId));
+ public AccountsDeleteMetastoreResponse delete(String metastoreId) {
+ return delete(new DeleteAccountMetastoreRequest().setMetastoreId(metastoreId));
}
/** Deletes a Unity Catalog metastore for an account, both specified by ID. */
- public void delete(DeleteAccountMetastoreRequest request) {
- impl.delete(request);
+ public AccountsDeleteMetastoreResponse delete(DeleteAccountMetastoreRequest request) {
+ return impl.delete(request);
}
- public AccountsMetastoreInfo get(String metastoreId) {
+ public AccountsGetMetastoreResponse get(String metastoreId) {
return get(new GetAccountMetastoreRequest().setMetastoreId(metastoreId));
}
/** Gets a Unity Catalog metastore from an account, both specified by ID. */
- public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) {
+ public AccountsGetMetastoreResponse get(GetAccountMetastoreRequest request) {
return impl.get(request);
}
/** Gets all Unity Catalog metastores associated with an account specified by ID. */
public Iterable<AccountsMetastoreInfo> list() {
return new Paginator<>(
- null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null);
+ null,
+ (Void v) -> impl.list(),
+ AccountsListMetastoresResponse::getMetastores,
+ response -> null);
}
/** Updates an existing Unity Catalog metastore. */
- public AccountsMetastoreInfo update(AccountsUpdateMetastore request) {
+ public AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java
index 56328c4f0..e983809d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java
@@ -17,21 +17,21 @@ public AccountMetastoresImpl(ApiClient apiClient) {
}
@Override
- public AccountsMetastoreInfo create(AccountsCreateMetastore request) {
+ public AccountsCreateMetastoreResponse create(AccountsCreateMetastore request) {
String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID());
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, AccountsMetastoreInfo.class);
+ return apiClient.execute(req, AccountsCreateMetastoreResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public void delete(DeleteAccountMetastoreRequest request) {
+ public AccountsDeleteMetastoreResponse delete(DeleteAccountMetastoreRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s",
@@ -40,14 +40,14 @@ public void delete(DeleteAccountMetastoreRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, Void.class);
+ return apiClient.execute(req, AccountsDeleteMetastoreResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) {
+ public AccountsGetMetastoreResponse get(GetAccountMetastoreRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s",
@@ -56,26 +56,26 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, AccountsMetastoreInfo.class);
+ return apiClient.execute(req, AccountsGetMetastoreResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public ListMetastoresResponse list() {
+ public AccountsListMetastoresResponse list() {
String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID());
try {
Request req = new Request("GET", path);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, ListMetastoresResponse.class);
+ return apiClient.execute(req, AccountsListMetastoresResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public AccountsMetastoreInfo update(AccountsUpdateMetastore request) {
+ public AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s",
@@ -85,7 +85,7 @@ public AccountsMetastoreInfo update(AccountsUpdateMetastore request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, AccountsMetastoreInfo.class);
+ return apiClient.execute(req, AccountsUpdateMetastoreResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
index 07e71499f..1ec720114 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
@@ -14,17 +14,18 @@
@Generated
public interface AccountMetastoresService {
/** Creates a Unity Catalog metastore. */
- AccountsMetastoreInfo create(AccountsCreateMetastore accountsCreateMetastore);
+ AccountsCreateMetastoreResponse create(AccountsCreateMetastore accountsCreateMetastore);
/** Deletes a Unity Catalog metastore for an account, both specified by ID. */
- void delete(DeleteAccountMetastoreRequest deleteAccountMetastoreRequest);
+ AccountsDeleteMetastoreResponse delete(
+ DeleteAccountMetastoreRequest deleteAccountMetastoreRequest);
/** Gets a Unity Catalog metastore from an account, both specified by ID. */
- AccountsMetastoreInfo get(GetAccountMetastoreRequest getAccountMetastoreRequest);
+ AccountsGetMetastoreResponse get(GetAccountMetastoreRequest getAccountMetastoreRequest);
/** Gets all Unity Catalog metastores associated with an account specified by ID. */
- ListMetastoresResponse list();
+ AccountsListMetastoresResponse list();
/** Updates an existing Unity Catalog metastore. */
- AccountsMetastoreInfo update(AccountsUpdateMetastore accountsUpdateMetastore);
+ AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore accountsUpdateMetastore);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
index b6ff09529..254dc846d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
@@ -25,20 +25,20 @@ public AccountStorageCredentialsAPI(AccountStorageCredentialsService mock) {
}
/**
- * Creates a new storage credential. The request object is specific to the cloud:
+ * Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole**
+ * for AWS credentials - **AzureServicePrincipal** for Azure credentials -
+ * **GcpServiceAccountKey** for GCP credentials
*
- * * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
- * **GcpServiceAcountKey** for GCP credentials.
- *
- *
The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on
+ *
The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on
* the metastore.
*/
- public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential request) {
+ public AccountsCreateStorageCredentialInfo create(AccountsCreateStorageCredential request) {
return impl.create(request);
}
- public void delete(String metastoreId, String storageCredentialName) {
- delete(
+ public AccountsDeleteStorageCredentialResponse delete(
+ String metastoreId, String storageCredentialName) {
+ return delete(
new DeleteAccountStorageCredentialRequest()
.setMetastoreId(metastoreId)
.setStorageCredentialName(storageCredentialName));
@@ -48,8 +48,9 @@ public void delete(String metastoreId, String storageCredentialName) {
* Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
- public void delete(DeleteAccountStorageCredentialRequest request) {
- impl.delete(request);
+ public AccountsDeleteStorageCredentialResponse delete(
+ DeleteAccountStorageCredentialRequest request) {
+ return impl.delete(request);
}
public AccountsStorageCredentialInfo get(String metastoreId, String storageCredentialName) {
@@ -82,9 +83,9 @@ public Iterable list(ListAccountStorageCredentialsRequest
/**
* Updates a storage credential on the metastore. The caller must be the owner of the storage
- * credential. If the caller is a metastore admin, only the __owner__ credential can be changed.
+ * credential. If the caller is a metastore admin, only the **owner** credential can be changed.
*/
- public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) {
+ public AccountsUpdateStorageCredentialResponse update(AccountsUpdateStorageCredential request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java
index 5eb10df59..26ed48604 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java
@@ -17,7 +17,7 @@ public AccountStorageCredentialsImpl(ApiClient apiClient) {
}
@Override
- public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential request) {
+ public AccountsCreateStorageCredentialInfo create(AccountsCreateStorageCredential request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s/storage-credentials",
@@ -27,14 +27,15 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, AccountsStorageCredentialInfo.class);
+ return apiClient.execute(req, AccountsCreateStorageCredentialInfo.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public void delete(DeleteAccountStorageCredentialRequest request) {
+ public AccountsDeleteStorageCredentialResponse delete(
+ DeleteAccountStorageCredentialRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s",
@@ -45,7 +46,7 @@ public void delete(DeleteAccountStorageCredentialRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, Void.class);
+ return apiClient.execute(req, AccountsDeleteStorageCredentialResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -86,7 +87,7 @@ public ListAccountStorageCredentialsResponse list(ListAccountStorageCredentialsR
}
@Override
- public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) {
+ public AccountsUpdateStorageCredentialResponse update(AccountsUpdateStorageCredential request) {
String path =
String.format(
"/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s",
@@ -98,7 +99,7 @@ public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential requ
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, AccountsStorageCredentialInfo.class);
+ return apiClient.execute(req, AccountsUpdateStorageCredentialResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
index 2cceee11c..5c537dacb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
@@ -13,22 +13,22 @@
@Generated
public interface AccountStorageCredentialsService {
/**
- * Creates a new storage credential. The request object is specific to the cloud:
+ * Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole**
+ * for AWS credentials - **AzureServicePrincipal** for Azure credentials -
+ * **GcpServiceAccountKey** for GCP credentials
*
- * * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
- * **GcpServiceAcountKey** for GCP credentials.
- *
- *
The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on
+ *
The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on
* the metastore.
*/
- AccountsStorageCredentialInfo create(
+ AccountsCreateStorageCredentialInfo create(
AccountsCreateStorageCredential accountsCreateStorageCredential);
/**
* Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
- void delete(DeleteAccountStorageCredentialRequest deleteAccountStorageCredentialRequest);
+ AccountsDeleteStorageCredentialResponse delete(
+ DeleteAccountStorageCredentialRequest deleteAccountStorageCredentialRequest);
/**
* Gets a storage credential from the metastore. The caller must be a metastore admin, the owner
@@ -43,8 +43,8 @@ ListAccountStorageCredentialsResponse list(
/**
* Updates a storage credential on the metastore. The caller must be the owner of the storage
- * credential. If the caller is a metastore admin, only the __owner__ credential can be changed.
+ * credential. If the caller is a metastore admin, only the **owner** credential can be changed.
*/
- AccountsStorageCredentialInfo update(
+ AccountsUpdateStorageCredentialResponse update(
AccountsUpdateStorageCredential accountsUpdateStorageCredential);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java
index b297cbf98..e86dbfa1e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java
@@ -7,18 +7,19 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Properties of the new metastore. */
@Generated
public class AccountsCreateMetastore {
/** */
@JsonProperty("metastore_info")
- private CreateMetastore metastoreInfo;
+ private CreateAccountsMetastore metastoreInfo;
- public AccountsCreateMetastore setMetastoreInfo(CreateMetastore metastoreInfo) {
+ public AccountsCreateMetastore setMetastoreInfo(CreateAccountsMetastore metastoreInfo) {
this.metastoreInfo = metastoreInfo;
return this;
}
- public CreateMetastore getMetastoreInfo() {
+ public CreateAccountsMetastore getMetastoreInfo() {
return metastoreInfo;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java
index fa3e7a1e7..5ce5863fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java
@@ -8,6 +8,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The mapping from workspace to metastore. */
@Generated
public class AccountsCreateMetastoreAssignment {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java
new file mode 100755
index 000000000..d8b003eaf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** The metastore assignment was successfully created. */
+@Generated
+public class AccountsCreateMetastoreAssignmentResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsCreateMetastoreAssignmentResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java
similarity index 76%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java
index 249aeb544..b6848ebe1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java
@@ -8,12 +8,12 @@
import java.util.Objects;
@Generated
-public class AccountsMetastoreInfo {
+public class AccountsCreateMetastoreResponse {
/** */
@JsonProperty("metastore_info")
private MetastoreInfo metastoreInfo;
- public AccountsMetastoreInfo setMetastoreInfo(MetastoreInfo metastoreInfo) {
+ public AccountsCreateMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) {
this.metastoreInfo = metastoreInfo;
return this;
}
@@ -26,7 +26,7 @@ public MetastoreInfo getMetastoreInfo() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- AccountsMetastoreInfo that = (AccountsMetastoreInfo) o;
+ AccountsCreateMetastoreResponse that = (AccountsCreateMetastoreResponse) o;
return Objects.equals(metastoreInfo, that.metastoreInfo);
}
@@ -37,7 +37,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(AccountsMetastoreInfo.class)
+ return new ToStringer(AccountsCreateMetastoreResponse.class)
.add("metastoreInfo", metastoreInfo)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java
index c1c33ea9f..a19caa490 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java
@@ -12,17 +12,25 @@
public class AccountsCreateStorageCredential {
/** */
@JsonProperty("credential_info")
- private CreateStorageCredential credentialInfo;
+ private CreateAccountsStorageCredential credentialInfo;
/** Unity Catalog metastore ID */
@JsonIgnore private String metastoreId;
- public AccountsCreateStorageCredential setCredentialInfo(CreateStorageCredential credentialInfo) {
+ /**
+ * Optional, default false. Supplying true to this argument skips validation of the created set of
+ * credentials.
+ */
+ @JsonProperty("skip_validation")
+ private Boolean skipValidation;
+
+ public AccountsCreateStorageCredential setCredentialInfo(
+ CreateAccountsStorageCredential credentialInfo) {
this.credentialInfo = credentialInfo;
return this;
}
- public CreateStorageCredential getCredentialInfo() {
+ public CreateAccountsStorageCredential getCredentialInfo() {
return credentialInfo;
}
@@ -35,18 +43,28 @@ public String getMetastoreId() {
return metastoreId;
}
+ public AccountsCreateStorageCredential setSkipValidation(Boolean skipValidation) {
+ this.skipValidation = skipValidation;
+ return this;
+ }
+
+ public Boolean getSkipValidation() {
+ return skipValidation;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AccountsCreateStorageCredential that = (AccountsCreateStorageCredential) o;
return Objects.equals(credentialInfo, that.credentialInfo)
- && Objects.equals(metastoreId, that.metastoreId);
+ && Objects.equals(metastoreId, that.metastoreId)
+ && Objects.equals(skipValidation, that.skipValidation);
}
@Override
public int hashCode() {
- return Objects.hash(credentialInfo, metastoreId);
+ return Objects.hash(credentialInfo, metastoreId, skipValidation);
}
@Override
@@ -54,6 +72,7 @@ public String toString() {
return new ToStringer(AccountsCreateStorageCredential.class)
.add("credentialInfo", credentialInfo)
.add("metastoreId", metastoreId)
+ .add("skipValidation", skipValidation)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java
new file mode 100755
index 000000000..420d976e3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AccountsCreateStorageCredentialInfo {
+ /** */
+ @JsonProperty("credential_info")
+ private StorageCredentialInfo credentialInfo;
+
+ public AccountsCreateStorageCredentialInfo setCredentialInfo(
+ StorageCredentialInfo credentialInfo) {
+ this.credentialInfo = credentialInfo;
+ return this;
+ }
+
+ public StorageCredentialInfo getCredentialInfo() {
+ return credentialInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountsCreateStorageCredentialInfo that = (AccountsCreateStorageCredentialInfo) o;
+ return Objects.equals(credentialInfo, that.credentialInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(credentialInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsCreateStorageCredentialInfo.class)
+ .add("credentialInfo", credentialInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java
new file mode 100755
index 000000000..bb80b9155
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** The metastore assignment was successfully deleted. */
+@Generated
+public class AccountsDeleteMetastoreAssignmentResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsDeleteMetastoreAssignmentResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java
new file mode 100755
index 000000000..c764feedf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** The metastore was successfully deleted. */
+@Generated
+public class AccountsDeleteMetastoreResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsDeleteMetastoreResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java
new file mode 100755
index 000000000..125aa3bdf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** The storage credential was successfully deleted. */
+@Generated
+public class AccountsDeleteStorageCredentialResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsDeleteStorageCredentialResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java
new file mode 100755
index 000000000..2da0eb3f3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The metastore was successfully returned. */
+@Generated
+public class AccountsGetMetastoreResponse {
+ /** */
+ @JsonProperty("metastore_info")
+ private MetastoreInfo metastoreInfo;
+
+ public AccountsGetMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) {
+ this.metastoreInfo = metastoreInfo;
+ return this;
+ }
+
+ public MetastoreInfo getMetastoreInfo() {
+ return metastoreInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountsGetMetastoreResponse that = (AccountsGetMetastoreResponse) o;
+ return Objects.equals(metastoreInfo, that.metastoreInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metastoreInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsGetMetastoreResponse.class)
+ .add("metastoreInfo", metastoreInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java
new file mode 100755
index 000000000..95620fe4d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Metastores were returned successfully. */
+@Generated
+public class AccountsListMetastoresResponse {
+ /** An array of metastore information objects. */
+ @JsonProperty("metastores")
+ private Collection metastores;
+
+ public AccountsListMetastoresResponse setMetastores(Collection metastores) {
+ this.metastores = metastores;
+ return this;
+ }
+
+ public Collection getMetastores() {
+ return metastores;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountsListMetastoresResponse that = (AccountsListMetastoresResponse) o;
+ return Objects.equals(metastores, that.metastores);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metastores);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsListMetastoresResponse.class)
+ .add("metastores", metastores)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java
index bf989d674..fbe83d2bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The workspace metastore assignment was successfully returned. */
@Generated
public class AccountsMetastoreAssignment {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java
index 696342a98..f3e5074e9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The storage credential was successfully retrieved. */
@Generated
public class AccountsStorageCredentialInfo {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java
index 044d8c6f2..74a421445 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java
@@ -8,14 +8,15 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Properties of the metastore to change. */
@Generated
public class AccountsUpdateMetastore {
/** Unity Catalog metastore ID */
@JsonIgnore private String metastoreId;
- /** */
+ /** Properties of the metastore to change. */
@JsonProperty("metastore_info")
- private UpdateMetastore metastoreInfo;
+ private UpdateAccountsMetastore metastoreInfo;
public AccountsUpdateMetastore setMetastoreId(String metastoreId) {
this.metastoreId = metastoreId;
@@ -26,12 +27,12 @@ public String getMetastoreId() {
return metastoreId;
}
- public AccountsUpdateMetastore setMetastoreInfo(UpdateMetastore metastoreInfo) {
+ public AccountsUpdateMetastore setMetastoreInfo(UpdateAccountsMetastore metastoreInfo) {
this.metastoreInfo = metastoreInfo;
return this;
}
- public UpdateMetastore getMetastoreInfo() {
+ public UpdateAccountsMetastore getMetastoreInfo() {
return metastoreInfo;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java
index 3ce7c6f48..a50a5eab0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java
@@ -8,6 +8,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The metastore assignment to update. */
@Generated
public class AccountsUpdateMetastoreAssignment {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java
new file mode 100755
index 000000000..648dbfa47
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** The metastore assignment was successfully updated. */
+@Generated
+public class AccountsUpdateMetastoreAssignmentResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsUpdateMetastoreAssignmentResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java
new file mode 100755
index 000000000..4a4d5d560
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The metastore update request succeeded. */
+@Generated
+public class AccountsUpdateMetastoreResponse {
+ /** */
+ @JsonProperty("metastore_info")
+ private MetastoreInfo metastoreInfo;
+
+ public AccountsUpdateMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) {
+ this.metastoreInfo = metastoreInfo;
+ return this;
+ }
+
+ public MetastoreInfo getMetastoreInfo() {
+ return metastoreInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountsUpdateMetastoreResponse that = (AccountsUpdateMetastoreResponse) o;
+ return Objects.equals(metastoreInfo, that.metastoreInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metastoreInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsUpdateMetastoreResponse.class)
+ .add("metastoreInfo", metastoreInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java
index bca8ab349..f654e4d3b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java
@@ -8,24 +8,32 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The storage credential to update. */
@Generated
public class AccountsUpdateStorageCredential {
/** */
@JsonProperty("credential_info")
- private UpdateStorageCredential credentialInfo;
+ private UpdateAccountsStorageCredential credentialInfo;
/** Unity Catalog metastore ID */
@JsonIgnore private String metastoreId;
+ /**
+ * Optional. Supplying true to this argument skips validation of the updated set of credentials.
+ */
+ @JsonProperty("skip_validation")
+ private Boolean skipValidation;
+
/** Name of the storage credential. */
@JsonIgnore private String storageCredentialName;
- public AccountsUpdateStorageCredential setCredentialInfo(UpdateStorageCredential credentialInfo) {
+ public AccountsUpdateStorageCredential setCredentialInfo(
+ UpdateAccountsStorageCredential credentialInfo) {
this.credentialInfo = credentialInfo;
return this;
}
- public UpdateStorageCredential getCredentialInfo() {
+ public UpdateAccountsStorageCredential getCredentialInfo() {
return credentialInfo;
}
@@ -38,6 +46,15 @@ public String getMetastoreId() {
return metastoreId;
}
+ public AccountsUpdateStorageCredential setSkipValidation(Boolean skipValidation) {
+ this.skipValidation = skipValidation;
+ return this;
+ }
+
+ public Boolean getSkipValidation() {
+ return skipValidation;
+ }
+
public AccountsUpdateStorageCredential setStorageCredentialName(String storageCredentialName) {
this.storageCredentialName = storageCredentialName;
return this;
@@ -54,12 +71,13 @@ public boolean equals(Object o) {
AccountsUpdateStorageCredential that = (AccountsUpdateStorageCredential) o;
return Objects.equals(credentialInfo, that.credentialInfo)
&& Objects.equals(metastoreId, that.metastoreId)
+ && Objects.equals(skipValidation, that.skipValidation)
&& Objects.equals(storageCredentialName, that.storageCredentialName);
}
@Override
public int hashCode() {
- return Objects.hash(credentialInfo, metastoreId, storageCredentialName);
+ return Objects.hash(credentialInfo, metastoreId, skipValidation, storageCredentialName);
}
@Override
@@ -67,6 +85,7 @@ public String toString() {
return new ToStringer(AccountsUpdateStorageCredential.class)
.add("credentialInfo", credentialInfo)
.add("metastoreId", metastoreId)
+ .add("skipValidation", skipValidation)
.add("storageCredentialName", storageCredentialName)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java
new file mode 100755
index 000000000..0f1796167
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The storage credential was successfully updated. */
+@Generated
+public class AccountsUpdateStorageCredentialResponse {
+ /** */
+ @JsonProperty("credential_info")
+ private StorageCredentialInfo credentialInfo;
+
+ public AccountsUpdateStorageCredentialResponse setCredentialInfo(
+ StorageCredentialInfo credentialInfo) {
+ this.credentialInfo = credentialInfo;
+ return this;
+ }
+
+ public StorageCredentialInfo getCredentialInfo() {
+ return credentialInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountsUpdateStorageCredentialResponse that = (AccountsUpdateStorageCredentialResponse) o;
+ return Objects.equals(credentialInfo, that.credentialInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(credentialInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountsUpdateStorageCredentialResponse.class)
+ .add("credentialInfo", credentialInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
index 170f10432..e400d5bdb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
@@ -29,6 +29,10 @@ public class CatalogInfo {
@JsonProperty("connection_name")
private String connectionName;
+ /** Status of conversion of FOREIGN catalog to UC Native catalog. */
+ @JsonProperty("conversion_info")
+ private ConversionInfo conversionInfo;
+
/** Time at which this catalog was created, in epoch milliseconds. */
@JsonProperty("created_at")
private Long createdAt;
@@ -37,6 +41,10 @@ public class CatalogInfo {
@JsonProperty("created_by")
private String createdBy;
+ /** Disaster Recovery replication state snapshot. */
+ @JsonProperty("dr_replication_info")
+ private DrReplicationInfo drReplicationInfo;
+
/** */
@JsonProperty("effective_predictive_optimization_flag")
private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag;
@@ -149,6 +157,15 @@ public String getConnectionName() {
return connectionName;
}
+ public CatalogInfo setConversionInfo(ConversionInfo conversionInfo) {
+ this.conversionInfo = conversionInfo;
+ return this;
+ }
+
+ public ConversionInfo getConversionInfo() {
+ return conversionInfo;
+ }
+
public CatalogInfo setCreatedAt(Long createdAt) {
this.createdAt = createdAt;
return this;
@@ -167,6 +184,15 @@ public String getCreatedBy() {
return createdBy;
}
+ public CatalogInfo setDrReplicationInfo(DrReplicationInfo drReplicationInfo) {
+ this.drReplicationInfo = drReplicationInfo;
+ return this;
+ }
+
+ public DrReplicationInfo getDrReplicationInfo() {
+ return drReplicationInfo;
+ }
+
public CatalogInfo setEffectivePredictiveOptimizationFlag(
EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) {
this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag;
@@ -331,8 +357,10 @@ public boolean equals(Object o) {
&& Objects.equals(catalogType, that.catalogType)
&& Objects.equals(comment, that.comment)
&& Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(conversionInfo, that.conversionInfo)
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
+ && Objects.equals(drReplicationInfo, that.drReplicationInfo)
&& Objects.equals(
effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag)
&& Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization)
@@ -360,8 +388,10 @@ public int hashCode() {
catalogType,
comment,
connectionName,
+ conversionInfo,
createdAt,
createdBy,
+ drReplicationInfo,
effectivePredictiveOptimizationFlag,
enablePredictiveOptimization,
fullName,
@@ -388,8 +418,10 @@ public String toString() {
.add("catalogType", catalogType)
.add("comment", comment)
.add("connectionName", connectionName)
+ .add("conversionInfo", conversionInfo)
.add("createdAt", createdAt)
.add("createdBy", createdBy)
+ .add("drReplicationInfo", drReplicationInfo)
.add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag)
.add("enablePredictiveOptimization", enablePredictiveOptimization)
.add("fullName", fullName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
index c5852d711..a9bd905b4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
@@ -69,6 +69,14 @@ public CatalogInfo get(GetCatalogRequest request) {
* will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has
* the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering
* of the elements in the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable<CatalogInfo> list(ListCatalogsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
index 870e321f2..26c5dc9fa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
@@ -41,6 +41,14 @@ public interface CatalogsService {
* will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has
* the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering
* of the elements in the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListCatalogsResponse list(ListCatalogsRequest listCatalogsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
index 948ca6bb2..a60688b06 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
@@ -35,6 +35,10 @@ public class ConnectionInfo {
@JsonProperty("credential_type")
private CredentialType credentialType;
+ /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */
+ @JsonProperty("environment_settings")
+ private EnvironmentSettings environmentSettings;
+
/** Full name of connection. */
@JsonProperty("full_name")
private String fullName;
@@ -137,6 +141,15 @@ public CredentialType getCredentialType() {
return credentialType;
}
+ public ConnectionInfo setEnvironmentSettings(EnvironmentSettings environmentSettings) {
+ this.environmentSettings = environmentSettings;
+ return this;
+ }
+
+ public EnvironmentSettings getEnvironmentSettings() {
+ return environmentSettings;
+ }
+
public ConnectionInfo setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -256,6 +269,7 @@ public boolean equals(Object o) {
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
&& Objects.equals(credentialType, that.credentialType)
+ && Objects.equals(environmentSettings, that.environmentSettings)
&& Objects.equals(fullName, that.fullName)
&& Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(name, that.name)
@@ -279,6 +293,7 @@ public int hashCode() {
createdAt,
createdBy,
credentialType,
+ environmentSettings,
fullName,
metastoreId,
name,
@@ -302,6 +317,7 @@ public String toString() {
.add("createdAt", createdAt)
.add("createdBy", createdBy)
.add("credentialType", credentialType)
+ .add("environmentSettings", environmentSettings)
.add("fullName", fullName)
.add("metastoreId", metastoreId)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
index f2bdbb8e4..3386cd766 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
@@ -62,7 +62,17 @@ public ConnectionInfo get(GetConnectionRequest request) {
return impl.get(request);
}
- /** List all connections. */
+ /**
+ * List all connections.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
+ */
public Iterable<ConnectionInfo> list(ListConnectionsRequest request) {
return new Paginator<>(
request,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
index 45bea7e36..6800d3aef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
@@ -34,7 +34,17 @@ public interface ConnectionsService {
/** Gets a connection from it's name. */
ConnectionInfo get(GetConnectionRequest getConnectionRequest);
- /** List all connections. */
+ /**
+ * List all connections.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
+ */
ListConnectionsResponse list(ListConnectionsRequest listConnectionsRequest);
/** Updates the connection that matches the supplied name. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java
new file mode 100755
index 000000000..da4dac820
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Status of conversion of FOREIGN entity into UC Native entity. */
+@Generated
+public class ConversionInfo {
+ /** The conversion state of the resource. */
+ @JsonProperty("state")
+ private ConversionInfoState state;
+
+ public ConversionInfo setState(ConversionInfoState state) {
+ this.state = state;
+ return this;
+ }
+
+ public ConversionInfoState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConversionInfo that = (ConversionInfo) o;
+ return Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConversionInfo.class).add("state", state).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java
new file mode 100755
index 000000000..0b3566efa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ConversionInfoState {
+ COMPLETED,
+ IN_PROGRESS,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java
new file mode 100755
index 000000000..2d82924c0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateAccountsMetastore {
+ /** The user-specified name of the metastore. */
+ @JsonProperty("name")
+ private String name;
+
+ /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). */
+ @JsonProperty("region")
+ private String region;
+
+ /** The storage root URL for metastore */
+ @JsonProperty("storage_root")
+ private String storageRoot;
+
+ public CreateAccountsMetastore setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CreateAccountsMetastore setRegion(String region) {
+ this.region = region;
+ return this;
+ }
+
+ public String getRegion() {
+ return region;
+ }
+
+ public CreateAccountsMetastore setStorageRoot(String storageRoot) {
+ this.storageRoot = storageRoot;
+ return this;
+ }
+
+ public String getStorageRoot() {
+ return storageRoot;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateAccountsMetastore that = (CreateAccountsMetastore) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(region, that.region)
+ && Objects.equals(storageRoot, that.storageRoot);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, region, storageRoot);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateAccountsMetastore.class)
+ .add("name", name)
+ .add("region", region)
+ .add("storageRoot", storageRoot)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java
new file mode 100755
index 000000000..8e636a900
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java
@@ -0,0 +1,167 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateAccountsStorageCredential {
+ /** The AWS IAM role configuration. */
+ @JsonProperty("aws_iam_role")
+ private AwsIamRoleRequest awsIamRole;
+
+ /** The Azure managed identity configuration. */
+ @JsonProperty("azure_managed_identity")
+ private AzureManagedIdentityRequest azureManagedIdentity;
+
+ /** The Azure service principal configuration. */
+ @JsonProperty("azure_service_principal")
+ private AzureServicePrincipal azureServicePrincipal;
+
+ /** The Cloudflare API token configuration. */
+ @JsonProperty("cloudflare_api_token")
+ private CloudflareApiToken cloudflareApiToken;
+
+ /** Comment associated with the credential. */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** The Databricks managed GCP service account configuration. */
+ @JsonProperty("databricks_gcp_service_account")
+ private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount;
+
+ /**
+ * The credential name. The name must be unique among storage and service credentials within the
+ * metastore.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * Whether the credential is usable only for read operations. Only applicable when purpose is
+ * **STORAGE**.
+ */
+ @JsonProperty("read_only")
+ private Boolean readOnly;
+
+ public CreateAccountsStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) {
+ this.awsIamRole = awsIamRole;
+ return this;
+ }
+
+ public AwsIamRoleRequest getAwsIamRole() {
+ return awsIamRole;
+ }
+
+ public CreateAccountsStorageCredential setAzureManagedIdentity(
+ AzureManagedIdentityRequest azureManagedIdentity) {
+ this.azureManagedIdentity = azureManagedIdentity;
+ return this;
+ }
+
+ public AzureManagedIdentityRequest getAzureManagedIdentity() {
+ return azureManagedIdentity;
+ }
+
+ public CreateAccountsStorageCredential setAzureServicePrincipal(
+ AzureServicePrincipal azureServicePrincipal) {
+ this.azureServicePrincipal = azureServicePrincipal;
+ return this;
+ }
+
+ public AzureServicePrincipal getAzureServicePrincipal() {
+ return azureServicePrincipal;
+ }
+
+ public CreateAccountsStorageCredential setCloudflareApiToken(
+ CloudflareApiToken cloudflareApiToken) {
+ this.cloudflareApiToken = cloudflareApiToken;
+ return this;
+ }
+
+ public CloudflareApiToken getCloudflareApiToken() {
+ return cloudflareApiToken;
+ }
+
+ public CreateAccountsStorageCredential setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public CreateAccountsStorageCredential setDatabricksGcpServiceAccount(
+ DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) {
+ this.databricksGcpServiceAccount = databricksGcpServiceAccount;
+ return this;
+ }
+
+ public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() {
+ return databricksGcpServiceAccount;
+ }
+
+ public CreateAccountsStorageCredential setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CreateAccountsStorageCredential setReadOnly(Boolean readOnly) {
+ this.readOnly = readOnly;
+ return this;
+ }
+
+ public Boolean getReadOnly() {
+ return readOnly;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateAccountsStorageCredential that = (CreateAccountsStorageCredential) o;
+ return Objects.equals(awsIamRole, that.awsIamRole)
+ && Objects.equals(azureManagedIdentity, that.azureManagedIdentity)
+ && Objects.equals(azureServicePrincipal, that.azureServicePrincipal)
+ && Objects.equals(cloudflareApiToken, that.cloudflareApiToken)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount)
+ && Objects.equals(name, that.name)
+ && Objects.equals(readOnly, that.readOnly);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ awsIamRole,
+ azureManagedIdentity,
+ azureServicePrincipal,
+ cloudflareApiToken,
+ comment,
+ databricksGcpServiceAccount,
+ name,
+ readOnly);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateAccountsStorageCredential.class)
+ .add("awsIamRole", awsIamRole)
+ .add("azureManagedIdentity", azureManagedIdentity)
+ .add("azureServicePrincipal", azureServicePrincipal)
+ .add("cloudflareApiToken", cloudflareApiToken)
+ .add("comment", comment)
+ .add("databricksGcpServiceAccount", databricksGcpServiceAccount)
+ .add("name", name)
+ .add("readOnly", readOnly)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java
index 2d8d187df..de43b40bb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java
@@ -18,6 +18,14 @@ public class CreateCatalog {
@JsonProperty("connection_name")
private String connectionName;
+ /** Status of conversion of FOREIGN catalog to UC Native catalog. */
+ @JsonProperty("conversion_info")
+ private ConversionInfo conversionInfo;
+
+ /** Disaster Recovery replication state snapshot. */
+ @JsonProperty("dr_replication_info")
+ private DrReplicationInfo drReplicationInfo;
+
/** Name of catalog. */
@JsonProperty("name")
private String name;
@@ -65,6 +73,24 @@ public String getConnectionName() {
return connectionName;
}
+ public CreateCatalog setConversionInfo(ConversionInfo conversionInfo) {
+ this.conversionInfo = conversionInfo;
+ return this;
+ }
+
+ public ConversionInfo getConversionInfo() {
+ return conversionInfo;
+ }
+
+ public CreateCatalog setDrReplicationInfo(DrReplicationInfo drReplicationInfo) {
+ this.drReplicationInfo = drReplicationInfo;
+ return this;
+ }
+
+ public DrReplicationInfo getDrReplicationInfo() {
+ return drReplicationInfo;
+ }
+
public CreateCatalog setName(String name) {
this.name = name;
return this;
@@ -126,6 +152,8 @@ public boolean equals(Object o) {
CreateCatalog that = (CreateCatalog) o;
return Objects.equals(comment, that.comment)
&& Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(conversionInfo, that.conversionInfo)
+ && Objects.equals(drReplicationInfo, that.drReplicationInfo)
&& Objects.equals(name, that.name)
&& Objects.equals(options, that.options)
&& Objects.equals(properties, that.properties)
@@ -137,7 +165,16 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- comment, connectionName, name, options, properties, providerName, shareName, storageRoot);
+ comment,
+ connectionName,
+ conversionInfo,
+ drReplicationInfo,
+ name,
+ options,
+ properties,
+ providerName,
+ shareName,
+ storageRoot);
}
@Override
@@ -145,6 +182,8 @@ public String toString() {
return new ToStringer(CreateCatalog.class)
.add("comment", comment)
.add("connectionName", connectionName)
+ .add("conversionInfo", conversionInfo)
+ .add("drReplicationInfo", drReplicationInfo)
.add("name", name)
.add("options", options)
.add("properties", properties)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java
index 3eea7832c..f890b1b6e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java
@@ -18,6 +18,10 @@ public class CreateConnection {
@JsonProperty("connection_type")
private ConnectionType connectionType;
+ /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */
+ @JsonProperty("environment_settings")
+ private EnvironmentSettings environmentSettings;
+
/** Name of the connection. */
@JsonProperty("name")
private String name;
@@ -52,6 +56,15 @@ public ConnectionType getConnectionType() {
return connectionType;
}
+ public CreateConnection setEnvironmentSettings(EnvironmentSettings environmentSettings) {
+ this.environmentSettings = environmentSettings;
+ return this;
+ }
+
+ public EnvironmentSettings getEnvironmentSettings() {
+ return environmentSettings;
+ }
+
public CreateConnection setName(String name) {
this.name = name;
return this;
@@ -95,6 +108,7 @@ public boolean equals(Object o) {
CreateConnection that = (CreateConnection) o;
return Objects.equals(comment, that.comment)
&& Objects.equals(connectionType, that.connectionType)
+ && Objects.equals(environmentSettings, that.environmentSettings)
&& Objects.equals(name, that.name)
&& Objects.equals(options, that.options)
&& Objects.equals(properties, that.properties)
@@ -103,7 +117,8 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(comment, connectionType, name, options, properties, readOnly);
+ return Objects.hash(
+ comment, connectionType, environmentSettings, name, options, properties, readOnly);
}
@Override
@@ -111,6 +126,7 @@ public String toString() {
return new ToStringer(CreateConnection.class)
.add("comment", comment)
.add("connectionType", connectionType)
+ .add("environmentSettings", environmentSettings)
.add("name", name)
.add("options", options)
.add("properties", properties)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java
index 83052df1f..56a9b59e3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java
@@ -9,7 +9,7 @@
@Generated
public class CreateFunction {
- /** Name of parent catalog. */
+ /** Name of parent Catalog. */
@JsonProperty("catalog_name")
private String catalogName;
@@ -33,7 +33,7 @@ public class CreateFunction {
@JsonProperty("full_data_type")
private String fullDataType;
- /** */
+ /** Function input parameters. */
@JsonProperty("input_params")
private FunctionParameterInfos inputParams;
@@ -63,8 +63,8 @@ public class CreateFunction {
/**
* Function language. When **EXTERNAL** is used, the language of the routine function should be
- * specified in the __external_language__ field, and the __return_params__ of the function cannot
- * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
+ * specified in the **external_language** field, and the **return_params** of the function cannot
+ * be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be
* **NO_SQL**.
*/
@JsonProperty("routine_body")
@@ -74,11 +74,11 @@ public class CreateFunction {
@JsonProperty("routine_definition")
private String routineDefinition;
- /** Function dependencies. */
+ /** function dependencies. */
@JsonProperty("routine_dependencies")
private DependencyList routineDependencies;
- /** Name of parent schema relative to its parent catalog. */
+ /** Name of parent Schema relative to its parent Catalog. */
@JsonProperty("schema_name")
private String schemaName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java
index 691b8e514..721bb01f9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** Function parameter style. **S** is the value for SQL. */
@Generated
public enum CreateFunctionParameterStyle {
S,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java
index 6132a4c2a..f5b1b42e9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java
@@ -4,12 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * Function language. When **EXTERNAL** is used, the language of the routine function should be
- * specified in the __external_language__ field, and the __return_params__ of the function cannot be
- * used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
- * **NO_SQL**.
- */
@Generated
public enum CreateFunctionRoutineBody {
EXTERNAL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java
index a0b13a4ee..480b1279a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** The security type of the function. */
@Generated
public enum CreateFunctionSecurityType {
DEFINER,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java
index d8cb91987..28cb1b373 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** Function SQL data access. */
@Generated
public enum CreateFunctionSqlDataAccess {
CONTAINS_SQL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java
index 520b0f60a..71a3650f9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java
@@ -5,10 +5,22 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
@Generated
public class CreateRegisteredModelRequest {
+ /** List of aliases associated with the registered model */
+ @JsonProperty("aliases")
+ private Collection<RegisteredModelAlias> aliases;
+
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** The name of the catalog where the schema and the registered model reside */
@JsonProperty("catalog_name")
private String catalogName;
@@ -17,10 +29,30 @@ public class CreateRegisteredModelRequest {
@JsonProperty("comment")
private String comment;
+ /** Creation timestamp of the registered model in milliseconds since the Unix epoch */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** The identifier of the user who created the registered model */
+ @JsonProperty("created_by")
+ private String createdBy;
+
+ /** The three-level (fully qualified) name of the registered model */
+ @JsonProperty("full_name")
+ private String fullName;
+
+ /** The unique identifier of the metastore */
+ @JsonProperty("metastore_id")
+ private String metastoreId;
+
/** The name of the registered model */
@JsonProperty("name")
private String name;
+ /** The identifier of the user who owns the registered model */
+ @JsonProperty("owner")
+ private String owner;
+
/** The name of the schema where the registered model resides */
@JsonProperty("schema_name")
private String schemaName;
@@ -29,6 +61,32 @@ public class CreateRegisteredModelRequest {
@JsonProperty("storage_location")
private String storageLocation;
+ /** Last-update timestamp of the registered model in milliseconds since the Unix epoch */
+ @JsonProperty("updated_at")
+ private Long updatedAt;
+
+ /** The identifier of the user who updated the registered model last time */
+ @JsonProperty("updated_by")
+ private String updatedBy;
+
+ public CreateRegisteredModelRequest setAliases(Collection<RegisteredModelAlias> aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection<RegisteredModelAlias> getAliases() {
+ return aliases;
+ }
+
+ public CreateRegisteredModelRequest setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public CreateRegisteredModelRequest setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -47,6 +105,42 @@ public String getComment() {
return comment;
}
+ public CreateRegisteredModelRequest setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public CreateRegisteredModelRequest setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ return this;
+ }
+
+ public String getCreatedBy() {
+ return createdBy;
+ }
+
+ public CreateRegisteredModelRequest setFullName(String fullName) {
+ this.fullName = fullName;
+ return this;
+ }
+
+ public String getFullName() {
+ return fullName;
+ }
+
+ public CreateRegisteredModelRequest setMetastoreId(String metastoreId) {
+ this.metastoreId = metastoreId;
+ return this;
+ }
+
+ public String getMetastoreId() {
+ return metastoreId;
+ }
+
public CreateRegisteredModelRequest setName(String name) {
this.name = name;
return this;
@@ -56,6 +150,15 @@ public String getName() {
return name;
}
+ public CreateRegisteredModelRequest setOwner(String owner) {
+ this.owner = owner;
+ return this;
+ }
+
+ public String getOwner() {
+ return owner;
+ }
+
public CreateRegisteredModelRequest setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
@@ -74,31 +177,81 @@ public String getStorageLocation() {
return storageLocation;
}
+ public CreateRegisteredModelRequest setUpdatedAt(Long updatedAt) {
+ this.updatedAt = updatedAt;
+ return this;
+ }
+
+ public Long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public CreateRegisteredModelRequest setUpdatedBy(String updatedBy) {
+ this.updatedBy = updatedBy;
+ return this;
+ }
+
+ public String getUpdatedBy() {
+ return updatedBy;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateRegisteredModelRequest that = (CreateRegisteredModelRequest) o;
- return Objects.equals(catalogName, that.catalogName)
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(browseOnly, that.browseOnly)
+ && Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(createdBy, that.createdBy)
+ && Objects.equals(fullName, that.fullName)
+ && Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(name, that.name)
+ && Objects.equals(owner, that.owner)
&& Objects.equals(schemaName, that.schemaName)
- && Objects.equals(storageLocation, that.storageLocation);
+ && Objects.equals(storageLocation, that.storageLocation)
+ && Objects.equals(updatedAt, that.updatedAt)
+ && Objects.equals(updatedBy, that.updatedBy);
}
@Override
public int hashCode() {
- return Objects.hash(catalogName, comment, name, schemaName, storageLocation);
+ return Objects.hash(
+ aliases,
+ browseOnly,
+ catalogName,
+ comment,
+ createdAt,
+ createdBy,
+ fullName,
+ metastoreId,
+ name,
+ owner,
+ schemaName,
+ storageLocation,
+ updatedAt,
+ updatedBy);
}
@Override
public String toString() {
return new ToStringer(CreateRegisteredModelRequest.class)
+ .add("aliases", aliases)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
+ .add("createdAt", createdAt)
+ .add("createdBy", createdBy)
+ .add("fullName", fullName)
+ .add("metastoreId", metastoreId)
.add("name", name)
+ .add("owner", owner)
.add("schemaName", schemaName)
.add("storageLocation", storageLocation)
+ .add("updatedAt", updatedAt)
+ .add("updatedBy", updatedBy)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java
index 16f0ebbc6..bbe39faf4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java
@@ -29,7 +29,13 @@ public class CreateVolumeRequestContent {
@JsonProperty("storage_location")
private String storageLocation;
- /** */
+ /**
+ * The type of the volume. An external volume is located in the specified external location. A
+ * managed volume is located in the default location which is specified by the parent schema, or
+ * the parent catalog, or the Metastore. [Learn more]
+ *
+ * [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
+ */
@JsonProperty("volume_type")
private VolumeType volumeType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
index 3609ad11f..35a82e3ec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
@@ -17,7 +17,7 @@ public class DeleteFunctionRequest {
/**
* The fully-qualified name of the function (of the form
- * __catalog_name__.__schema_name__.__function__name__).
+ * __catalog_name__.__schema_name__.__function__name__) .
*/
@JsonIgnore private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java
new file mode 100755
index 000000000..e9997e150
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Metadata related to Disaster Recovery. */
+@Generated
+public class DrReplicationInfo {
+ /** */
+ @JsonProperty("status")
+ private DrReplicationStatus status;
+
+ public DrReplicationInfo setStatus(DrReplicationStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public DrReplicationStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DrReplicationInfo that = (DrReplicationInfo) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DrReplicationInfo.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java
new file mode 100755
index 000000000..804977116
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DrReplicationStatus {
+ DR_REPLICATION_STATUS_PRIMARY,
+ DR_REPLICATION_STATUS_SECONDARY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java
new file mode 100755
index 000000000..1e8e3a92e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class EnvironmentSettings {
+ /** */
+ @JsonProperty("environment_version")
+ private String environmentVersion;
+
+ /** */
+ @JsonProperty("java_dependencies")
+ private Collection<String> javaDependencies;
+
+ public EnvironmentSettings setEnvironmentVersion(String environmentVersion) {
+ this.environmentVersion = environmentVersion;
+ return this;
+ }
+
+ public String getEnvironmentVersion() {
+ return environmentVersion;
+ }
+
+ public EnvironmentSettings setJavaDependencies(Collection<String> javaDependencies) {
+ this.javaDependencies = javaDependencies;
+ return this;
+ }
+
+ public Collection<String> getJavaDependencies() {
+ return javaDependencies;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EnvironmentSettings that = (EnvironmentSettings) o;
+ return Objects.equals(environmentVersion, that.environmentVersion)
+ && Objects.equals(javaDependencies, that.javaDependencies);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(environmentVersion, javaDependencies);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EnvironmentSettings.class)
+ .add("environmentVersion", environmentVersion)
+ .add("javaDependencies", javaDependencies)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index 49699b629..1f5fbae89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -73,6 +73,14 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
* caller must be a metastore admin, the owner of the external location, or a user that has some
* privilege on the external location. There is no guarantee of a specific ordering of the
* elements in the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable<ExternalLocationInfo> list(ListExternalLocationsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
index f42879409..fcfe5cc8d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
@@ -45,6 +45,14 @@ public interface ExternalLocationsService {
* caller must be a metastore admin, the owner of the external location, or a user that has some
* privilege on the external location. There is no guarantee of a specific ordering of the
* elements in the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
index 02b5d835d..69ca56bd1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
@@ -16,7 +16,7 @@ public class FunctionInfo {
@JsonProperty("browse_only")
private Boolean browseOnly;
- /** Name of parent catalog. */
+ /** Name of parent Catalog. */
@JsonProperty("catalog_name")
private String catalogName;
@@ -48,7 +48,7 @@ public class FunctionInfo {
@JsonProperty("full_data_type")
private String fullDataType;
- /** Full name of function, in form of __catalog_name__.__schema_name__.__function__name__ */
+ /** Full name of Function, in form of **catalog_name**.**schema_name**.**function_name** */
@JsonProperty("full_name")
private String fullName;
@@ -56,7 +56,7 @@ public class FunctionInfo {
@JsonProperty("function_id")
private String functionId;
- /** */
+ /** Function input parameters. */
@JsonProperty("input_params")
private FunctionParameterInfos inputParams;
@@ -76,7 +76,7 @@ public class FunctionInfo {
@JsonProperty("name")
private String name;
- /** Username of current owner of function. */
+ /** Username of current owner of the function. */
@JsonProperty("owner")
private String owner;
@@ -94,8 +94,8 @@ public class FunctionInfo {
/**
* Function language. When **EXTERNAL** is used, the language of the routine function should be
- * specified in the __external_language__ field, and the __return_params__ of the function cannot
- * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
+ * specified in the **external_language** field, and the **return_params** of the function cannot
+ * be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be
* **NO_SQL**.
*/
@JsonProperty("routine_body")
@@ -105,11 +105,11 @@ public class FunctionInfo {
@JsonProperty("routine_definition")
private String routineDefinition;
- /** Function dependencies. */
+ /** function dependencies. */
@JsonProperty("routine_dependencies")
private DependencyList routineDependencies;
- /** Name of parent schema relative to its parent catalog. */
+ /** Name of parent Schema relative to its parent Catalog. */
@JsonProperty("schema_name")
private String schemaName;
@@ -129,11 +129,11 @@ public class FunctionInfo {
@JsonProperty("sql_path")
private String sqlPath;
- /** Time at which this function was created, in epoch milliseconds. */
+ /** Time at which this function was last modified, in epoch milliseconds. */
@JsonProperty("updated_at")
private Long updatedAt;
- /** Username of user who last modified function. */
+ /** Username of user who last modified the function. */
@JsonProperty("updated_by")
private String updatedBy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java
index fab71fe1e..608574f72 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** Function parameter style. **S** is the value for SQL. */
@Generated
public enum FunctionInfoParameterStyle {
S,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java
index 24f8266e0..f69f1f670 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java
@@ -4,12 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * Function language. When **EXTERNAL** is used, the language of the routine function should be
- * specified in the __external_language__ field, and the __return_params__ of the function cannot be
- * used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
- * **NO_SQL**.
- */
@Generated
public enum FunctionInfoRoutineBody {
EXTERNAL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
index 5b45675b4..ce6545a69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** The security type of the function. */
@Generated
public enum FunctionInfoSecurityType {
DEFINER,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java
index 69b362394..fee8adcc8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** Function SQL data access. */
@Generated
public enum FunctionInfoSqlDataAccess {
CONTAINS_SQL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
index 7e41e1dc0..ce5724ef9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
@@ -13,7 +13,7 @@ public class FunctionParameterInfo {
@JsonProperty("comment")
private String comment;
- /** Name of parameter. */
+ /** Name of Parameter. */
@JsonProperty("name")
private String name;
@@ -21,11 +21,11 @@ public class FunctionParameterInfo {
@JsonProperty("parameter_default")
private String parameterDefault;
- /** */
+ /** Function parameter mode. */
@JsonProperty("parameter_mode")
private FunctionParameterMode parameterMode;
- /** */
+ /** Function parameter type. */
@JsonProperty("parameter_type")
private FunctionParameterType parameterType;
@@ -41,7 +41,7 @@ public class FunctionParameterInfo {
@JsonProperty("type_json")
private String typeJson;
- /** */
+ /** Name of type (INT, STRUCT, MAP, etc.) */
@JsonProperty("type_name")
private ColumnTypeName typeName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java
index 8242101b9..f2941005c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java
@@ -10,7 +10,7 @@
@Generated
public class FunctionParameterInfos {
- /** The array of __FunctionParameterInfo__ definitions of the function's parameters. */
+ /** */
@JsonProperty("parameters")
private Collection<FunctionParameterInfo> parameters;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java
index 731e91b2c..48a9a1870 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** The mode of the function parameter. */
@Generated
public enum FunctionParameterMode {
IN,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java
index 7e930bcd6..046f5037f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** The type of function parameter. */
@Generated
public enum FunctionParameterType {
COLUMN,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
index ac2c1815a..f759c65bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
@@ -86,6 +86,14 @@ public Iterable list(String catalogName, String schemaName) {
* the output list contains only functions for which either the user has the **EXECUTE** privilege
* or the user is the owner. There is no guarantee of a specific ordering of the elements in the
* array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable<FunctionInfo> list(ListFunctionsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
index 387db0b64..2f7c48378 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
@@ -36,7 +36,6 @@ public void delete(DeleteFunctionRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
index a88771087..3fbcccaa8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
@@ -54,6 +54,14 @@ public interface FunctionsService {
* the output list contains only functions for which either the user has the **EXECUTE** privilege
* or the user is the owner. There is no guarantee of a specific ordering of the elements in the
* array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
index 29c20c2fe..54943217b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
@@ -12,7 +12,7 @@ public class GetAccountStorageCredentialRequest {
/** Unity Catalog metastore ID */
@JsonIgnore private String metastoreId;
- /** Name of the storage credential. */
+ /** Required. Name of the storage credential. */
@JsonIgnore private String storageCredentialName;
public GetAccountStorageCredentialRequest setMetastoreId(String metastoreId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
index 59e2565c2..4e83c75d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
@@ -13,6 +13,11 @@ public class GetGrantRequest {
/** Full name of securable. */
@JsonIgnore private String fullName;
+ /** Optional. If true, also return privilege assignments whose principals have been deleted. */
+ @JsonIgnore
+ @QueryParam("include_deleted_principals")
+ private Boolean includeDeletedPrincipals;
+
/**
* Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment
* present in a single page response is guaranteed to contain all the privileges granted on the
@@ -50,6 +55,15 @@ public String getFullName() {
return fullName;
}
+ public GetGrantRequest setIncludeDeletedPrincipals(Boolean includeDeletedPrincipals) {
+ this.includeDeletedPrincipals = includeDeletedPrincipals;
+ return this;
+ }
+
+ public Boolean getIncludeDeletedPrincipals() {
+ return includeDeletedPrincipals;
+ }
+
public GetGrantRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -92,6 +106,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GetGrantRequest that = (GetGrantRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeDeletedPrincipals, that.includeDeletedPrincipals)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(principal, that.principal)
@@ -100,13 +115,15 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(fullName, maxResults, pageToken, principal, securableType);
+ return Objects.hash(
+ fullName, includeDeletedPrincipals, maxResults, pageToken, principal, securableType);
}
@Override
public String toString() {
return new ToStringer(GetGrantRequest.class)
.add("fullName", fullName)
+ .add("includeDeletedPrincipals", includeDeletedPrincipals)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("principal", principal)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java
index 5ca1d4263..be5de6d2e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java
@@ -8,7 +8,7 @@
import java.util.Collection;
import java.util.Objects;
-/** The list of workspaces to which the given metastore is assigned. */
+/** The metastore assignments were successfully returned. */
@Generated
public class ListAccountMetastoreAssignmentsResponse {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java
index a5da186e4..fde3a512a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java
@@ -8,6 +8,7 @@
import java.util.Collection;
import java.util.Objects;
+/** The metastore storage credentials were successfully returned. */
@Generated
public class ListAccountStorageCredentialsResponse {
/** An array of metastore storage credentials. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
index 05fe12886..758e8afff 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
@@ -18,6 +18,14 @@ public class ListCatalogsRequest {
@QueryParam("include_browse")
private Boolean includeBrowse;
+ /**
+ * Whether to include catalogs not bound to the workspace. Effective only if the user has
+ * permission to update the catalog–workspace binding.
+ */
+ @JsonIgnore
+ @QueryParam("include_unbound")
+ private Boolean includeUnbound;
+
/**
* Maximum number of catalogs to return. - when set to 0, the page length is set to a server
* configured value (recommended); - when set to a value greater than 0, the page length is the
@@ -45,6 +53,15 @@ public Boolean getIncludeBrowse() {
return includeBrowse;
}
+ public ListCatalogsRequest setIncludeUnbound(Boolean includeUnbound) {
+ this.includeUnbound = includeUnbound;
+ return this;
+ }
+
+ public Boolean getIncludeUnbound() {
+ return includeUnbound;
+ }
+
public ListCatalogsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -69,19 +86,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListCatalogsRequest that = (ListCatalogsRequest) o;
return Objects.equals(includeBrowse, that.includeBrowse)
+ && Objects.equals(includeUnbound, that.includeUnbound)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(includeBrowse, maxResults, pageToken);
+ return Objects.hash(includeBrowse, includeUnbound, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListCatalogsRequest.class)
.add("includeBrowse", includeBrowse)
+ .add("includeUnbound", includeUnbound)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
index 32dfc1888..c9b2c2dc2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
@@ -10,6 +10,14 @@
@Generated
public class ListCredentialsRequest {
+ /**
+ * Whether to include credentials not bound to the workspace. Effective only if the user has
+ * permission to update the credential–workspace binding.
+ */
+ @JsonIgnore
+ @QueryParam("include_unbound")
+ private Boolean includeUnbound;
+
/**
* Maximum number of credentials to return. - If not set, the default max page size is used. -
* When set to a value greater than 0, the page length is the minimum of this value and a
@@ -30,6 +38,15 @@ public class ListCredentialsRequest {
@QueryParam("purpose")
private CredentialPurpose purpose;
+ public ListCredentialsRequest setIncludeUnbound(Boolean includeUnbound) {
+ this.includeUnbound = includeUnbound;
+ return this;
+ }
+
+ public Boolean getIncludeUnbound() {
+ return includeUnbound;
+ }
+
public ListCredentialsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -62,19 +79,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListCredentialsRequest that = (ListCredentialsRequest) o;
- return Objects.equals(maxResults, that.maxResults)
+ return Objects.equals(includeUnbound, that.includeUnbound)
+ && Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(purpose, that.purpose);
}
@Override
public int hashCode() {
- return Objects.hash(maxResults, pageToken, purpose);
+ return Objects.hash(includeUnbound, maxResults, pageToken, purpose);
}
@Override
public String toString() {
return new ToStringer(ListCredentialsRequest.class)
+ .add("includeUnbound", includeUnbound)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("purpose", purpose)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
index ce3805d49..71bfa3314 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
@@ -18,6 +18,14 @@ public class ListExternalLocationsRequest {
@QueryParam("include_browse")
private Boolean includeBrowse;
+ /**
+ * Whether to include external locations not bound to the workspace. Effective only if the user
+ * has permission to update the location–workspace binding.
+ */
+ @JsonIgnore
+ @QueryParam("include_unbound")
+ private Boolean includeUnbound;
+
/**
* Maximum number of external locations to return. If not set, all the external locations are
* returned (not recommended). - when set to a value greater than 0, the page length is the
@@ -43,6 +51,15 @@ public Boolean getIncludeBrowse() {
return includeBrowse;
}
+ public ListExternalLocationsRequest setIncludeUnbound(Boolean includeUnbound) {
+ this.includeUnbound = includeUnbound;
+ return this;
+ }
+
+ public Boolean getIncludeUnbound() {
+ return includeUnbound;
+ }
+
public ListExternalLocationsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -67,19 +84,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListExternalLocationsRequest that = (ListExternalLocationsRequest) o;
return Objects.equals(includeBrowse, that.includeBrowse)
+ && Objects.equals(includeUnbound, that.includeUnbound)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(includeBrowse, maxResults, pageToken);
+ return Objects.hash(includeBrowse, includeUnbound, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListExternalLocationsRequest.class)
.add("includeBrowse", includeBrowse)
+ .add("includeUnbound", includeUnbound)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
index c91be8012..9f1f82035 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
@@ -10,6 +10,14 @@
@Generated
public class ListStorageCredentialsRequest {
+ /**
+ * Whether to include credentials not bound to the workspace. Effective only if the user has
+ * permission to update the credential–workspace binding.
+ */
+ @JsonIgnore
+ @QueryParam("include_unbound")
+ private Boolean includeUnbound;
+
/**
* Maximum number of storage credentials to return. If not set, all the storage credentials are
* returned (not recommended). - when set to a value greater than 0, the page length is the
@@ -26,6 +34,15 @@ public class ListStorageCredentialsRequest {
@QueryParam("page_token")
private String pageToken;
+ public ListStorageCredentialsRequest setIncludeUnbound(Boolean includeUnbound) {
+ this.includeUnbound = includeUnbound;
+ return this;
+ }
+
+ public Boolean getIncludeUnbound() {
+ return includeUnbound;
+ }
+
public ListStorageCredentialsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -49,17 +66,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListStorageCredentialsRequest that = (ListStorageCredentialsRequest) o;
- return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken);
+ return Objects.equals(includeUnbound, that.includeUnbound)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(maxResults, pageToken);
+ return Objects.hash(includeUnbound, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListStorageCredentialsRequest.class)
+ .add("includeUnbound", includeUnbound)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java
index 0d9640d5f..3f78acc3b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java
@@ -9,7 +9,10 @@
@Generated
public class MetastoreAssignment {
- /** The name of the default catalog in the metastore. */
+ /**
+ * The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+ * Namespace API" to configure the default catalog for a Databricks workspace.
+ */
@JsonProperty("default_catalog_name")
private String defaultCatalogName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
index e9d5011e1..ef832d957 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
@@ -85,6 +85,14 @@ public MetastoreInfo get(GetMetastoreRequest request) {
* Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an
* admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
* the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable<MetastoreInfo> list(ListMetastoresRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
index 2f6f582ca..ff228ee46 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
@@ -53,6 +53,14 @@ public interface MetastoresService {
* Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an
* admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
* the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListMetastoresResponse list(ListMetastoresRequest listMetastoresRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
index 8dbd67ae1..dd2cc00b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
@@ -14,13 +14,6 @@ public class ModelVersionInfo {
@JsonProperty("aliases")
private Collection<RegisteredModelAlias> aliases;
- /**
- * Indicates whether the principal is limited to retrieving metadata for the associated object
- * through the BROWSE privilege when include_browse is enabled in the request.
- */
- @JsonProperty("browse_only")
- private Boolean browseOnly;
-
/** The name of the catalog containing the model version */
@JsonProperty("catalog_name")
private String catalogName;
@@ -109,15 +102,6 @@ public Collection getAliases() {
return aliases;
}
- public ModelVersionInfo setBrowseOnly(Boolean browseOnly) {
- this.browseOnly = browseOnly;
- return this;
- }
-
- public Boolean getBrowseOnly() {
- return browseOnly;
- }
-
public ModelVersionInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -277,7 +261,6 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ModelVersionInfo that = (ModelVersionInfo) o;
return Objects.equals(aliases, that.aliases)
- && Objects.equals(browseOnly, that.browseOnly)
&& Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
@@ -301,7 +284,6 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
aliases,
- browseOnly,
catalogName,
comment,
createdAt,
@@ -325,7 +307,6 @@ public int hashCode() {
public String toString() {
return new ToStringer(ModelVersionInfo.class)
.add("aliases", aliases)
- .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
.add("createdAt", createdAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java
index ffbbbc6a5..5ff0b7b8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java
@@ -4,14 +4,10 @@
import com.databricks.sdk.support.Generated;
-/**
- * Current status of the model version. Newly created model versions start in PENDING_REGISTRATION
- * status, then move to READY status once the model version files are uploaded and the model version
- * is finalized. Only model versions in READY status can be loaded for inference or served.
- */
@Generated
public enum ModelVersionInfoStatus {
FAILED_REGISTRATION,
+ MODEL_VERSION_STATUS_UNKNOWN,
PENDING_REGISTRATION,
READY,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
index cbd425817..9860e1026 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
@@ -21,6 +21,18 @@ public class PermissionsChange {
@JsonProperty("principal")
private String principal;
+ /**
+ * An opaque internal ID that identifies the principal whose privileges should be removed.
+ *
+ * This field is intended for removing privileges associated with a deleted user. When set,
+ * only the entries specified in the remove field are processed; any entries in the add field will
+ * be rejected.
+ *
+ * <p>Only one of principal or principal_id should be specified, never both at the same time.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The set of privileges to remove. */
@JsonProperty("remove")
private Collection<Privilege> remove;
@@ -43,6 +55,15 @@ public String getPrincipal() {
return principal;
}
+ public PermissionsChange setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PermissionsChange setRemove(Collection<Privilege> remove) {
this.remove = remove;
return this;
@@ -59,12 +80,13 @@ public boolean equals(Object o) {
PermissionsChange that = (PermissionsChange) o;
return Objects.equals(add, that.add)
&& Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
&& Objects.equals(remove, that.remove);
}
@Override
public int hashCode() {
- return Objects.hash(add, principal, remove);
+ return Objects.hash(add, principal, principalId, remove);
}
@Override
@@ -72,6 +94,7 @@ public String toString() {
return new ToStringer(PermissionsChange.class)
.add("add", add)
.add("principal", principal)
+ .add("principalId", principalId)
.add("remove", remove)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
index 9d4ea5c05..5b76c8967 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
@@ -34,6 +34,7 @@ public enum Privilege {
CREATE_VOLUME,
EXECUTE,
EXECUTE_CLEAN_ROOM_TASK,
+ EXTERNAL_USE_SCHEMA,
MANAGE,
MANAGE_ALLOWLIST,
MODIFY,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
index 3781e98f5..4dd3f9910 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
@@ -17,6 +17,13 @@ public class PrivilegeAssignment {
@JsonProperty("principal")
private String principal;
+ /**
+ * Unique identifier of the principal. For active principals, both `principal` and `principal_id`
+ * are present.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The privileges assigned to the principal. */
@JsonProperty("privileges")
private Collection<Privilege> privileges;
@@ -30,6 +37,15 @@ public String getPrincipal() {
return principal;
}
+ public PrivilegeAssignment setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PrivilegeAssignment setPrivileges(Collection<Privilege> privileges) {
this.privileges = privileges;
return this;
@@ -44,18 +60,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PrivilegeAssignment that = (PrivilegeAssignment) o;
- return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges);
+ return Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
+ && Objects.equals(privileges, that.privileges);
}
@Override
public int hashCode() {
- return Objects.hash(principal, privileges);
+ return Objects.hash(principal, principalId, privileges);
}
@Override
public String toString() {
return new ToStringer(PrivilegeAssignment.class)
.add("principal", principal)
+ .add("principalId", principalId)
.add("privileges", privileges)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java
index 3bfde7bc5..043755b1d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java
@@ -7,13 +7,28 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Registered model alias. */
@Generated
public class RegisteredModelAlias {
/** Name of the alias, e.g. 'champion' or 'latest_stable' */
@JsonProperty("alias_name")
private String aliasName;
+ /** The name of the catalog containing the model version */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** The unique identifier of the alias */
+ @JsonProperty("id")
+ private String id;
+
+ /** The name of the parent registered model of the model version, relative to parent schema */
+ @JsonProperty("model_name")
+ private String modelName;
+
+ /** The name of the schema containing the model version, relative to parent catalog */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
/** Integer version number of the model version to which this alias points. */
@JsonProperty("version_num")
private Long versionNum;
@@ -27,6 +42,42 @@ public String getAliasName() {
return aliasName;
}
+ public RegisteredModelAlias setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public RegisteredModelAlias setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public RegisteredModelAlias setModelName(String modelName) {
+ this.modelName = modelName;
+ return this;
+ }
+
+ public String getModelName() {
+ return modelName;
+ }
+
+ public RegisteredModelAlias setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
public RegisteredModelAlias setVersionNum(Long versionNum) {
this.versionNum = versionNum;
return this;
@@ -41,18 +92,27 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RegisteredModelAlias that = (RegisteredModelAlias) o;
- return Objects.equals(aliasName, that.aliasName) && Objects.equals(versionNum, that.versionNum);
+ return Objects.equals(aliasName, that.aliasName)
+ && Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(id, that.id)
+ && Objects.equals(modelName, that.modelName)
+ && Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(versionNum, that.versionNum);
}
@Override
public int hashCode() {
- return Objects.hash(aliasName, versionNum);
+ return Objects.hash(aliasName, catalogName, id, modelName, schemaName, versionNum);
}
@Override
public String toString() {
return new ToStringer(RegisteredModelAlias.class)
.add("aliasName", aliasName)
+ .add("catalogName", catalogName)
+ .add("id", id)
+ .add("modelName", modelName)
+ .add("schemaName", schemaName)
.add("versionNum", versionNum)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
index 608438656..e5c508180 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
@@ -30,8 +30,8 @@
* metadata (comments, aliases) create a new model version, or update permissions on the registered
* model, users must be owners of the registered model.
*
- * Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants)
- * that specify a securable type, use "FUNCTION" as the securable type.
+ * <p>Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants)
+ * that specify a securable type, use FUNCTION as the securable type.
*/
@Generated
public class RegisteredModelsAPI {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
index a03772d04..ccc99737b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
@@ -26,8 +26,8 @@
* metadata (comments, aliases) create a new model version, or update permissions on the registered
* model, users must be owners of the registered model.
*
- * <p>Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants)
- * that specify a securable type, use "FUNCTION" as the securable type.
+ * <p>Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants)
+ * that specify a securable type, use FUNCTION as the securable type.
*
* <p>This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index f8657471a..3edf06b15 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
public Iterable<SchemaInfo> list(String catalogName) {
* or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise,
* only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege)
* will be retrieved. There is no guarantee of a specific ordering of the elements in the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
index abe123cb5..74b235095 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
@@ -38,6 +38,14 @@ public interface SchemasService {
* or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise,
* only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege)
* will be retrieved. There is no guarantee of a specific ordering of the elements in the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListSchemasResponse list(ListSchemasRequest listSchemasRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index fd09c0225..a02ad9204 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: CONNECTION_PALANTIR_OAUTH_M2M = 263; Next id:264 */
+/** Latest kind: CONNECTION_REDSHIFT_IAM = 265; Next id:266 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java
index 3c6c39fde..a024d5ded 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java
@@ -4,18 +4,17 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
@Generated
public class SetRegisteredModelAliasRequest {
/** The name of the alias */
- @JsonProperty("alias")
- private String alias;
+ @JsonIgnore private String alias;
- /** Full name of the registered model */
- @JsonProperty("full_name")
- private String fullName;
+ /** The three-level (fully qualified) name of the registered model */
+ @JsonIgnore private String fullName;
/** The version number of the model version to which the alias points */
@JsonProperty("version_num")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
index 2ce220029..89ee608ec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
@@ -75,6 +75,14 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
* limited to only those storage credentials the caller has permission to access. If the caller is
* a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
* specific ordering of the elements in the array.
+ *
+ * <p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable list(ListStorageCredentialsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
index b403f3e83..4687ed10e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
@@ -47,6 +47,14 @@ public interface StorageCredentialsService {
* limited to only those storage credentials the caller has permission to access. If the caller is
* a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
* specific ordering of the elements in the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
index d6784e1e6..a62bef4ca 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
@@ -55,6 +55,14 @@ public Iterable list(String metastoreId) {
/**
* Gets an array of system schemas for a metastore. The caller must be an account admin or a
* metastore admin.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable list(ListSystemSchemasRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java
index 01ce9aa13..59cf8627b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java
@@ -29,6 +29,14 @@ public interface SystemSchemasService {
/**
* Gets an array of system schemas for a metastore. The caller must be an account admin or a
* metastore admin.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListSystemSchemasResponse list(ListSystemSchemasRequest listSystemSchemasRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java
index 324e1b850..527a800b8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java
@@ -25,6 +25,7 @@ public enum SystemType {
SAP,
SERVICENOW,
SNOWFLAKE,
+ STREAM_NATIVE,
TABLEAU,
TERADATA,
WORKDAY,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 71c7e2a1b..7ae3a0063 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -116,6 +116,14 @@ public Iterable list(String catalogName, String schemaName) {
* table. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
* privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is
* no guarantee of a specific ordering of the elements in the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
public Iterable list(ListTablesRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
index 6c08d2bc0..0f516bcd3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
@@ -78,6 +78,14 @@ public interface TablesService {
* table. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
* privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is
* no guarantee of a specific ordering of the elements in the array.
+ *
+ * NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated
+ * calls will be deprecated soon.
+ *
+ * <p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero
+ * results while still providing a next_page_token. Clients must continue reading pages until
+ * next_page_token is absent, which is the only indication that the end of results has been
+ * reached. This behavior follows Google AIP-158 guidelines.
*/
ListTablesResponse list(ListTablesRequest listTablesRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java
new file mode 100755
index 000000000..215f0eacf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java
@@ -0,0 +1,134 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateAccountsMetastore {
+ /**
+ * The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta
+ * Sharing as the official name.
+ */
+ @JsonProperty("delta_sharing_organization_name")
+ private String deltaSharingOrganizationName;
+
+ /** The lifetime of delta sharing recipient token in seconds. */
+ @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds")
+ private Long deltaSharingRecipientTokenLifetimeInSeconds;
+
+ /** The scope of Delta Sharing enabled for the metastore. */
+ @JsonProperty("delta_sharing_scope")
+ private DeltaSharingScopeEnum deltaSharingScope;
+
+ /** The owner of the metastore. */
+ @JsonProperty("owner")
+ private String owner;
+
+ /** Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). */
+ @JsonProperty("privilege_model_version")
+ private String privilegeModelVersion;
+
+ /** UUID of storage credential to access the metastore storage_root. */
+ @JsonProperty("storage_root_credential_id")
+ private String storageRootCredentialId;
+
+ public UpdateAccountsMetastore setDeltaSharingOrganizationName(
+ String deltaSharingOrganizationName) {
+ this.deltaSharingOrganizationName = deltaSharingOrganizationName;
+ return this;
+ }
+
+ public String getDeltaSharingOrganizationName() {
+ return deltaSharingOrganizationName;
+ }
+
+ public UpdateAccountsMetastore setDeltaSharingRecipientTokenLifetimeInSeconds(
+ Long deltaSharingRecipientTokenLifetimeInSeconds) {
+ this.deltaSharingRecipientTokenLifetimeInSeconds = deltaSharingRecipientTokenLifetimeInSeconds;
+ return this;
+ }
+
+ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() {
+ return deltaSharingRecipientTokenLifetimeInSeconds;
+ }
+
+ public UpdateAccountsMetastore setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) {
+ this.deltaSharingScope = deltaSharingScope;
+ return this;
+ }
+
+ public DeltaSharingScopeEnum getDeltaSharingScope() {
+ return deltaSharingScope;
+ }
+
+ public UpdateAccountsMetastore setOwner(String owner) {
+ this.owner = owner;
+ return this;
+ }
+
+ public String getOwner() {
+ return owner;
+ }
+
+ public UpdateAccountsMetastore setPrivilegeModelVersion(String privilegeModelVersion) {
+ this.privilegeModelVersion = privilegeModelVersion;
+ return this;
+ }
+
+ public String getPrivilegeModelVersion() {
+ return privilegeModelVersion;
+ }
+
+ public UpdateAccountsMetastore setStorageRootCredentialId(String storageRootCredentialId) {
+ this.storageRootCredentialId = storageRootCredentialId;
+ return this;
+ }
+
+ public String getStorageRootCredentialId() {
+ return storageRootCredentialId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAccountsMetastore that = (UpdateAccountsMetastore) o;
+ return Objects.equals(deltaSharingOrganizationName, that.deltaSharingOrganizationName)
+ && Objects.equals(
+ deltaSharingRecipientTokenLifetimeInSeconds,
+ that.deltaSharingRecipientTokenLifetimeInSeconds)
+ && Objects.equals(deltaSharingScope, that.deltaSharingScope)
+ && Objects.equals(owner, that.owner)
+ && Objects.equals(privilegeModelVersion, that.privilegeModelVersion)
+ && Objects.equals(storageRootCredentialId, that.storageRootCredentialId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ deltaSharingOrganizationName,
+ deltaSharingRecipientTokenLifetimeInSeconds,
+ deltaSharingScope,
+ owner,
+ privilegeModelVersion,
+ storageRootCredentialId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAccountsMetastore.class)
+ .add("deltaSharingOrganizationName", deltaSharingOrganizationName)
+ .add(
+ "deltaSharingRecipientTokenLifetimeInSeconds",
+ deltaSharingRecipientTokenLifetimeInSeconds)
+ .add("deltaSharingScope", deltaSharingScope)
+ .add("owner", owner)
+ .add("privilegeModelVersion", privilegeModelVersion)
+ .add("storageRootCredentialId", storageRootCredentialId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java
new file mode 100755
index 000000000..22801de40
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java
@@ -0,0 +1,183 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateAccountsStorageCredential {
+ /** The AWS IAM role configuration. */
+ @JsonProperty("aws_iam_role")
+ private AwsIamRoleRequest awsIamRole;
+
+ /** The Azure managed identity configuration. */
+ @JsonProperty("azure_managed_identity")
+ private AzureManagedIdentityResponse azureManagedIdentity;
+
+ /** The Azure service principal configuration. */
+ @JsonProperty("azure_service_principal")
+ private AzureServicePrincipal azureServicePrincipal;
+
+ /** The Cloudflare API token configuration. */
+ @JsonProperty("cloudflare_api_token")
+ private CloudflareApiToken cloudflareApiToken;
+
+ /** Comment associated with the credential. */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** The Databricks managed GCP service account configuration. */
+ @JsonProperty("databricks_gcp_service_account")
+ private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount;
+
+ /**
+ * Whether the current securable is accessible from all workspaces or a specific set of
+ * workspaces.
+ */
+ @JsonProperty("isolation_mode")
+ private IsolationMode isolationMode;
+
+ /** Username of current owner of credential. */
+ @JsonProperty("owner")
+ private String owner;
+
+ /**
+ * Whether the credential is usable only for read operations. Only applicable when purpose is
+ * **STORAGE**.
+ */
+ @JsonProperty("read_only")
+ private Boolean readOnly;
+
+ public UpdateAccountsStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) {
+ this.awsIamRole = awsIamRole;
+ return this;
+ }
+
+ public AwsIamRoleRequest getAwsIamRole() {
+ return awsIamRole;
+ }
+
+ public UpdateAccountsStorageCredential setAzureManagedIdentity(
+ AzureManagedIdentityResponse azureManagedIdentity) {
+ this.azureManagedIdentity = azureManagedIdentity;
+ return this;
+ }
+
+ public AzureManagedIdentityResponse getAzureManagedIdentity() {
+ return azureManagedIdentity;
+ }
+
+ public UpdateAccountsStorageCredential setAzureServicePrincipal(
+ AzureServicePrincipal azureServicePrincipal) {
+ this.azureServicePrincipal = azureServicePrincipal;
+ return this;
+ }
+
+ public AzureServicePrincipal getAzureServicePrincipal() {
+ return azureServicePrincipal;
+ }
+
+ public UpdateAccountsStorageCredential setCloudflareApiToken(
+ CloudflareApiToken cloudflareApiToken) {
+ this.cloudflareApiToken = cloudflareApiToken;
+ return this;
+ }
+
+ public CloudflareApiToken getCloudflareApiToken() {
+ return cloudflareApiToken;
+ }
+
+ public UpdateAccountsStorageCredential setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public UpdateAccountsStorageCredential setDatabricksGcpServiceAccount(
+ DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) {
+ this.databricksGcpServiceAccount = databricksGcpServiceAccount;
+ return this;
+ }
+
+ public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() {
+ return databricksGcpServiceAccount;
+ }
+
+ public UpdateAccountsStorageCredential setIsolationMode(IsolationMode isolationMode) {
+ this.isolationMode = isolationMode;
+ return this;
+ }
+
+ public IsolationMode getIsolationMode() {
+ return isolationMode;
+ }
+
+ public UpdateAccountsStorageCredential setOwner(String owner) {
+ this.owner = owner;
+ return this;
+ }
+
+ public String getOwner() {
+ return owner;
+ }
+
+ public UpdateAccountsStorageCredential setReadOnly(Boolean readOnly) {
+ this.readOnly = readOnly;
+ return this;
+ }
+
+ public Boolean getReadOnly() {
+ return readOnly;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAccountsStorageCredential that = (UpdateAccountsStorageCredential) o;
+ return Objects.equals(awsIamRole, that.awsIamRole)
+ && Objects.equals(azureManagedIdentity, that.azureManagedIdentity)
+ && Objects.equals(azureServicePrincipal, that.azureServicePrincipal)
+ && Objects.equals(cloudflareApiToken, that.cloudflareApiToken)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount)
+ && Objects.equals(isolationMode, that.isolationMode)
+ && Objects.equals(owner, that.owner)
+ && Objects.equals(readOnly, that.readOnly);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ awsIamRole,
+ azureManagedIdentity,
+ azureServicePrincipal,
+ cloudflareApiToken,
+ comment,
+ databricksGcpServiceAccount,
+ isolationMode,
+ owner,
+ readOnly);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAccountsStorageCredential.class)
+ .add("awsIamRole", awsIamRole)
+ .add("azureManagedIdentity", azureManagedIdentity)
+ .add("azureServicePrincipal", azureServicePrincipal)
+ .add("cloudflareApiToken", cloudflareApiToken)
+ .add("comment", comment)
+ .add("databricksGcpServiceAccount", databricksGcpServiceAccount)
+ .add("isolationMode", isolationMode)
+ .add("owner", owner)
+ .add("readOnly", readOnly)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
index b817347f1..e5352ef9d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
@@ -15,6 +15,14 @@ public class UpdateCatalog {
@JsonProperty("comment")
private String comment;
+ /** Status of conversion of FOREIGN catalog to UC Native catalog. */
+ @JsonProperty("conversion_info")
+ private ConversionInfo conversionInfo;
+
+ /** Disaster Recovery replication state snapshot. */
+ @JsonProperty("dr_replication_info")
+ private DrReplicationInfo drReplicationInfo;
+
/** Whether predictive optimization should be enabled for this object and objects under it. */
@JsonProperty("enable_predictive_optimization")
private EnablePredictiveOptimization enablePredictiveOptimization;
@@ -54,6 +62,24 @@ public String getComment() {
return comment;
}
+ public UpdateCatalog setConversionInfo(ConversionInfo conversionInfo) {
+ this.conversionInfo = conversionInfo;
+ return this;
+ }
+
+ public ConversionInfo getConversionInfo() {
+ return conversionInfo;
+ }
+
+ public UpdateCatalog setDrReplicationInfo(DrReplicationInfo drReplicationInfo) {
+ this.drReplicationInfo = drReplicationInfo;
+ return this;
+ }
+
+ public DrReplicationInfo getDrReplicationInfo() {
+ return drReplicationInfo;
+ }
+
public UpdateCatalog setEnablePredictiveOptimization(
EnablePredictiveOptimization enablePredictiveOptimization) {
this.enablePredictiveOptimization = enablePredictiveOptimization;
@@ -124,6 +150,8 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
UpdateCatalog that = (UpdateCatalog) o;
return Objects.equals(comment, that.comment)
+ && Objects.equals(conversionInfo, that.conversionInfo)
+ && Objects.equals(drReplicationInfo, that.drReplicationInfo)
&& Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization)
&& Objects.equals(isolationMode, that.isolationMode)
&& Objects.equals(name, that.name)
@@ -137,6 +165,8 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
comment,
+ conversionInfo,
+ drReplicationInfo,
enablePredictiveOptimization,
isolationMode,
name,
@@ -150,6 +180,8 @@ public int hashCode() {
public String toString() {
return new ToStringer(UpdateCatalog.class)
.add("comment", comment)
+ .add("conversionInfo", conversionInfo)
+ .add("drReplicationInfo", drReplicationInfo)
.add("enablePredictiveOptimization", enablePredictiveOptimization)
.add("isolationMode", isolationMode)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
index d37165c3a..5119aa1fa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
@@ -11,6 +11,10 @@
@Generated
public class UpdateConnection {
+ /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */
+ @JsonProperty("environment_settings")
+ private EnvironmentSettings environmentSettings;
+
/** Name of the connection. */
@JsonIgnore private String name;
@@ -26,6 +30,15 @@ public class UpdateConnection {
@JsonProperty("owner")
private String owner;
+ public UpdateConnection setEnvironmentSettings(EnvironmentSettings environmentSettings) {
+ this.environmentSettings = environmentSettings;
+ return this;
+ }
+
+ public EnvironmentSettings getEnvironmentSettings() {
+ return environmentSettings;
+ }
+
public UpdateConnection setName(String name) {
this.name = name;
return this;
@@ -67,7 +80,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateConnection that = (UpdateConnection) o;
- return Objects.equals(name, that.name)
+ return Objects.equals(environmentSettings, that.environmentSettings)
+ && Objects.equals(name, that.name)
&& Objects.equals(newName, that.newName)
&& Objects.equals(options, that.options)
&& Objects.equals(owner, that.owner);
@@ -75,12 +89,13 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(name, newName, options, owner);
+ return Objects.hash(environmentSettings, name, newName, options, owner);
}
@Override
public String toString() {
return new ToStringer(UpdateConnection.class)
+ .add("environmentSettings", environmentSettings)
.add("name", name)
.add("newName", newName)
.add("options", options)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java
index a785536bf..bdbf23c12 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java
@@ -16,7 +16,7 @@ public class UpdateFunction {
*/
@JsonIgnore private String name;
- /** Username of current owner of function. */
+ /** Username of current owner of the function. */
@JsonProperty("owner")
private String owner;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java
index 4e220749c..b49178472 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java
@@ -6,20 +6,114 @@
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
@Generated
public class UpdateModelVersionRequest {
+ /** List of aliases associated with the model version */
+ @JsonProperty("aliases")
+ private Collection aliases;
+
+ /** The name of the catalog containing the model version */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
/** The comment attached to the model version */
@JsonProperty("comment")
private String comment;
+ /** */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** The identifier of the user who created the model version */
+ @JsonProperty("created_by")
+ private String createdBy;
+
/** The three-level (fully qualified) name of the model version */
@JsonIgnore private String fullName;
+ /** The unique identifier of the model version */
+ @JsonProperty("id")
+ private String id;
+
+ /** The unique identifier of the metastore containing the model version */
+ @JsonProperty("metastore_id")
+ private String metastoreId;
+
+ /** The name of the parent registered model of the model version, relative to parent schema */
+ @JsonProperty("model_name")
+ private String modelName;
+
+ /** Model version dependencies, for feature-store packaged models */
+ @JsonProperty("model_version_dependencies")
+ private DependencyList modelVersionDependencies;
+
+ /**
+ * MLflow run ID used when creating the model version, if ``source`` was generated by an
+ * experiment run stored in an MLflow tracking server
+ */
+ @JsonProperty("run_id")
+ private String runId;
+
+ /**
+ * ID of the Databricks workspace containing the MLflow run that generated this model version, if
+ * applicable
+ */
+ @JsonProperty("run_workspace_id")
+ private Long runWorkspaceId;
+
+ /** The name of the schema containing the model version, relative to parent catalog */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ /** URI indicating the location of the source artifacts (files) for the model version */
+ @JsonProperty("source")
+ private String source;
+
+ /**
+ * Current status of the model version. Newly created model versions start in PENDING_REGISTRATION
+ * status, then move to READY status once the model version files are uploaded and the model
+ * version is finalized. Only model versions in READY status can be loaded for inference or
+ * served.
+ */
+ @JsonProperty("status")
+ private ModelVersionInfoStatus status;
+
+ /** The storage location on the cloud under which model version data files are stored */
+ @JsonProperty("storage_location")
+ private String storageLocation;
+
+ /** */
+ @JsonProperty("updated_at")
+ private Long updatedAt;
+
+ /** The identifier of the user who updated the model version last time */
+ @JsonProperty("updated_by")
+ private String updatedBy;
+
/** The integer version number of the model version */
@JsonIgnore private Long version;
+ public UpdateModelVersionRequest setAliases(Collection aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection getAliases() {
+ return aliases;
+ }
+
+ public UpdateModelVersionRequest setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
public UpdateModelVersionRequest setComment(String comment) {
this.comment = comment;
return this;
@@ -29,6 +123,24 @@ public String getComment() {
return comment;
}
+ public UpdateModelVersionRequest setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public UpdateModelVersionRequest setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ return this;
+ }
+
+ public String getCreatedBy() {
+ return createdBy;
+ }
+
public UpdateModelVersionRequest setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -38,6 +150,115 @@ public String getFullName() {
return fullName;
}
+ public UpdateModelVersionRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public UpdateModelVersionRequest setMetastoreId(String metastoreId) {
+ this.metastoreId = metastoreId;
+ return this;
+ }
+
+ public String getMetastoreId() {
+ return metastoreId;
+ }
+
+ public UpdateModelVersionRequest setModelName(String modelName) {
+ this.modelName = modelName;
+ return this;
+ }
+
+ public String getModelName() {
+ return modelName;
+ }
+
+ public UpdateModelVersionRequest setModelVersionDependencies(
+ DependencyList modelVersionDependencies) {
+ this.modelVersionDependencies = modelVersionDependencies;
+ return this;
+ }
+
+ public DependencyList getModelVersionDependencies() {
+ return modelVersionDependencies;
+ }
+
+ public UpdateModelVersionRequest setRunId(String runId) {
+ this.runId = runId;
+ return this;
+ }
+
+ public String getRunId() {
+ return runId;
+ }
+
+ public UpdateModelVersionRequest setRunWorkspaceId(Long runWorkspaceId) {
+ this.runWorkspaceId = runWorkspaceId;
+ return this;
+ }
+
+ public Long getRunWorkspaceId() {
+ return runWorkspaceId;
+ }
+
+ public UpdateModelVersionRequest setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ public UpdateModelVersionRequest setSource(String source) {
+ this.source = source;
+ return this;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public UpdateModelVersionRequest setStatus(ModelVersionInfoStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public ModelVersionInfoStatus getStatus() {
+ return status;
+ }
+
+ public UpdateModelVersionRequest setStorageLocation(String storageLocation) {
+ this.storageLocation = storageLocation;
+ return this;
+ }
+
+ public String getStorageLocation() {
+ return storageLocation;
+ }
+
+ public UpdateModelVersionRequest setUpdatedAt(Long updatedAt) {
+ this.updatedAt = updatedAt;
+ return this;
+ }
+
+ public Long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public UpdateModelVersionRequest setUpdatedBy(String updatedBy) {
+ this.updatedBy = updatedBy;
+ return this;
+ }
+
+ public String getUpdatedBy() {
+ return updatedBy;
+ }
+
public UpdateModelVersionRequest setVersion(Long version) {
this.version = version;
return this;
@@ -52,21 +273,72 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateModelVersionRequest that = (UpdateModelVersionRequest) o;
- return Objects.equals(comment, that.comment)
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(createdBy, that.createdBy)
&& Objects.equals(fullName, that.fullName)
+ && Objects.equals(id, that.id)
+ && Objects.equals(metastoreId, that.metastoreId)
+ && Objects.equals(modelName, that.modelName)
+ && Objects.equals(modelVersionDependencies, that.modelVersionDependencies)
+ && Objects.equals(runId, that.runId)
+ && Objects.equals(runWorkspaceId, that.runWorkspaceId)
+ && Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(source, that.source)
+ && Objects.equals(status, that.status)
+ && Objects.equals(storageLocation, that.storageLocation)
+ && Objects.equals(updatedAt, that.updatedAt)
+ && Objects.equals(updatedBy, that.updatedBy)
&& Objects.equals(version, that.version);
}
@Override
public int hashCode() {
- return Objects.hash(comment, fullName, version);
+ return Objects.hash(
+ aliases,
+ catalogName,
+ comment,
+ createdAt,
+ createdBy,
+ fullName,
+ id,
+ metastoreId,
+ modelName,
+ modelVersionDependencies,
+ runId,
+ runWorkspaceId,
+ schemaName,
+ source,
+ status,
+ storageLocation,
+ updatedAt,
+ updatedBy,
+ version);
}
@Override
public String toString() {
return new ToStringer(UpdateModelVersionRequest.class)
+ .add("aliases", aliases)
+ .add("catalogName", catalogName)
.add("comment", comment)
+ .add("createdAt", createdAt)
+ .add("createdBy", createdBy)
.add("fullName", fullName)
+ .add("id", id)
+ .add("metastoreId", metastoreId)
+ .add("modelName", modelName)
+ .add("modelVersionDependencies", modelVersionDependencies)
+ .add("runId", runId)
+ .add("runWorkspaceId", runWorkspaceId)
+ .add("schemaName", schemaName)
+ .add("source", source)
+ .add("status", status)
+ .add("storageLocation", storageLocation)
+ .add("updatedAt", updatedAt)
+ .add("updatedBy", updatedBy)
.add("version", version)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java
index d7fdfe4b0..963ea7bc6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java
@@ -6,17 +6,49 @@
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
@Generated
public class UpdateRegisteredModelRequest {
+ /** List of aliases associated with the registered model */
+ @JsonProperty("aliases")
+ private Collection aliases;
+
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
+ /** The name of the catalog where the schema and the registered model reside */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
/** The comment attached to the registered model */
@JsonProperty("comment")
private String comment;
+ /** Creation timestamp of the registered model in milliseconds since the Unix epoch */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** The identifier of the user who created the registered model */
+ @JsonProperty("created_by")
+ private String createdBy;
+
/** The three-level (fully qualified) name of the registered model */
@JsonIgnore private String fullName;
+ /** The unique identifier of the metastore */
+ @JsonProperty("metastore_id")
+ private String metastoreId;
+
+ /** The name of the registered model */
+ @JsonProperty("name")
+ private String name;
+
/** New name for the registered model. */
@JsonProperty("new_name")
private String newName;
@@ -25,6 +57,49 @@ public class UpdateRegisteredModelRequest {
@JsonProperty("owner")
private String owner;
+ /** The name of the schema where the registered model resides */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ /** The storage location on the cloud under which model version data files are stored */
+ @JsonProperty("storage_location")
+ private String storageLocation;
+
+ /** Last-update timestamp of the registered model in milliseconds since the Unix epoch */
+ @JsonProperty("updated_at")
+ private Long updatedAt;
+
+ /** The identifier of the user who updated the registered model last time */
+ @JsonProperty("updated_by")
+ private String updatedBy;
+
+ public UpdateRegisteredModelRequest setAliases(Collection aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection getAliases() {
+ return aliases;
+ }
+
+ public UpdateRegisteredModelRequest setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
+ public UpdateRegisteredModelRequest setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
public UpdateRegisteredModelRequest setComment(String comment) {
this.comment = comment;
return this;
@@ -34,6 +109,24 @@ public String getComment() {
return comment;
}
+ public UpdateRegisteredModelRequest setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public UpdateRegisteredModelRequest setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ return this;
+ }
+
+ public String getCreatedBy() {
+ return createdBy;
+ }
+
public UpdateRegisteredModelRequest setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -43,6 +136,24 @@ public String getFullName() {
return fullName;
}
+ public UpdateRegisteredModelRequest setMetastoreId(String metastoreId) {
+ this.metastoreId = metastoreId;
+ return this;
+ }
+
+ public String getMetastoreId() {
+ return metastoreId;
+ }
+
+ public UpdateRegisteredModelRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
public UpdateRegisteredModelRequest setNewName(String newName) {
this.newName = newName;
return this;
@@ -61,29 +172,102 @@ public String getOwner() {
return owner;
}
+ public UpdateRegisteredModelRequest setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ public UpdateRegisteredModelRequest setStorageLocation(String storageLocation) {
+ this.storageLocation = storageLocation;
+ return this;
+ }
+
+ public String getStorageLocation() {
+ return storageLocation;
+ }
+
+ public UpdateRegisteredModelRequest setUpdatedAt(Long updatedAt) {
+ this.updatedAt = updatedAt;
+ return this;
+ }
+
+ public Long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public UpdateRegisteredModelRequest setUpdatedBy(String updatedBy) {
+ this.updatedBy = updatedBy;
+ return this;
+ }
+
+ public String getUpdatedBy() {
+ return updatedBy;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateRegisteredModelRequest that = (UpdateRegisteredModelRequest) o;
- return Objects.equals(comment, that.comment)
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(browseOnly, that.browseOnly)
+ && Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(createdBy, that.createdBy)
&& Objects.equals(fullName, that.fullName)
+ && Objects.equals(metastoreId, that.metastoreId)
+ && Objects.equals(name, that.name)
&& Objects.equals(newName, that.newName)
- && Objects.equals(owner, that.owner);
+ && Objects.equals(owner, that.owner)
+ && Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(storageLocation, that.storageLocation)
+ && Objects.equals(updatedAt, that.updatedAt)
+ && Objects.equals(updatedBy, that.updatedBy);
}
@Override
public int hashCode() {
- return Objects.hash(comment, fullName, newName, owner);
+ return Objects.hash(
+ aliases,
+ browseOnly,
+ catalogName,
+ comment,
+ createdAt,
+ createdBy,
+ fullName,
+ metastoreId,
+ name,
+ newName,
+ owner,
+ schemaName,
+ storageLocation,
+ updatedAt,
+ updatedBy);
}
@Override
public String toString() {
return new ToStringer(UpdateRegisteredModelRequest.class)
+ .add("aliases", aliases)
+ .add("browseOnly", browseOnly)
+ .add("catalogName", catalogName)
.add("comment", comment)
+ .add("createdAt", createdAt)
+ .add("createdBy", createdBy)
.add("fullName", fullName)
+ .add("metastoreId", metastoreId)
+ .add("name", name)
.add("newName", newName)
.add("owner", owner)
+ .add("schemaName", schemaName)
+ .add("storageLocation", storageLocation)
+ .add("updatedAt", updatedAt)
+ .add("updatedBy", updatedBy)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java
index 3af63d755..21f22415c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java
@@ -11,11 +11,15 @@
@Generated
public class UpdateWorkspaceBindingsParameters {
- /** List of workspace bindings. */
+ /**
+ * List of workspace bindings to add. If a binding for the workspace already exists with a
+ * different binding_type, adding it again with a new binding_type will update the existing
+ * binding (e.g., from READ_WRITE to READ_ONLY).
+ */
@JsonProperty("add")
private Collection add;
- /** List of workspace bindings. */
+ /** List of workspace bindings to remove. */
@JsonProperty("remove")
private Collection remove;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
index 21ac9b83e..3f74f8647 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
@@ -76,7 +76,13 @@ public class VolumeInfo {
@JsonProperty("volume_id")
private String volumeId;
- /** */
+ /**
+ * The type of the volume. An external volume is located in the specified external location. A
+ * managed volume is located in the default location which is specified by the parent schema, or
+ * the parent catalog, or the Metastore. [Learn more]
+ *
+ * <p>[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
+ */
@JsonProperty("volume_type")
private VolumeType volumeType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java
index fcb9f83ac..044f72a39 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java
@@ -4,13 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * The type of the volume. An external volume is located in the specified external location. A
- * managed volume is located in the default location which is specified by the parent schema, or the
- * parent catalog, or the Metastore. [Learn more]
- *
- *
- * <p>[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
- */
@Generated
public enum VolumeType {
EXTERNAL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
index 609ef16ba..7b0ea8f0a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
@@ -78,7 +78,7 @@ public Iterable list(String catalogName, String schemaName) {
*
* The returned volumes are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the volumes. A regular user needs to be the owner or
- * have the **READ VOLUME** privilege on the volume to recieve the volumes in the response. For
+ * have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For
* the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
* parent catalog and the **USE_SCHEMA** privilege on the parent schema.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
index fe725c7ef..7ff906c48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
@@ -52,7 +52,7 @@ public interface VolumesService {
*
*
 * <p>The returned volumes are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the volumes. A regular user needs to be the owner or
- * have the **READ VOLUME** privilege on the volume to recieve the volumes in the response. For
+ * have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For
* the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
* parent catalog and the **USE_SCHEMA** privilege on the parent schema.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java
new file mode 100755
index 000000000..93018e7a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+
+/** Hardware target of a base environment (CPU or GPU). If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto */
+@Generated
+public enum BaseEnvironmentType {
+  CPU,
+  GPU,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..598f95361
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDefaultBaseEnvironmentRequest {
+  /** The default base environment to create. */
+  @JsonProperty("default_base_environment")
+  private DefaultBaseEnvironment defaultBaseEnvironment;
+
+  /**
+   * A unique identifier for this request. A random UUID is recommended. This request is only
+   * idempotent if a `request_id` is provided.
+   */
+  @JsonProperty("request_id")
+  private String requestId;
+
+  public CreateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment(
+      DefaultBaseEnvironment defaultBaseEnvironment) {
+    this.defaultBaseEnvironment = defaultBaseEnvironment;
+    return this;
+  }
+
+  public DefaultBaseEnvironment getDefaultBaseEnvironment() {
+    return defaultBaseEnvironment;
+  }
+
+  public CreateDefaultBaseEnvironmentRequest setRequestId(String requestId) {
+    this.requestId = requestId;
+    return this;
+  }
+
+  public String getRequestId() {
+    return requestId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    CreateDefaultBaseEnvironmentRequest that = (CreateDefaultBaseEnvironmentRequest) o;
+    return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment)
+        && Objects.equals(requestId, that.requestId);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(defaultBaseEnvironment, requestId);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(CreateDefaultBaseEnvironmentRequest.class)
+        .add("defaultBaseEnvironment", defaultBaseEnvironment)
+        .add("requestId", requestId)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
index cdc49aa18..828e11a2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
@@ -38,6 +38,13 @@ public class CreateInstancePool {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
* additional disk space when its Spark workers are running low on disk space. In AWS, this
@@ -83,6 +90,14 @@ public class CreateInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -154,6 +169,15 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public CreateInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public CreateInstancePool setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -209,6 +233,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public CreateInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public CreateInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -264,6 +297,7 @@ public boolean equals(Object o) {
&& Objects.equals(azureAttributes, that.azureAttributes)
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -271,6 +305,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -285,12 +320,14 @@ public int hashCode() {
azureAttributes,
customTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -305,12 +342,14 @@ public String toString() {
.add("azureAttributes", azureAttributes)
.add("customTags", customTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java
new file mode 100755
index 000000000..234b6cbc3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java
@@ -0,0 +1,258 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DefaultBaseEnvironment {
+  /** Cache entries for this base environment. NOTE(review): element type was lost in this diff (raw Collection); presumably DefaultBaseEnvironmentCache — confirm against the generated source. */
+  @JsonProperty("base_environment_cache")
+  private Collection baseEnvironmentCache;
+
+  /** Whether the environment targets CPU or GPU (see BaseEnvironmentType). */
+  @JsonProperty("base_environment_type")
+  private BaseEnvironmentType baseEnvironmentType;
+
+  /** Creation timestamp — presumably epoch milliseconds; confirm against the API spec. */
+  @JsonProperty("created_timestamp")
+  private Long createdTimestamp;
+
+  /** Numeric ID of the creating user — inferred from the field name; confirm. */
+  @JsonProperty("creator_user_id")
+  private Long creatorUserId;
+
+  /**
+   * Note: we made `environment` non-internal because we need to expose its `client` field. All
+   * other fields should be treated as internal.
+   */
+  @JsonProperty("environment")
+  private Environment environment;
+
+  /** Path of the environment definition file — inferred from the field name; confirm. */
+  @JsonProperty("filepath")
+  private String filepath;
+
+  /** Unique identifier of this default base environment. */
+  @JsonProperty("id")
+  private String id;
+
+  /** Whether this entry is the default base environment — confirm exact semantics against the API spec. */
+  @JsonProperty("is_default")
+  private Boolean isDefault;
+
+  /** Last-update timestamp — presumably epoch milliseconds; confirm. */
+  @JsonProperty("last_updated_timestamp")
+  private Long lastUpdatedTimestamp;
+
+  /** Numeric ID of the user who last updated this entry — inferred from the field name; confirm. */
+  @JsonProperty("last_updated_user_id")
+  private Long lastUpdatedUserId;
+
+  /** Human-readable detail accompanying {@code status} — inferred from the name; confirm. */
+  @JsonProperty("message")
+  private String message;
+
+  /** Display name of the default base environment. */
+  @JsonProperty("name")
+  private String name;
+
+  /** IDs of principals with access — element type was lost in this diff (raw Collection); confirm. */
+  @JsonProperty("principal_ids")
+  private Collection principalIds;
+
+  /** Current cache status (see DefaultBaseEnvironmentCacheStatus). */
+  @JsonProperty("status")
+  private DefaultBaseEnvironmentCacheStatus status;
+
+  public DefaultBaseEnvironment setBaseEnvironmentCache(
+      Collection baseEnvironmentCache) {
+    this.baseEnvironmentCache = baseEnvironmentCache;
+    return this;
+  }
+
+  public Collection getBaseEnvironmentCache() {
+    return baseEnvironmentCache;
+  }
+
+  public DefaultBaseEnvironment setBaseEnvironmentType(BaseEnvironmentType baseEnvironmentType) {
+    this.baseEnvironmentType = baseEnvironmentType;
+    return this;
+  }
+
+  public BaseEnvironmentType getBaseEnvironmentType() {
+    return baseEnvironmentType;
+  }
+
+  public DefaultBaseEnvironment setCreatedTimestamp(Long createdTimestamp) {
+    this.createdTimestamp = createdTimestamp;
+    return this;
+  }
+
+  public Long getCreatedTimestamp() {
+    return createdTimestamp;
+  }
+
+  public DefaultBaseEnvironment setCreatorUserId(Long creatorUserId) {
+    this.creatorUserId = creatorUserId;
+    return this;
+  }
+
+  public Long getCreatorUserId() {
+    return creatorUserId;
+  }
+
+  public DefaultBaseEnvironment setEnvironment(Environment environment) {
+    this.environment = environment;
+    return this;
+  }
+
+  public Environment getEnvironment() {
+    return environment;
+  }
+
+  public DefaultBaseEnvironment setFilepath(String filepath) {
+    this.filepath = filepath;
+    return this;
+  }
+
+  public String getFilepath() {
+    return filepath;
+  }
+
+  public DefaultBaseEnvironment setId(String id) {
+    this.id = id;
+    return this;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public DefaultBaseEnvironment setIsDefault(Boolean isDefault) {
+    this.isDefault = isDefault;
+    return this;
+  }
+
+  public Boolean getIsDefault() {
+    return isDefault;
+  }
+
+  public DefaultBaseEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+    this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+    return this;
+  }
+
+  public Long getLastUpdatedTimestamp() {
+    return lastUpdatedTimestamp;
+  }
+
+  public DefaultBaseEnvironment setLastUpdatedUserId(Long lastUpdatedUserId) {
+    this.lastUpdatedUserId = lastUpdatedUserId;
+    return this;
+  }
+
+  public Long getLastUpdatedUserId() {
+    return lastUpdatedUserId;
+  }
+
+  public DefaultBaseEnvironment setMessage(String message) {
+    this.message = message;
+    return this;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+
+  public DefaultBaseEnvironment setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public DefaultBaseEnvironment setPrincipalIds(Collection principalIds) {
+    this.principalIds = principalIds;
+    return this;
+  }
+
+  public Collection getPrincipalIds() {
+    return principalIds;
+  }
+
+  public DefaultBaseEnvironment setStatus(DefaultBaseEnvironmentCacheStatus status) {
+    this.status = status;
+    return this;
+  }
+
+  public DefaultBaseEnvironmentCacheStatus getStatus() {
+    return status;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    DefaultBaseEnvironment that = (DefaultBaseEnvironment) o;
+    return Objects.equals(baseEnvironmentCache, that.baseEnvironmentCache)
+        && Objects.equals(baseEnvironmentType, that.baseEnvironmentType)
+        && Objects.equals(createdTimestamp, that.createdTimestamp)
+        && Objects.equals(creatorUserId, that.creatorUserId)
+        && Objects.equals(environment, that.environment)
+        && Objects.equals(filepath, that.filepath)
+        && Objects.equals(id, that.id)
+        && Objects.equals(isDefault, that.isDefault)
+        && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
+        && Objects.equals(lastUpdatedUserId, that.lastUpdatedUserId)
+        && Objects.equals(message, that.message)
+        && Objects.equals(name, that.name)
+        && Objects.equals(principalIds, that.principalIds)
+        && Objects.equals(status, that.status);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        baseEnvironmentCache,
+        baseEnvironmentType,
+        createdTimestamp,
+        creatorUserId,
+        environment,
+        filepath,
+        id,
+        isDefault,
+        lastUpdatedTimestamp,
+        lastUpdatedUserId,
+        message,
+        name,
+        principalIds,
+        status);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DefaultBaseEnvironment.class)
+        .add("baseEnvironmentCache", baseEnvironmentCache)
+        .add("baseEnvironmentType", baseEnvironmentType)
+        .add("createdTimestamp", createdTimestamp)
+        .add("creatorUserId", creatorUserId)
+        .add("environment", environment)
+        .add("filepath", filepath)
+        .add("id", id)
+        .add("isDefault", isDefault)
+        .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+        .add("lastUpdatedUserId", lastUpdatedUserId)
+        .add("message", message)
+        .add("name", name)
+        .add("principalIds", principalIds)
+        .add("status", status)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java
new file mode 100755
index 000000000..ea464af11
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java
@@ -0,0 +1,92 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DefaultBaseEnvironmentCache {
+  /** Materialized environment retained without expiry — inferred from the field name; confirm. */
+  @JsonProperty("indefinite_materialized_environment")
+  private MaterializedEnvironment indefiniteMaterializedEnvironment;
+
+  /** The materialized environment backing this cache entry — inferred from the field name; confirm. */
+  @JsonProperty("materialized_environment")
+  private MaterializedEnvironment materializedEnvironment;
+
+  /** Human-readable detail accompanying {@code status} — inferred from the name; confirm. */
+  @JsonProperty("message")
+  private String message;
+
+  /** Status of this cache entry (see DefaultBaseEnvironmentCacheStatus). */
+  @JsonProperty("status")
+  private DefaultBaseEnvironmentCacheStatus status;
+
+  public DefaultBaseEnvironmentCache setIndefiniteMaterializedEnvironment(
+      MaterializedEnvironment indefiniteMaterializedEnvironment) {
+    this.indefiniteMaterializedEnvironment = indefiniteMaterializedEnvironment;
+    return this;
+  }
+
+  public MaterializedEnvironment getIndefiniteMaterializedEnvironment() {
+    return indefiniteMaterializedEnvironment;
+  }
+
+  public DefaultBaseEnvironmentCache setMaterializedEnvironment(
+      MaterializedEnvironment materializedEnvironment) {
+    this.materializedEnvironment = materializedEnvironment;
+    return this;
+  }
+
+  public MaterializedEnvironment getMaterializedEnvironment() {
+    return materializedEnvironment;
+  }
+
+  public DefaultBaseEnvironmentCache setMessage(String message) {
+    this.message = message;
+    return this;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+
+  public DefaultBaseEnvironmentCache setStatus(DefaultBaseEnvironmentCacheStatus status) {
+    this.status = status;
+    return this;
+  }
+
+  public DefaultBaseEnvironmentCacheStatus getStatus() {
+    return status;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    DefaultBaseEnvironmentCache that = (DefaultBaseEnvironmentCache) o;
+    return Objects.equals(indefiniteMaterializedEnvironment, that.indefiniteMaterializedEnvironment)
+        && Objects.equals(materializedEnvironment, that.materializedEnvironment)
+        && Objects.equals(message, that.message)
+        && Objects.equals(status, that.status);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        indefiniteMaterializedEnvironment, materializedEnvironment, message, status);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DefaultBaseEnvironmentCache.class)
+        .add("indefiniteMaterializedEnvironment", indefiniteMaterializedEnvironment)
+        .add("materializedEnvironment", materializedEnvironment)
+        .add("message", message)
+        .add("status", status)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java
new file mode 100755
index 000000000..aaee91c80
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DefaultBaseEnvironmentCacheStatus {
+ CREATED,
+ EXPIRED,
+ FAILED,
+ INVALID,
+ PENDING,
+ REFRESHING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..bef81a175
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonIgnore private String id;
+
+ public DeleteDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDefaultBaseEnvironmentRequest that = (DeleteDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDefaultBaseEnvironmentRequest.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
index b5cd70eb2..c2eea1d95 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
@@ -19,6 +19,13 @@ public class EditInstancePool {
@JsonProperty("custom_tags")
private Map customTags;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Automatically terminates the extra instances in the pool cache after they are inactive for this
* time in minutes if min_idle_instances requirement is already met. If not set, the extra pool
@@ -52,6 +59,14 @@ public class EditInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -84,6 +99,15 @@ public Map getCustomTags() {
return customTags;
}
+ public EditInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public EditInstancePool setIdleInstanceAutoterminationMinutes(
Long idleInstanceAutoterminationMinutes) {
this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes;
@@ -130,6 +154,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public EditInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public EditInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -163,12 +196,14 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
EditInstancePool that = (EditInstancePool) o;
return Objects.equals(customTags, that.customTags)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(
idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes)
&& Objects.equals(instancePoolId, that.instancePoolId)
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(remoteDiskThroughput, that.remoteDiskThroughput)
&& Objects.equals(totalInitialRemoteDiskSize, that.totalInitialRemoteDiskSize);
@@ -178,11 +213,13 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
customTags,
+ enableAutoAlternateNodeTypes,
idleInstanceAutoterminationMinutes,
instancePoolId,
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
remoteDiskThroughput,
totalInitialRemoteDiskSize);
@@ -192,11 +229,13 @@ public int hashCode() {
public String toString() {
return new ToStringer(EditInstancePool.class)
.add("customTags", customTags)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
.add("instancePoolId", instancePoolId)
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("remoteDiskThroughput", remoteDiskThroughput)
.add("totalInitialRemoteDiskSize", totalInitialRemoteDiskSize)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
index 28acb8090..7e131ef28 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
@@ -36,13 +36,9 @@ public class Environment {
@JsonProperty("environment_version")
private String environmentVersion;
- /** Use `java_dependencies` instead. */
- @JsonProperty("jar_dependencies")
- private Collection jarDependencies;
-
/**
- * List of jar dependencies, should be string representing volume paths. For example:
- * `/Volumes/path/to/test.jar`.
+   * List of Java dependencies. Each dependency is a string representing a Java library path. For
+ * example: `/Volumes/path/to/test.jar`.
*/
@JsonProperty("java_dependencies")
private Collection javaDependencies;
@@ -74,15 +70,6 @@ public String getEnvironmentVersion() {
return environmentVersion;
}
- public Environment setJarDependencies(Collection jarDependencies) {
- this.jarDependencies = jarDependencies;
- return this;
- }
-
- public Collection getJarDependencies() {
- return jarDependencies;
- }
-
public Environment setJavaDependencies(Collection javaDependencies) {
this.javaDependencies = javaDependencies;
return this;
@@ -100,14 +87,12 @@ public boolean equals(Object o) {
return Objects.equals(client, that.client)
&& Objects.equals(dependencies, that.dependencies)
&& Objects.equals(environmentVersion, that.environmentVersion)
- && Objects.equals(jarDependencies, that.jarDependencies)
&& Objects.equals(javaDependencies, that.javaDependencies);
}
@Override
public int hashCode() {
- return Objects.hash(
- client, dependencies, environmentVersion, jarDependencies, javaDependencies);
+ return Objects.hash(client, dependencies, environmentVersion, javaDependencies);
}
@Override
@@ -116,7 +101,6 @@ public String toString() {
.add("client", client)
.add("dependencies", dependencies)
.add("environmentVersion", environmentVersion)
- .add("jarDependencies", jarDependencies)
.add("javaDependencies", javaDependencies)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..6c682a012
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("id")
+ private String id;
+
+ /** Deprecated: use ctx.requestId instead */
+ @JsonIgnore
+ @QueryParam("trace_id")
+ private String traceId;
+
+ public GetDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public GetDefaultBaseEnvironmentRequest setTraceId(String traceId) {
+ this.traceId = traceId;
+ return this;
+ }
+
+ public String getTraceId() {
+ return traceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDefaultBaseEnvironmentRequest that = (GetDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(id, that.id) && Objects.equals(traceId, that.traceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, traceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDefaultBaseEnvironmentRequest.class)
+ .add("id", id)
+ .add("traceId", traceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
index be68fd165..c9dd511d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
@@ -52,6 +52,13 @@ public class GetInstancePool {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
* additional disk space when its Spark workers are running low on disk space. In AWS, this
@@ -101,6 +108,14 @@ public class GetInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -193,6 +208,15 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public GetInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public GetInstancePool setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -257,6 +281,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public GetInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public GetInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -339,6 +372,7 @@ public boolean equals(Object o) {
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(defaultTags, that.defaultTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -347,6 +381,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -365,6 +400,7 @@ public int hashCode() {
customTags,
defaultTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
@@ -372,6 +408,7 @@ public int hashCode() {
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -390,6 +427,7 @@ public String toString() {
.add("customTags", customTags)
.add("defaultTags", defaultTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
@@ -397,6 +435,7 @@ public String toString() {
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
index 485798092..9f9932894 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
@@ -52,6 +52,13 @@ public class InstancePoolAndStats {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
* additional disk space when its Spark workers are running low on disk space. In AWS, this
@@ -101,6 +108,14 @@ public class InstancePoolAndStats {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -193,6 +208,16 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public InstancePoolAndStats setEnableAutoAlternateNodeTypes(
+ Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public InstancePoolAndStats setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -257,6 +282,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public InstancePoolAndStats setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public InstancePoolAndStats setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -340,6 +374,7 @@ public boolean equals(Object o) {
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(defaultTags, that.defaultTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -348,6 +383,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -366,6 +402,7 @@ public int hashCode() {
customTags,
defaultTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
@@ -373,6 +410,7 @@ public int hashCode() {
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -391,6 +429,7 @@ public String toString() {
.add("customTags", customTags)
.add("defaultTags", defaultTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
@@ -398,6 +437,7 @@ public String toString() {
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
index 2520eca50..6e7b6041b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
@@ -14,6 +14,19 @@ public class InstancePoolAwsAttributes {
@JsonProperty("availability")
private InstancePoolAwsAttributesAvailability availability;
+ /**
+ * All AWS instances belonging to the instance pool will have this instance profile. If omitted,
+ * instances will initially be launched with the workspace's default instance profile. If defined,
+ * clusters that use the pool will inherit the instance profile, and must not specify their own
+ * instance profile on cluster creation or update. If the pool does not specify an instance
+ * profile, clusters using the pool may specify any instance profile. The instance profile must
+ * have previously been added to the Databricks environment by an account administrator.
+ *
+ * This feature may only be available to certain customer plans.
+ */
+ @JsonProperty("instance_profile_arn")
+ private String instanceProfileArn;
+
/**
* Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance
* type's on-demand price. For example, if this field is set to 50, and the cluster needs a new
@@ -48,6 +61,15 @@ public InstancePoolAwsAttributesAvailability getAvailability() {
return availability;
}
+ public InstancePoolAwsAttributes setInstanceProfileArn(String instanceProfileArn) {
+ this.instanceProfileArn = instanceProfileArn;
+ return this;
+ }
+
+ public String getInstanceProfileArn() {
+ return instanceProfileArn;
+ }
+
public InstancePoolAwsAttributes setSpotBidPricePercent(Long spotBidPricePercent) {
this.spotBidPricePercent = spotBidPricePercent;
return this;
@@ -72,19 +94,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
InstancePoolAwsAttributes that = (InstancePoolAwsAttributes) o;
return Objects.equals(availability, that.availability)
+ && Objects.equals(instanceProfileArn, that.instanceProfileArn)
&& Objects.equals(spotBidPricePercent, that.spotBidPricePercent)
&& Objects.equals(zoneId, that.zoneId);
}
@Override
public int hashCode() {
- return Objects.hash(availability, spotBidPricePercent, zoneId);
+ return Objects.hash(availability, instanceProfileArn, spotBidPricePercent, zoneId);
}
@Override
public String toString() {
return new ToStringer(InstancePoolAwsAttributes.class)
.add("availability", availability)
+ .add("instanceProfileArn", instanceProfileArn)
.add("spotBidPricePercent", spotBidPricePercent)
.add("zoneId", zoneId)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
index bf8a99af3..4a692cb61 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
@@ -4,6 +4,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
+import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -68,6 +69,38 @@ public Iterable clusterStatus(ClusterStatus request) {
request, impl::clusterStatus, ClusterLibraryStatuses::getLibraryStatuses, response -> null);
}
+ /**
+ * Create a default base environment within workspaces to define the environment version and a
+ * list of dependencies to be used in serverless notebooks and jobs. This process will
+ * asynchronously generate a cache to optimize dependency resolution.
+ */
+ public DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest request) {
+ return impl.createDefaultBaseEnvironment(request);
+ }
+
+ public void deleteDefaultBaseEnvironment(String id) {
+ deleteDefaultBaseEnvironment(new DeleteDefaultBaseEnvironmentRequest().setId(id));
+ }
+
+ /**
+ * Delete the default base environment given an ID. The default base environment may be used by
+ * downstream workloads. Please ensure that the deletion is intentional.
+ */
+ public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) {
+ impl.deleteDefaultBaseEnvironment(request);
+ }
+
+ public DefaultBaseEnvironment getDefaultBaseEnvironment(String id) {
+ return getDefaultBaseEnvironment(new GetDefaultBaseEnvironmentRequest().setId(id));
+ }
+
+ /** Return the default base environment details for a given ID. */
+ public DefaultBaseEnvironment getDefaultBaseEnvironment(
+ GetDefaultBaseEnvironmentRequest request) {
+ return impl.getDefaultBaseEnvironment(request);
+ }
+
/**
* Add libraries to install on a cluster. The installation is asynchronous; it happens in the
* background after the completion of this request.
@@ -76,6 +109,34 @@ public void install(InstallLibraries request) {
impl.install(request);
}
+ /** List default base environments defined in the workspaces for the requested user. */
+ public Iterable listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDefaultBaseEnvironments,
+ ListDefaultBaseEnvironmentsResponse::getDefaultBaseEnvironments,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public void refreshDefaultBaseEnvironments(Collection ids) {
+ refreshDefaultBaseEnvironments(new RefreshDefaultBaseEnvironmentsRequest().setIds(ids));
+ }
+
+ /**
+ * Refresh the cached default base environments for the given IDs. This process will
+   * asynchronously regenerate the caches. The existing caches remain available until they expire.
+ */
+ public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) {
+ impl.refreshDefaultBaseEnvironments(request);
+ }
+
/**
* Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster
* is restarted. A request to uninstall a library that is not currently installed is ignored.
@@ -84,6 +145,24 @@ public void uninstall(UninstallLibraries request) {
impl.uninstall(request);
}
+ /**
+ * Update the default base environment for the given ID. This process will asynchronously
+ * regenerate the cache. The existing cache remains available until it expires.
+ */
+ public DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest request) {
+ return impl.updateDefaultBaseEnvironment(request);
+ }
+
+ /**
+ * Set the default base environment for the workspace. This marks the specified DBE as the
+ * workspace default.
+ */
+ public DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment(
+ UpdateDefaultDefaultBaseEnvironmentRequest request) {
+ return impl.updateDefaultDefaultBaseEnvironment(request);
+ }
+
public LibrariesService impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
index 6bb0dd63e..e5a04da90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
@@ -41,6 +41,48 @@ public ClusterLibraryStatuses clusterStatus(ClusterStatus request) {
}
}
+ @Override
+ public DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest request) {
+ String path = "/api/2.0/default-base-environments";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) {
+ String path = String.format("/api/2.0/default-base-environments/%s", request.getId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DefaultBaseEnvironment getDefaultBaseEnvironment(
+ GetDefaultBaseEnvironmentRequest request) {
+ String path = "/api/2.0/default-base-environments:getDefaultBaseEnvironment";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void install(InstallLibraries request) {
String path = "/api/2.0/libraries/install";
@@ -55,6 +97,34 @@ public void install(InstallLibraries request) {
}
}
+ @Override
+ public ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest request) {
+ String path = "/api/2.0/default-base-environments";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDefaultBaseEnvironmentsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) {
+ String path = "/api/2.0/default-base-environments/refresh";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void uninstall(UninstallLibraries request) {
String path = "/api/2.0/libraries/uninstall";
@@ -68,4 +138,34 @@ public void uninstall(UninstallLibraries request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest request) {
+ String path = String.format("/api/2.0/default-base-environments/%s", request.getId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment(
+ UpdateDefaultDefaultBaseEnvironmentRequest request) {
+ String path = "/api/2.0/default-base-environments:setDefault";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
index 5ccaf55cb..d35cb4101 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
@@ -41,15 +41,59 @@ public interface LibrariesService {
*/
ClusterLibraryStatuses clusterStatus(ClusterStatus clusterStatus);
+ /**
+ * Create a default base environment within workspaces to define the environment version and a
+ * list of dependencies to be used in serverless notebooks and jobs. This process will
+ * asynchronously generate a cache to optimize dependency resolution.
+ */
+ DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest createDefaultBaseEnvironmentRequest);
+
+ /**
+ * Delete the default base environment given an ID. The default base environment may be used by
+ * downstream workloads. Please ensure that the deletion is intentional.
+ */
+ void deleteDefaultBaseEnvironment(
+ DeleteDefaultBaseEnvironmentRequest deleteDefaultBaseEnvironmentRequest);
+
+ /** Return the default base environment details for a given ID. */
+ DefaultBaseEnvironment getDefaultBaseEnvironment(
+ GetDefaultBaseEnvironmentRequest getDefaultBaseEnvironmentRequest);
+
/**
* Add libraries to install on a cluster. The installation is asynchronous; it happens in the
* background after the completion of this request.
*/
void install(InstallLibraries installLibraries);
+ /** List default base environments defined in the workspaces for the requested user. */
+ ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest listDefaultBaseEnvironmentsRequest);
+
+ /**
+ * Refresh the cached default base environments for the given IDs. This process will
+ * asynchronously regenerate the caches. The existing caches remains available until it expires.
+ */
+ void refreshDefaultBaseEnvironments(
+ RefreshDefaultBaseEnvironmentsRequest refreshDefaultBaseEnvironmentsRequest);
+
/**
* Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster
* is restarted. A request to uninstall a library that is not currently installed is ignored.
*/
void uninstall(UninstallLibraries uninstallLibraries);
+
+ /**
+ * Update the default base environment for the given ID. This process will asynchronously
+ * regenerate the cache. The existing cache remains available until it expires.
+ */
+ DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest updateDefaultBaseEnvironmentRequest);
+
+ /**
+ * Set the default base environment for the workspace. This marks the specified DBE as the
+ * workspace default.
+ */
+ DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment(
+ UpdateDefaultDefaultBaseEnvironmentRequest updateDefaultDefaultBaseEnvironmentRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java
new file mode 100755
index 000000000..b4f8149ba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDefaultBaseEnvironmentsRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDefaultBaseEnvironmentsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDefaultBaseEnvironmentsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDefaultBaseEnvironmentsRequest that = (ListDefaultBaseEnvironmentsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDefaultBaseEnvironmentsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java
new file mode 100755
index 000000000..c941b5fa9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDefaultBaseEnvironmentsResponse {
+ /** */
+ @JsonProperty("default_base_environments")
+ private Collection<DefaultBaseEnvironment> defaultBaseEnvironments;
+
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDefaultBaseEnvironmentsResponse setDefaultBaseEnvironments(
+ Collection<DefaultBaseEnvironment> defaultBaseEnvironments) {
+ this.defaultBaseEnvironments = defaultBaseEnvironments;
+ return this;
+ }
+
+ public Collection<DefaultBaseEnvironment> getDefaultBaseEnvironments() {
+ return defaultBaseEnvironments;
+ }
+
+ public ListDefaultBaseEnvironmentsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDefaultBaseEnvironmentsResponse that = (ListDefaultBaseEnvironmentsResponse) o;
+ return Objects.equals(defaultBaseEnvironments, that.defaultBaseEnvironments)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(defaultBaseEnvironments, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDefaultBaseEnvironmentsResponse.class)
+ .add("defaultBaseEnvironments", defaultBaseEnvironments)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java
new file mode 100755
index 000000000..52a43e725
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Materialized Environment information enables environment sharing and reuse via Environment
+ * Caching during library installations. Currently this feature is only supported for Python
+ * libraries.
+ *
+ * <p>- If the env cache entry in LMv2 DB doesn't exist or invalid, library installations and
+ * environment materialization will occur. A new Materialized Environment metadata will be sent from
+ * DP upon successful library installations and env materialization, and is persisted into database
+ * by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment will be sent
+ * to DP by LMv2, and DP will restore the cached environment from a store instead of reinstalling
+ * libraries from scratch.
+ *
+ *
+ * <p>If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new
+ * version
+ */
+@Generated
+public class MaterializedEnvironment {
+ /** The timestamp (in epoch milliseconds) when the materialized env is updated. */
+ @JsonProperty("last_updated_timestamp")
+ private Long lastUpdatedTimestamp;
+
+ public MaterializedEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ return this;
+ }
+
+ public Long getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MaterializedEnvironment that = (MaterializedEnvironment) o;
+ return Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lastUpdatedTimestamp);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MaterializedEnvironment.class)
+ .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
new file mode 100755
index 000000000..7366ed43d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
@@ -0,0 +1,33 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/**
+ * For Fleet-V2 using classic clusters, this object contains the information about the alternate
+ * node type ids to use when attempting to launch a cluster. It can be used with both the driver and
+ * worker node types.
+ */
+@Generated
+public class NodeTypeFlexibility {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NodeTypeFlexibility.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java
new file mode 100755
index 000000000..c7f60255b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class RefreshDefaultBaseEnvironmentsRequest {
+ /** */
+ @JsonProperty("ids")
+ private Collection<String> ids;
+
+ public RefreshDefaultBaseEnvironmentsRequest setIds(Collection<String> ids) {
+ this.ids = ids;
+ return this;
+ }
+
+ public Collection<String> getIds() {
+ return ids;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RefreshDefaultBaseEnvironmentsRequest that = (RefreshDefaultBaseEnvironmentsRequest) o;
+ return Objects.equals(ids, that.ids);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ids);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RefreshDefaultBaseEnvironmentsRequest.class).add("ids", ids).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java
index ef5dfa156..4ea0454c8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java
@@ -19,11 +19,19 @@ public class Results {
@JsonProperty("data")
private Object data;
- /** The image filename */
+ /**
+ * The image data in one of the following formats:
+ *
+ * <p>1. A Data URL with base64-encoded image data: `data:image/{type};base64,{base64-data}`.
+ * Example: `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...`
+ *
+ *
+ * <p>2. A FileStore file path for large images: `/plots/{filename}.png`. Example:
+ * `/plots/b6a7ad70-fb2c-4353-8aed-3f1e015174a4.png`
+ */
@JsonProperty("fileName")
private String fileName;
- /** */
+ /** List of image data for multiple images. Each element follows the same format as file_name. */
@JsonProperty("fileNames")
private Collection<String> fileNames;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
index 18917be99..2dcf7125a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
@@ -45,6 +45,7 @@ public enum TerminationReasonCode {
BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG,
BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED,
BUDGET_POLICY_RESOLUTION_FAILURE,
+ CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED,
CLOUD_ACCOUNT_SETUP_FAILURE,
CLOUD_OPERATION_CANCELLED,
CLOUD_PROVIDER_DISK_SETUP_FAILURE,
@@ -122,6 +123,7 @@ public enum TerminationReasonCode {
IN_PENALTY_BOX,
IP_EXHAUSTION_FAILURE,
JOB_FINISHED,
+ K8S_ACTIVE_POD_QUOTA_EXCEEDED,
K8S_AUTOSCALING_FAILURE,
K8S_DBR_CLUSTER_LAUNCH_TIMEOUT,
LAZY_ALLOCATION_TIMEOUT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..fcf7e1e9d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonProperty("default_base_environment")
+ private DefaultBaseEnvironment defaultBaseEnvironment;
+
+ /** */
+ @JsonIgnore private String id;
+
+ public UpdateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment(
+ DefaultBaseEnvironment defaultBaseEnvironment) {
+ this.defaultBaseEnvironment = defaultBaseEnvironment;
+ return this;
+ }
+
+ public DefaultBaseEnvironment getDefaultBaseEnvironment() {
+ return defaultBaseEnvironment;
+ }
+
+ public UpdateDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDefaultBaseEnvironmentRequest that = (UpdateDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment)
+ && Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(defaultBaseEnvironment, id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDefaultBaseEnvironmentRequest.class)
+ .add("defaultBaseEnvironment", defaultBaseEnvironment)
+ .add("id", id)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..3cd5d7508
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDefaultDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonProperty("base_environment_type")
+ private BaseEnvironmentType baseEnvironmentType;
+
+ /** */
+ @JsonProperty("id")
+ private String id;
+
+ public UpdateDefaultDefaultBaseEnvironmentRequest setBaseEnvironmentType(
+ BaseEnvironmentType baseEnvironmentType) {
+ this.baseEnvironmentType = baseEnvironmentType;
+ return this;
+ }
+
+ public BaseEnvironmentType getBaseEnvironmentType() {
+ return baseEnvironmentType;
+ }
+
+ public UpdateDefaultDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDefaultDefaultBaseEnvironmentRequest that =
+ (UpdateDefaultDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(baseEnvironmentType, that.baseEnvironmentType)
+ && Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(baseEnvironmentType, id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDefaultDefaultBaseEnvironmentRequest.class)
+ .add("baseEnvironmentType", baseEnvironmentType)
+ .add("id", id)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
new file mode 100755
index 000000000..07776b06d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CancelPublishedQueryExecutionRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection<String> tokens;
+
+ public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public CancelPublishedQueryExecutionRequest setTokens(Collection<String> tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection<String> getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelPublishedQueryExecutionRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
new file mode 100755
index 000000000..3476fb9ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponse {
+ /** */
+ @JsonProperty("status")
+ private Collection<CancelQueryExecutionResponseStatus> status;
+
+ public CancelQueryExecutionResponse setStatus(
+ Collection<CancelQueryExecutionResponseStatus> status) {
+ this.status = status;
+ return this;
+ }
+
+ public Collection<CancelQueryExecutionResponseStatus> getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
new file mode 100755
index 000000000..3d8a03c06
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponseStatus {
+ /**
+ * The token to poll for result asynchronously Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /** */
+ @JsonProperty("pending")
+ private Empty pending;
+
+ /** */
+ @JsonProperty("success")
+ private Empty success;
+
+ public CancelQueryExecutionResponseStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public CancelQueryExecutionResponseStatus setPending(Empty pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public Empty getPending() {
+ return pending;
+ }
+
+ public CancelQueryExecutionResponseStatus setSuccess(Empty success) {
+ this.success = success;
+ return this;
+ }
+
+ public Empty getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o;
+ return Objects.equals(dataToken, that.dataToken)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, pending, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponseStatus.class)
+ .add("dataToken", dataToken)
+ .add("pending", pending)
+ .add("success", success)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
new file mode 100755
index 000000000..8714d62a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
@@ -0,0 +1,32 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm
+ * right now.
+ */
+@Generated
+public class Empty {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Empty.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
new file mode 100755
index 000000000..c5223007c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Execute query request for published Dashboards. Since published dashboards have the option of
+ * running as the publisher, the datasets, warehouse_id are excluded from the request and instead
+ * read from the source (lakeview-config) via the additional parameters (dashboardName and
+ * dashboardRevisionId)
+ */
+@Generated
+public class ExecutePublishedDashboardQueryRequest {
+ /**
+ * Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains
+ * the list of datasets, warehouse_id, and embedded_credentials
+ */
+ @JsonProperty("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonProperty("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /**
+ * A dashboard schedule can override the warehouse used as compute for processing the published
+ * dashboard queries
+ */
+ @JsonProperty("override_warehouse_id")
+ private String overrideWarehouseId;
+
+ public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) {
+ this.overrideWarehouseId = overrideWarehouseId;
+ return this;
+ }
+
+ public String getOverrideWarehouseId() {
+ return overrideWarehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(overrideWarehouseId, that.overrideWarehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExecutePublishedDashboardQueryRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("overrideWarehouseId", overrideWarehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index 7bd915755..97dd4d4c1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -110,6 +110,11 @@ public Wait createMessage(
response);
}
+ /** Creates a Genie space from a serialized payload. */
+ public GenieSpace createSpace(GenieCreateSpaceRequest request) {
+ return impl.createSpace(request);
+ }
+
public void deleteConversation(String spaceId, String conversationId) {
deleteConversation(
new GenieDeleteConversationRequest().setSpaceId(spaceId).setConversationId(conversationId));
@@ -151,6 +156,49 @@ public GenieGetMessageQueryResultResponse executeMessageQuery(
return impl.executeMessageQuery(request);
}
+ /**
+ * Initiates a new SQL execution and returns a `download_id` that you can use to track the
+ * progress of the download. The query result is stored in an external link and can be retrieved
+ * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. See [Execute
+ * Statement](:method:statementexecution/executestatement) for more details.
+ */
+ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest request) {
+ return impl.generateDownloadFullQueryResult(request);
+ }
+
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ String spaceId,
+ String conversationId,
+ String messageId,
+ String attachmentId,
+ String downloadId) {
+ return getDownloadFullQueryResult(
+ new GenieGetDownloadFullQueryResultRequest()
+ .setSpaceId(spaceId)
+ .setConversationId(conversationId)
+ .setMessageId(messageId)
+ .setAttachmentId(attachmentId)
+ .setDownloadId(downloadId));
+ }
+
+ /**
+ * After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and
+ * successfully receiving a `download_id`, use this API to poll the download progress. When the
+ * download is complete, the API returns one or more external links to the query result files.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
+ * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
+ * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
+ * for more details.
+ */
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest request) {
+ return impl.getDownloadFullQueryResult(request);
+ }
+
public GenieMessage getMessage(String spaceId, String conversationId, String messageId) {
return getMessage(
new GenieGetConversationMessageRequest()
@@ -286,6 +334,11 @@ public void trashSpace(GenieTrashSpaceRequest request) {
impl.trashSpace(request);
}
+ /** Updates a Genie space with a serialized payload. */
+ public GenieSpace updateSpace(GenieUpdateSpaceRequest request) {
+ return impl.updateSpace(request);
+ }
+
public GenieService impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java
new file mode 100755
index 000000000..5d425c84b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieCreateSpaceRequest {
+ /** Optional description */
+ @JsonProperty("description")
+ private String description;
+
+ /** Parent folder path where the space will be registered */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /** Serialized export model for the space contents */
+ @JsonProperty("serialized_space")
+ private String serializedSpace;
+
+ /** Optional title override */
+ @JsonProperty("title")
+ private String title;
+
+ /** Warehouse to associate with the new space */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public GenieCreateSpaceRequest setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public GenieCreateSpaceRequest setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public GenieCreateSpaceRequest setSerializedSpace(String serializedSpace) {
+ this.serializedSpace = serializedSpace;
+ return this;
+ }
+
+ public String getSerializedSpace() {
+ return serializedSpace;
+ }
+
+ public GenieCreateSpaceRequest setTitle(String title) {
+ this.title = title;
+ return this;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public GenieCreateSpaceRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieCreateSpaceRequest that = (GenieCreateSpaceRequest) o;
+ return Objects.equals(description, that.description)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(serializedSpace, that.serializedSpace)
+ && Objects.equals(title, that.title)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(description, parentPath, serializedSpace, title, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieCreateSpaceRequest.class)
+ .add("description", description)
+ .add("parentPath", parentPath)
+ .add("serializedSpace", serializedSpace)
+ .add("title", title)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java
index 92d35fc98..86339a735 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java
@@ -10,10 +10,23 @@
/** Feedback containing rating and optional comment */
@Generated
public class GenieFeedback {
+ /** Optional feedback comment text */
+ @JsonProperty("comment")
+ private String comment;
+
/** The feedback rating */
@JsonProperty("rating")
private GenieFeedbackRating rating;
+ public GenieFeedback setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
public GenieFeedback setRating(GenieFeedbackRating rating) {
this.rating = rating;
return this;
@@ -28,16 +41,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenieFeedback that = (GenieFeedback) o;
- return Objects.equals(rating, that.rating);
+ return Objects.equals(comment, that.comment) && Objects.equals(rating, that.rating);
}
@Override
public int hashCode() {
- return Objects.hash(rating);
+ return Objects.hash(comment, rating);
}
@Override
public String toString() {
- return new ToStringer(GenieFeedback.class).add("rating", rating).toString();
+ return new ToStringer(GenieFeedback.class)
+ .add("comment", comment)
+ .add("rating", rating)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java
new file mode 100755
index 000000000..7dc36298f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGenerateDownloadFullQueryResultRequest {
+ /** Attachment ID */
+ @JsonIgnore private String attachmentId;
+
+ /** Conversation ID */
+ @JsonIgnore private String conversationId;
+
+ /** Message ID */
+ @JsonIgnore private String messageId;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ public GenieGenerateDownloadFullQueryResultRequest setAttachmentId(String attachmentId) {
+ this.attachmentId = attachmentId;
+ return this;
+ }
+
+ public String getAttachmentId() {
+ return attachmentId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setConversationId(String conversationId) {
+ this.conversationId = conversationId;
+ return this;
+ }
+
+ public String getConversationId() {
+ return conversationId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setMessageId(String messageId) {
+ this.messageId = messageId;
+ return this;
+ }
+
+ public String getMessageId() {
+ return messageId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGenerateDownloadFullQueryResultRequest that =
+ (GenieGenerateDownloadFullQueryResultRequest) o;
+ return Objects.equals(attachmentId, that.attachmentId)
+ && Objects.equals(conversationId, that.conversationId)
+ && Objects.equals(messageId, that.messageId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attachmentId, conversationId, messageId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGenerateDownloadFullQueryResultRequest.class)
+ .add("attachmentId", attachmentId)
+ .add("conversationId", conversationId)
+ .add("messageId", messageId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java
new file mode 100755
index 000000000..e51751c8b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieGenerateDownloadFullQueryResultResponse {
+ /** Download ID. Use this ID to track the download request in subsequent polling calls */
+ @JsonProperty("download_id")
+ private String downloadId;
+
+ public GenieGenerateDownloadFullQueryResultResponse setDownloadId(String downloadId) {
+ this.downloadId = downloadId;
+ return this;
+ }
+
+ public String getDownloadId() {
+ return downloadId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGenerateDownloadFullQueryResultResponse that =
+ (GenieGenerateDownloadFullQueryResultResponse) o;
+ return Objects.equals(downloadId, that.downloadId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(downloadId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGenerateDownloadFullQueryResultResponse.class)
+ .add("downloadId", downloadId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java
new file mode 100755
index 000000000..73fd97ba2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java
@@ -0,0 +1,102 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGetDownloadFullQueryResultRequest {
+ /** Attachment ID */
+ @JsonIgnore private String attachmentId;
+
+ /** Conversation ID */
+ @JsonIgnore private String conversationId;
+
+  /**
+   * Download ID. This ID is provided by the [Generate Download
+   * endpoint](:method:genie/generatedownloadfullqueryresult)
+   */
+ @JsonIgnore private String downloadId;
+
+ /** Message ID */
+ @JsonIgnore private String messageId;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ public GenieGetDownloadFullQueryResultRequest setAttachmentId(String attachmentId) {
+ this.attachmentId = attachmentId;
+ return this;
+ }
+
+ public String getAttachmentId() {
+ return attachmentId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setConversationId(String conversationId) {
+ this.conversationId = conversationId;
+ return this;
+ }
+
+ public String getConversationId() {
+ return conversationId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setDownloadId(String downloadId) {
+ this.downloadId = downloadId;
+ return this;
+ }
+
+ public String getDownloadId() {
+ return downloadId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setMessageId(String messageId) {
+ this.messageId = messageId;
+ return this;
+ }
+
+ public String getMessageId() {
+ return messageId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetDownloadFullQueryResultRequest that = (GenieGetDownloadFullQueryResultRequest) o;
+ return Objects.equals(attachmentId, that.attachmentId)
+ && Objects.equals(conversationId, that.conversationId)
+ && Objects.equals(downloadId, that.downloadId)
+ && Objects.equals(messageId, that.messageId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attachmentId, conversationId, downloadId, messageId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetDownloadFullQueryResultRequest.class)
+ .add("attachmentId", attachmentId)
+ .add("conversationId", conversationId)
+ .add("downloadId", downloadId)
+ .add("messageId", messageId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java
new file mode 100755
index 000000000..490c5c518
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieGetDownloadFullQueryResultResponse {
+ /**
+ * SQL Statement Execution response. See [Get status, manifest, and result first
+ * chunk](:method:statementexecution/getstatement) for more details.
+ */
+ @JsonProperty("statement_response")
+ private com.databricks.sdk.service.sql.StatementResponse statementResponse;
+
+ public GenieGetDownloadFullQueryResultResponse setStatementResponse(
+ com.databricks.sdk.service.sql.StatementResponse statementResponse) {
+ this.statementResponse = statementResponse;
+ return this;
+ }
+
+ public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() {
+ return statementResponse;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetDownloadFullQueryResultResponse that = (GenieGetDownloadFullQueryResultResponse) o;
+ return Objects.equals(statementResponse, that.statementResponse);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(statementResponse);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetDownloadFullQueryResultResponse.class)
+ .add("statementResponse", statementResponse)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index 432a981a9..e18be2894 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -33,6 +33,20 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request)
}
}
+ @Override
+ public GenieSpace createSpace(GenieCreateSpaceRequest request) {
+ String path = "/api/2.0/genie/spaces";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, GenieSpace.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void deleteConversation(GenieDeleteConversationRequest request) {
String path =
@@ -102,6 +116,47 @@ public GenieGetMessageQueryResultResponse executeMessageQuery(
}
}
+ @Override
+ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads",
+ request.getSpaceId(),
+ request.getConversationId(),
+ request.getMessageId(),
+ request.getAttachmentId());
+ try {
+ Request req = new Request("POST", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GenieGenerateDownloadFullQueryResultResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads/%s",
+ request.getSpaceId(),
+ request.getConversationId(),
+ request.getMessageId(),
+ request.getAttachmentId(),
+ request.getDownloadId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GenieGetDownloadFullQueryResultResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GenieMessage getMessage(GenieGetConversationMessageRequest request) {
String path =
@@ -276,4 +331,18 @@ public void trashSpace(GenieTrashSpaceRequest request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public GenieSpace updateSpace(GenieUpdateSpaceRequest request) {
+ String path = String.format("/api/2.0/genie/spaces/%s", request.getSpaceId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, GenieSpace.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java
index 5ad10ce62..cc0a48ab5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java
@@ -5,6 +5,7 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
@Generated
@@ -21,6 +22,10 @@ public class GenieQueryAttachment {
@JsonProperty("last_updated_timestamp")
private Long lastUpdatedTimestamp;
+  /** Parameters of the generated SQL query — element type not declared here (raw Collection); confirm against the OpenAPI spec */
+ @JsonProperty("parameters")
+ private Collection parameters;
+
/** AI generated SQL query */
@JsonProperty("query")
private String query;
@@ -67,6 +72,15 @@ public Long getLastUpdatedTimestamp() {
return lastUpdatedTimestamp;
}
+ public GenieQueryAttachment setParameters(Collection parameters) {
+ this.parameters = parameters;
+ return this;
+ }
+
+ public Collection getParameters() {
+ return parameters;
+ }
+
public GenieQueryAttachment setQuery(String query) {
this.query = query;
return this;
@@ -111,6 +125,7 @@ public boolean equals(Object o) {
return Objects.equals(description, that.description)
&& Objects.equals(id, that.id)
&& Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
+ && Objects.equals(parameters, that.parameters)
&& Objects.equals(query, that.query)
&& Objects.equals(queryResultMetadata, that.queryResultMetadata)
&& Objects.equals(statementId, that.statementId)
@@ -120,7 +135,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- description, id, lastUpdatedTimestamp, query, queryResultMetadata, statementId, title);
+ description,
+ id,
+ lastUpdatedTimestamp,
+ parameters,
+ query,
+ queryResultMetadata,
+ statementId,
+ title);
}
@Override
@@ -129,6 +151,7 @@ public String toString() {
.add("description", description)
.add("id", id)
.add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .add("parameters", parameters)
.add("query", query)
.add("queryResultMetadata", queryResultMetadata)
.add("statementId", statementId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java
index 2ddc8b835..656cfabbf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java
@@ -10,6 +10,10 @@
@Generated
public class GenieSendMessageFeedbackRequest {
+ /** Optional text feedback that will be stored as a comment. */
+ @JsonProperty("comment")
+ private String comment;
+
/** The ID associated with the conversation. */
@JsonIgnore private String conversationId;
@@ -23,6 +27,15 @@ public class GenieSendMessageFeedbackRequest {
/** The ID associated with the Genie space where the message is located. */
@JsonIgnore private String spaceId;
+ public GenieSendMessageFeedbackRequest setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
public GenieSendMessageFeedbackRequest setConversationId(String conversationId) {
this.conversationId = conversationId;
return this;
@@ -64,7 +77,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenieSendMessageFeedbackRequest that = (GenieSendMessageFeedbackRequest) o;
- return Objects.equals(conversationId, that.conversationId)
+ return Objects.equals(comment, that.comment)
+ && Objects.equals(conversationId, that.conversationId)
&& Objects.equals(messageId, that.messageId)
&& Objects.equals(rating, that.rating)
&& Objects.equals(spaceId, that.spaceId);
@@ -72,12 +86,13 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(conversationId, messageId, rating, spaceId);
+ return Objects.hash(comment, conversationId, messageId, rating, spaceId);
}
@Override
public String toString() {
return new ToStringer(GenieSendMessageFeedbackRequest.class)
+ .add("comment", comment)
.add("conversationId", conversationId)
.add("messageId", messageId)
.add("rating", rating)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index d12aa918c..37455fc2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -22,6 +22,9 @@ public interface GenieService {
GenieMessage createMessage(
GenieCreateConversationMessageRequest genieCreateConversationMessageRequest);
+ /** Creates a Genie space from a serialized payload. */
+ GenieSpace createSpace(GenieCreateSpaceRequest genieCreateSpaceRequest);
+
/** Delete a conversation. */
void deleteConversation(GenieDeleteConversationRequest genieDeleteConversationRequest);
@@ -43,6 +46,30 @@ GenieGetMessageQueryResultResponse executeMessageAttachmentQuery(
GenieGetMessageQueryResultResponse executeMessageQuery(
GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest);
+ /**
+ * Initiates a new SQL execution and returns a `download_id` that you can use to track the
+ * progress of the download. The query result is stored in an external link and can be retrieved
+ * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. See [Execute
+ * Statement](:method:statementexecution/executestatement) for more details.
+ */
+ GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest);
+
+ /**
+   * After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) and
+ * successfully receiving a `download_id`, use this API to poll the download progress. When the
+ * download is complete, the API returns one or more external links to the query result files.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
+ * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
+ * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
+ * for more details.
+ */
+ GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest genieGetDownloadFullQueryResultRequest);
+
/** Get message from conversation. */
GenieMessage getMessage(GenieGetConversationMessageRequest genieGetConversationMessageRequest);
@@ -90,4 +117,7 @@ GenieStartConversationResponse startConversation(
/** Move a Genie Space to the trash. */
void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest);
+
+ /** Updates a Genie space with a serialized payload. */
+ GenieSpace updateSpace(GenieUpdateSpaceRequest genieUpdateSpaceRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
new file mode 100755
index 000000000..e8e67cedf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieUpdateSpaceRequest {
+ /** Optional description */
+ @JsonProperty("description")
+ private String description;
+
+ /** Serialized export model for the space contents (full replacement) */
+ @JsonProperty("serialized_space")
+ private String serializedSpace;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ /** Optional title override */
+ @JsonProperty("title")
+ private String title;
+
+ /** Optional warehouse override */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public GenieUpdateSpaceRequest setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public GenieUpdateSpaceRequest setSerializedSpace(String serializedSpace) {
+ this.serializedSpace = serializedSpace;
+ return this;
+ }
+
+ public String getSerializedSpace() {
+ return serializedSpace;
+ }
+
+ public GenieUpdateSpaceRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ public GenieUpdateSpaceRequest setTitle(String title) {
+ this.title = title;
+ return this;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public GenieUpdateSpaceRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieUpdateSpaceRequest that = (GenieUpdateSpaceRequest) o;
+ return Objects.equals(description, that.description)
+ && Objects.equals(serializedSpace, that.serializedSpace)
+ && Objects.equals(spaceId, that.spaceId)
+ && Objects.equals(title, that.title)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(description, serializedSpace, spaceId, title, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieUpdateSpaceRequest.class)
+ .add("description", description)
+ .add("serializedSpace", serializedSpace)
+ .add("spaceId", spaceId)
+ .add("title", title)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
new file mode 100755
index 000000000..95f6048f1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetPublishedDashboardEmbeddedRequest {
+ /** UUID identifying the published dashboard. */
+ @JsonIgnore private String dashboardId;
+
+ public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublishedDashboardEmbeddedRequest.class)
+ .add("dashboardId", dashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
index ed46478a1..a7bc6c10d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -23,6 +23,16 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) {
impl = mock;
}
+ public void getPublishedDashboardEmbedded(String dashboardId) {
+ getPublishedDashboardEmbedded(
+ new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId));
+ }
+
+ /** Get the current published dashboard within an embedded context. */
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ impl.getPublishedDashboardEmbedded(request);
+ }
+
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) {
return getPublishedDashboardTokenInfo(
new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
index 55a489702..171eb1e7f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
@@ -16,6 +16,21 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(
GetPublishedDashboardTokenInfoRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
index 3aa679410..98c1b546d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
@@ -12,6 +12,10 @@
*/
@Generated
public interface LakeviewEmbeddedService {
+ /** Get the current published dashboard within an embedded context. */
+ void getPublishedDashboardEmbedded(
+ GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest);
+
/**
* Get a required authorization details and scopes of a published dashboard to mint an OAuth
* token.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
index 8b6b10fc7..75e28eb70 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
@@ -18,6 +18,7 @@ public enum MessageErrorType {
DESCRIBE_QUERY_INVALID_SQL_ERROR,
DESCRIBE_QUERY_TIMEOUT,
DESCRIBE_QUERY_UNEXPECTED_FAILURE,
+ EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION,
FUNCTIONS_NOT_AVAILABLE_EXCEPTION,
FUNCTION_ARGUMENTS_INVALID_EXCEPTION,
FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION,
@@ -28,6 +29,9 @@ public enum MessageErrorType {
GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION,
GENERIC_SQL_EXEC_API_CALL_EXCEPTION,
ILLEGAL_PARAMETER_DEFINITION_EXCEPTION,
+ INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION,
+ INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION,
+ INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION,
INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION,
INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION,
INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
new file mode 100755
index 000000000..f041070b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PendingStatus {
+ /**
+ * The token to poll for result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ public PendingStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PendingStatus that = (PendingStatus) o;
+ return Objects.equals(dataToken, that.dataToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
new file mode 100755
index 000000000..e34c7af59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PollPublishedQueryStatusRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection tokens;
+
+ public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public PollPublishedQueryStatusRequest setTokens(Collection tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollPublishedQueryStatusRequest that = (PollPublishedQueryStatusRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollPublishedQueryStatusRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
new file mode 100755
index 000000000..778e1d961
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponse {
+ /** */
+ @JsonProperty("data")
+ private Collection data;
+
+ public PollQueryStatusResponse setData(Collection data) {
+ this.data = data;
+ return this;
+ }
+
+ public Collection getData() {
+ return data;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponse that = (PollQueryStatusResponse) o;
+ return Objects.equals(data, that.data);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(data);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
new file mode 100755
index 000000000..9de9b2743
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponseData {
+ /** */
+ @JsonProperty("status")
+ private QueryResponseStatus status;
+
+ public PollQueryStatusResponseData setStatus(QueryResponseStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public QueryResponseStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponseData that = (PollQueryStatusResponseData) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java
new file mode 100755
index 000000000..e2609ee6a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class QueryAttachmentParameter {
+ /** */
+ @JsonProperty("keyword")
+ private String keyword;
+
+ /** */
+ @JsonProperty("sql_type")
+ private String sqlType;
+
+ /** */
+ @JsonProperty("value")
+ private String value;
+
+ public QueryAttachmentParameter setKeyword(String keyword) {
+ this.keyword = keyword;
+ return this;
+ }
+
+ public String getKeyword() {
+ return keyword;
+ }
+
+ public QueryAttachmentParameter setSqlType(String sqlType) {
+ this.sqlType = sqlType;
+ return this;
+ }
+
+ public String getSqlType() {
+ return sqlType;
+ }
+
+ public QueryAttachmentParameter setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryAttachmentParameter that = (QueryAttachmentParameter) o;
+ return Objects.equals(keyword, that.keyword)
+ && Objects.equals(sqlType, that.sqlType)
+ && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(keyword, sqlType, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryAttachmentParameter.class)
+ .add("keyword", keyword)
+ .add("sqlType", sqlType)
+ .add("value", value)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
new file mode 100755
index 000000000..5ab84661d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Query execution APIs for AI / BI Dashboards */
+@Generated
+public class QueryExecutionAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class);
+
+ private final QueryExecutionService impl;
+
+ /** Regular-use constructor */
+ public QueryExecutionAPI(ApiClient apiClient) {
+ impl = new QueryExecutionImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public QueryExecutionAPI(QueryExecutionService mock) {
+ impl = mock;
+ }
+
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ String dashboardName, String dashboardRevisionId) {
+ return cancelPublishedQueryExecution(
+ new CancelPublishedQueryExecutionRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ return impl.cancelPublishedQueryExecution(request);
+ }
+
+ /** Execute a query for a published dashboard. */
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ impl.executePublishedDashboardQuery(request);
+ }
+
+ public PollQueryStatusResponse pollPublishedQueryStatus(
+ String dashboardName, String dashboardRevisionId) {
+ return pollPublishedQueryStatus(
+ new PollPublishedQueryStatusRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ return impl.pollPublishedQueryStatus(request);
+ }
+
+ public QueryExecutionService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
new file mode 100755
index 000000000..19efc614a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of QueryExecution */
+@Generated
+class QueryExecutionImpl implements QueryExecutionService {
+ private final ApiClient apiClient;
+
+ public QueryExecutionImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, CancelQueryExecutionResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, PollQueryStatusResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
new file mode 100755
index 000000000..d30cda5b6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
@@ -0,0 +1,26 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Query execution APIs for AI / BI Dashboards
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface QueryExecutionService {
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest);
+
+ /** Execute a query for a published dashboard. */
+ void executePublishedDashboardQuery(
+ ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest);
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ PollQueryStatusResponse pollPublishedQueryStatus(
+ PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
new file mode 100755
index 000000000..a57d202ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
@@ -0,0 +1,108 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class QueryResponseStatus {
+ /** */
+ @JsonProperty("canceled")
+ private Empty canceled;
+
+ /** */
+ @JsonProperty("closed")
+ private Empty closed;
+
+ /** */
+ @JsonProperty("pending")
+ private PendingStatus pending;
+
+ /**
+ * The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
+ * identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
+ * logging purpose to record the statement_id of all QueryResponseStatus.
+ */
+ @JsonProperty("statement_id")
+ private String statementId;
+
+ /** */
+ @JsonProperty("success")
+ private SuccessStatus success;
+
+ public QueryResponseStatus setCanceled(Empty canceled) {
+ this.canceled = canceled;
+ return this;
+ }
+
+ public Empty getCanceled() {
+ return canceled;
+ }
+
+ public QueryResponseStatus setClosed(Empty closed) {
+ this.closed = closed;
+ return this;
+ }
+
+ public Empty getClosed() {
+ return closed;
+ }
+
+ public QueryResponseStatus setPending(PendingStatus pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public PendingStatus getPending() {
+ return pending;
+ }
+
+ public QueryResponseStatus setStatementId(String statementId) {
+ this.statementId = statementId;
+ return this;
+ }
+
+ public String getStatementId() {
+ return statementId;
+ }
+
+ public QueryResponseStatus setSuccess(SuccessStatus success) {
+ this.success = success;
+ return this;
+ }
+
+ public SuccessStatus getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryResponseStatus that = (QueryResponseStatus) o;
+ return Objects.equals(canceled, that.canceled)
+ && Objects.equals(closed, that.closed)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(statementId, that.statementId)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(canceled, closed, pending, statementId, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryResponseStatus.class)
+ .add("canceled", canceled)
+ .add("closed", closed)
+ .add("pending", pending)
+ .add("statementId", statementId)
+ .add("success", success)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
new file mode 100755
index 000000000..c54d199d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SuccessStatus {
+ /**
+ * The token to poll for result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /** Whether the query result is truncated (either by byte limit or row limit) */
+ @JsonProperty("truncated")
+ private Boolean truncated;
+
+ public SuccessStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public SuccessStatus setTruncated(Boolean truncated) {
+ this.truncated = truncated;
+ return this;
+ }
+
+ public Boolean getTruncated() {
+ return truncated;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SuccessStatus that = (SuccessStatus) o;
+ return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, truncated);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SuccessStatus.class)
+ .add("dataToken", dataToken)
+ .add("truncated", truncated)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java
new file mode 100755
index 000000000..cd862ab79
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDatabaseBranchRequest {
+ /** */
+ @JsonProperty("database_branch")
+ private DatabaseBranch databaseBranch;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public CreateDatabaseBranchRequest setDatabaseBranch(DatabaseBranch databaseBranch) {
+ this.databaseBranch = databaseBranch;
+ return this;
+ }
+
+ public DatabaseBranch getDatabaseBranch() {
+ return databaseBranch;
+ }
+
+ public CreateDatabaseBranchRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseBranchRequest that = (CreateDatabaseBranchRequest) o;
+ return Objects.equals(databaseBranch, that.databaseBranch)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseBranch, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseBranchRequest.class)
+ .add("databaseBranch", databaseBranch)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java
new file mode 100755
index 000000000..b6437fc63
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java
@@ -0,0 +1,73 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDatabaseEndpointRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonProperty("database_endpoint")
+ private DatabaseEndpoint databaseEndpoint;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public CreateDatabaseEndpointRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public CreateDatabaseEndpointRequest setDatabaseEndpoint(DatabaseEndpoint databaseEndpoint) {
+ this.databaseEndpoint = databaseEndpoint;
+ return this;
+ }
+
+ public DatabaseEndpoint getDatabaseEndpoint() {
+ return databaseEndpoint;
+ }
+
+ public CreateDatabaseEndpointRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseEndpointRequest that = (CreateDatabaseEndpointRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(databaseEndpoint, that.databaseEndpoint)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, databaseEndpoint, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseEndpointRequest.class)
+ .add("branchId", branchId)
+ .add("databaseEndpoint", databaseEndpoint)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
index af69b9394..643688431 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -10,6 +11,11 @@
@Generated
public class CreateDatabaseInstanceRoleRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("database_instance_name")
+ private String databaseInstanceName;
+
/** */
@JsonProperty("database_instance_role")
private DatabaseInstanceRole databaseInstanceRole;
@@ -17,6 +23,15 @@ public class CreateDatabaseInstanceRoleRequest {
/** */
@JsonIgnore private String instanceName;
+ public CreateDatabaseInstanceRoleRequest setDatabaseInstanceName(String databaseInstanceName) {
+ this.databaseInstanceName = databaseInstanceName;
+ return this;
+ }
+
+ public String getDatabaseInstanceName() {
+ return databaseInstanceName;
+ }
+
public CreateDatabaseInstanceRoleRequest setDatabaseInstanceRole(
DatabaseInstanceRole databaseInstanceRole) {
this.databaseInstanceRole = databaseInstanceRole;
@@ -41,18 +56,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateDatabaseInstanceRoleRequest that = (CreateDatabaseInstanceRoleRequest) o;
- return Objects.equals(databaseInstanceRole, that.databaseInstanceRole)
+ return Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(databaseInstanceRole, that.databaseInstanceRole)
&& Objects.equals(instanceName, that.instanceName);
}
@Override
public int hashCode() {
- return Objects.hash(databaseInstanceRole, instanceName);
+ return Objects.hash(databaseInstanceName, databaseInstanceRole, instanceName);
}
@Override
public String toString() {
return new ToStringer(CreateDatabaseInstanceRoleRequest.class)
+ .add("databaseInstanceName", databaseInstanceName)
.add("databaseInstanceRole", databaseInstanceRole)
.add("instanceName", instanceName)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java
new file mode 100755
index 000000000..489b30295
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDatabaseProjectRequest {
+ /** */
+ @JsonProperty("database_project")
+ private DatabaseProject databaseProject;
+
+ public CreateDatabaseProjectRequest setDatabaseProject(DatabaseProject databaseProject) {
+ this.databaseProject = databaseProject;
+ return this;
+ }
+
+ public DatabaseProject getDatabaseProject() {
+ return databaseProject;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseProjectRequest that = (CreateDatabaseProjectRequest) o;
+ return Objects.equals(databaseProject, that.databaseProject);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseProject);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseProjectRequest.class)
+ .add("databaseProject", databaseProject)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java
new file mode 100755
index 000000000..61833a248
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CustomTag {
+ /** The key of the custom tag. */
+ @JsonProperty("key")
+ private String key;
+
+ /** The value of the custom tag. */
+ @JsonProperty("value")
+ private String value;
+
+ public CustomTag setKey(String key) {
+ this.key = key;
+ return this;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public CustomTag setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CustomTag that = (CustomTag) o;
+ return Objects.equals(key, that.key) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(key, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CustomTag.class).add("key", key).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index 8c96210c2..2d70135a5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -150,6 +150,11 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request)
impl.deleteSyncedDatabaseTable(request);
}
+ /** Failover the primary node of a Database Instance to a secondary. */
+ public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) {
+ return impl.failoverDatabaseInstance(request);
+ }
+
/** Find a Database Instance by uid. */
public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
return impl.findDatabaseInstanceByUid(request);
@@ -297,6 +302,12 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
return impl.updateDatabaseInstance(request);
}
+ /** Update a role for a Database Instance. */
+ public DatabaseInstanceRole updateDatabaseInstanceRole(
+ UpdateDatabaseInstanceRoleRequest request) {
+ return impl.updateDatabaseInstanceRole(request);
+ }
+
/** This API is currently unimplemented, but exposed for Terraform support. */
public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
return impl.updateSyncedDatabaseTable(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java
new file mode 100755
index 000000000..74abb5509
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java
@@ -0,0 +1,261 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
/**
 * A database branch of a database project. Plain value object mapped to/from the JSON wire format
 * via Jackson ({@code @JsonProperty}); setters are fluent (return {@code this}) and any field may
 * be {@code null} when unset. Equality, hash code, and string form cover every field.
 */
@Generated
public class DatabaseBranch {
  /** */
  @JsonProperty("branch_id")
  private String branchId;

  /** A timestamp indicating when the branch was created. */
  @JsonProperty("create_time")
  private String createTime;

  /** The branch's state, indicating if it is initializing, ready for use, or archived. */
  @JsonProperty("current_state")
  private String currentState;

  /**
   * Whether the branch is the project's default branch. This field is only returned on
   * create/update responses. See effective_default for the value that is actually applied to the
   * database branch.
   */
  @JsonProperty("default")
  private Boolean defaultValue;

  /** Whether the branch is the project's default branch. */
  @JsonProperty("effective_default")
  private Boolean effectiveDefault;

  /** Whether the branch is protected. */
  @JsonProperty("is_protected")
  private Boolean isProtected;

  /** The logical size of the branch. */
  @JsonProperty("logical_size_bytes")
  private Long logicalSizeBytes;

  /** The id of the parent branch */
  @JsonProperty("parent_id")
  private String parentId;

  /**
   * The Log Sequence Number (LSN) on the parent branch from which this branch was created. When
   * restoring a branch using the Restore Database Branch endpoint, this value isn't finalized until
   * all operations related to the restore have completed successfully.
   */
  @JsonProperty("parent_lsn")
  private String parentLsn;

  /** The point in time on the parent branch from which this branch was created. */
  @JsonProperty("parent_time")
  private String parentTime;

  /** */
  @JsonProperty("pending_state")
  private String pendingState;

  /** */
  @JsonProperty("project_id")
  private String projectId;

  /** A timestamp indicating when the `current_state` began. */
  @JsonProperty("state_change_time")
  private String stateChangeTime;

  /** A timestamp indicating when the branch was last updated. */
  @JsonProperty("update_time")
  private String updateTime;

  public DatabaseBranch setBranchId(String branchId) {
    this.branchId = branchId;
    return this;
  }

  public String getBranchId() {
    return branchId;
  }

  public DatabaseBranch setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }

  public String getCreateTime() {
    return createTime;
  }

  public DatabaseBranch setCurrentState(String currentState) {
    this.currentState = currentState;
    return this;
  }

  public String getCurrentState() {
    return currentState;
  }

  // Java field is named defaultValue because `default` is a reserved word; the wire name is
  // "default" (see the @JsonProperty annotation above).
  public DatabaseBranch setDefault(Boolean defaultValue) {
    this.defaultValue = defaultValue;
    return this;
  }

  public Boolean getDefault() {
    return defaultValue;
  }

  public DatabaseBranch setEffectiveDefault(Boolean effectiveDefault) {
    this.effectiveDefault = effectiveDefault;
    return this;
  }

  public Boolean getEffectiveDefault() {
    return effectiveDefault;
  }

  public DatabaseBranch setIsProtected(Boolean isProtected) {
    this.isProtected = isProtected;
    return this;
  }

  public Boolean getIsProtected() {
    return isProtected;
  }

  public DatabaseBranch setLogicalSizeBytes(Long logicalSizeBytes) {
    this.logicalSizeBytes = logicalSizeBytes;
    return this;
  }

  public Long getLogicalSizeBytes() {
    return logicalSizeBytes;
  }

  public DatabaseBranch setParentId(String parentId) {
    this.parentId = parentId;
    return this;
  }

  public String getParentId() {
    return parentId;
  }

  public DatabaseBranch setParentLsn(String parentLsn) {
    this.parentLsn = parentLsn;
    return this;
  }

  public String getParentLsn() {
    return parentLsn;
  }

  public DatabaseBranch setParentTime(String parentTime) {
    this.parentTime = parentTime;
    return this;
  }

  public String getParentTime() {
    return parentTime;
  }

  public DatabaseBranch setPendingState(String pendingState) {
    this.pendingState = pendingState;
    return this;
  }

  public String getPendingState() {
    return pendingState;
  }

  public DatabaseBranch setProjectId(String projectId) {
    this.projectId = projectId;
    return this;
  }

  public String getProjectId() {
    return projectId;
  }

  public DatabaseBranch setStateChangeTime(String stateChangeTime) {
    this.stateChangeTime = stateChangeTime;
    return this;
  }

  public String getStateChangeTime() {
    return stateChangeTime;
  }

  public DatabaseBranch setUpdateTime(String updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  public String getUpdateTime() {
    return updateTime;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    DatabaseBranch that = (DatabaseBranch) o;
    return Objects.equals(branchId, that.branchId)
        && Objects.equals(createTime, that.createTime)
        && Objects.equals(currentState, that.currentState)
        && Objects.equals(defaultValue, that.defaultValue)
        && Objects.equals(effectiveDefault, that.effectiveDefault)
        && Objects.equals(isProtected, that.isProtected)
        && Objects.equals(logicalSizeBytes, that.logicalSizeBytes)
        && Objects.equals(parentId, that.parentId)
        && Objects.equals(parentLsn, that.parentLsn)
        && Objects.equals(parentTime, that.parentTime)
        && Objects.equals(pendingState, that.pendingState)
        && Objects.equals(projectId, that.projectId)
        && Objects.equals(stateChangeTime, that.stateChangeTime)
        && Objects.equals(updateTime, that.updateTime);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        branchId,
        createTime,
        currentState,
        defaultValue,
        effectiveDefault,
        isProtected,
        logicalSizeBytes,
        parentId,
        parentLsn,
        parentTime,
        pendingState,
        projectId,
        stateChangeTime,
        updateTime);
  }

  @Override
  public String toString() {
    return new ToStringer(DatabaseBranch.class)
        .add("branchId", branchId)
        .add("createTime", createTime)
        .add("currentState", currentState)
        .add("defaultValue", defaultValue)
        .add("effectiveDefault", effectiveDefault)
        .add("isProtected", isProtected)
        .add("logicalSizeBytes", logicalSizeBytes)
        .add("parentId", parentId)
        .add("parentLsn", parentLsn)
        .add("parentTime", parentTime)
        .add("pendingState", pendingState)
        .add("projectId", projectId)
        .add("stateChangeTime", stateChangeTime)
        .add("updateTime", updateTime)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
index 06049eb18..fe1a9ecc4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
@@ -13,6 +13,10 @@ public class DatabaseCatalog {
@JsonProperty("create_database_if_not_exists")
private Boolean createDatabaseIfNotExists;
+ /** The branch_id of the database branch associated with the catalog. */
+ @JsonProperty("database_branch_id")
+ private String databaseBranchId;
+
/** The name of the DatabaseInstance housing the database. */
@JsonProperty("database_instance_name")
private String databaseInstanceName;
@@ -21,6 +25,10 @@ public class DatabaseCatalog {
@JsonProperty("database_name")
private String databaseName;
+ /** The project_id of the database project associated with the catalog. */
+ @JsonProperty("database_project_id")
+ private String databaseProjectId;
+
/** The name of the catalog in UC. */
@JsonProperty("name")
private String name;
@@ -38,6 +46,15 @@ public Boolean getCreateDatabaseIfNotExists() {
return createDatabaseIfNotExists;
}
+ public DatabaseCatalog setDatabaseBranchId(String databaseBranchId) {
+ this.databaseBranchId = databaseBranchId;
+ return this;
+ }
+
+ public String getDatabaseBranchId() {
+ return databaseBranchId;
+ }
+
public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) {
this.databaseInstanceName = databaseInstanceName;
return this;
@@ -56,6 +73,15 @@ public String getDatabaseName() {
return databaseName;
}
+ public DatabaseCatalog setDatabaseProjectId(String databaseProjectId) {
+ this.databaseProjectId = databaseProjectId;
+ return this;
+ }
+
+ public String getDatabaseProjectId() {
+ return databaseProjectId;
+ }
+
public DatabaseCatalog setName(String name) {
this.name = name;
return this;
@@ -80,23 +106,34 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
DatabaseCatalog that = (DatabaseCatalog) o;
return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists)
+ && Objects.equals(databaseBranchId, that.databaseBranchId)
&& Objects.equals(databaseInstanceName, that.databaseInstanceName)
&& Objects.equals(databaseName, that.databaseName)
+ && Objects.equals(databaseProjectId, that.databaseProjectId)
&& Objects.equals(name, that.name)
&& Objects.equals(uid, that.uid);
}
@Override
public int hashCode() {
- return Objects.hash(createDatabaseIfNotExists, databaseInstanceName, databaseName, name, uid);
+ return Objects.hash(
+ createDatabaseIfNotExists,
+ databaseBranchId,
+ databaseInstanceName,
+ databaseName,
+ databaseProjectId,
+ name,
+ uid);
}
@Override
public String toString() {
return new ToStringer(DatabaseCatalog.class)
.add("createDatabaseIfNotExists", createDatabaseIfNotExists)
+ .add("databaseBranchId", databaseBranchId)
.add("databaseInstanceName", databaseInstanceName)
.add("databaseName", databaseName)
+ .add("databaseProjectId", databaseProjectId)
.add("name", name)
.add("uid", uid)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java
new file mode 100755
index 000000000..918b6c5a2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java
@@ -0,0 +1,327 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
/**
 * A compute endpoint of a database project. Plain value object mapped to/from the JSON wire format
 * via Jackson ({@code @JsonProperty}); setters are fluent (return {@code this}) and any field may
 * be {@code null} when unset. Equality, hash code, and string form cover every field.
 */
@Generated
public class DatabaseEndpoint {
  /** The maximum number of Compute Units. */
  @JsonProperty("autoscaling_limit_max_cu")
  private Double autoscalingLimitMaxCu;

  /** The minimum number of Compute Units. */
  @JsonProperty("autoscaling_limit_min_cu")
  private Double autoscalingLimitMinCu;

  /** */
  @JsonProperty("branch_id")
  private String branchId;

  /** A timestamp indicating when the compute endpoint was created. */
  @JsonProperty("create_time")
  private String createTime;

  /** */
  @JsonProperty("current_state")
  private DatabaseEndpointState currentState;

  /**
   * Whether to restrict connections to the compute endpoint. Enabling this option schedules a
   * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or
   * console action.
   */
  @JsonProperty("disabled")
  private Boolean disabled;

  /** */
  @JsonProperty("endpoint_id")
  private String endpointId;

  /**
   * The hostname of the compute endpoint. This is the hostname specified when connecting to a
   * database.
   */
  @JsonProperty("host")
  private String host;

  /** A timestamp indicating when the compute endpoint was last active. */
  @JsonProperty("last_active_time")
  private String lastActiveTime;

  /** */
  @JsonProperty("pending_state")
  private DatabaseEndpointState pendingState;

  /** */
  @JsonProperty("pooler_mode")
  private DatabaseEndpointPoolerMode poolerMode;

  /** */
  @JsonProperty("project_id")
  private String projectId;

  /** */
  @JsonProperty("settings")
  private DatabaseEndpointSettings settings;

  /** A timestamp indicating when the compute endpoint was last started. */
  @JsonProperty("start_time")
  private String startTime;

  /** A timestamp indicating when the compute endpoint was last suspended. */
  @JsonProperty("suspend_time")
  private String suspendTime;

  /** Duration of inactivity after which the compute endpoint is automatically suspended. */
  @JsonProperty("suspend_timeout_duration")
  private String suspendTimeoutDuration;

  /**
   * The compute endpoint type.
   *
   * <p>NOTE (from the API spec): if the server should apply a default value for the type, either
   * add an effective_type field set by the server, or make this field REQUIRED.
   */
  @JsonProperty("type")
  private DatabaseEndpointType typeValue;

  /** A timestamp indicating when the compute endpoint was last updated. */
  @JsonProperty("update_time")
  private String updateTime;

  public DatabaseEndpoint setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) {
    this.autoscalingLimitMaxCu = autoscalingLimitMaxCu;
    return this;
  }

  public Double getAutoscalingLimitMaxCu() {
    return autoscalingLimitMaxCu;
  }

  public DatabaseEndpoint setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) {
    this.autoscalingLimitMinCu = autoscalingLimitMinCu;
    return this;
  }

  public Double getAutoscalingLimitMinCu() {
    return autoscalingLimitMinCu;
  }

  public DatabaseEndpoint setBranchId(String branchId) {
    this.branchId = branchId;
    return this;
  }

  public String getBranchId() {
    return branchId;
  }

  public DatabaseEndpoint setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }

  public String getCreateTime() {
    return createTime;
  }

  public DatabaseEndpoint setCurrentState(DatabaseEndpointState currentState) {
    this.currentState = currentState;
    return this;
  }

  public DatabaseEndpointState getCurrentState() {
    return currentState;
  }

  public DatabaseEndpoint setDisabled(Boolean disabled) {
    this.disabled = disabled;
    return this;
  }

  public Boolean getDisabled() {
    return disabled;
  }

  public DatabaseEndpoint setEndpointId(String endpointId) {
    this.endpointId = endpointId;
    return this;
  }

  public String getEndpointId() {
    return endpointId;
  }

  public DatabaseEndpoint setHost(String host) {
    this.host = host;
    return this;
  }

  public String getHost() {
    return host;
  }

  public DatabaseEndpoint setLastActiveTime(String lastActiveTime) {
    this.lastActiveTime = lastActiveTime;
    return this;
  }

  public String getLastActiveTime() {
    return lastActiveTime;
  }

  public DatabaseEndpoint setPendingState(DatabaseEndpointState pendingState) {
    this.pendingState = pendingState;
    return this;
  }

  public DatabaseEndpointState getPendingState() {
    return pendingState;
  }

  public DatabaseEndpoint setPoolerMode(DatabaseEndpointPoolerMode poolerMode) {
    this.poolerMode = poolerMode;
    return this;
  }

  public DatabaseEndpointPoolerMode getPoolerMode() {
    return poolerMode;
  }

  public DatabaseEndpoint setProjectId(String projectId) {
    this.projectId = projectId;
    return this;
  }

  public String getProjectId() {
    return projectId;
  }

  public DatabaseEndpoint setSettings(DatabaseEndpointSettings settings) {
    this.settings = settings;
    return this;
  }

  public DatabaseEndpointSettings getSettings() {
    return settings;
  }

  public DatabaseEndpoint setStartTime(String startTime) {
    this.startTime = startTime;
    return this;
  }

  public String getStartTime() {
    return startTime;
  }

  public DatabaseEndpoint setSuspendTime(String suspendTime) {
    this.suspendTime = suspendTime;
    return this;
  }

  public String getSuspendTime() {
    return suspendTime;
  }

  public DatabaseEndpoint setSuspendTimeoutDuration(String suspendTimeoutDuration) {
    this.suspendTimeoutDuration = suspendTimeoutDuration;
    return this;
  }

  public String getSuspendTimeoutDuration() {
    return suspendTimeoutDuration;
  }

  // Java field is named typeValue because `type` shadows common usage; the wire name is "type"
  // (see the @JsonProperty annotation above) and the accessor pair keeps the setType/getType names.
  public DatabaseEndpoint setType(DatabaseEndpointType typeValue) {
    this.typeValue = typeValue;
    return this;
  }

  public DatabaseEndpointType getType() {
    return typeValue;
  }

  public DatabaseEndpoint setUpdateTime(String updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  public String getUpdateTime() {
    return updateTime;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    DatabaseEndpoint that = (DatabaseEndpoint) o;
    return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu)
        && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu)
        && Objects.equals(branchId, that.branchId)
        && Objects.equals(createTime, that.createTime)
        && Objects.equals(currentState, that.currentState)
        && Objects.equals(disabled, that.disabled)
        && Objects.equals(endpointId, that.endpointId)
        && Objects.equals(host, that.host)
        && Objects.equals(lastActiveTime, that.lastActiveTime)
        && Objects.equals(pendingState, that.pendingState)
        && Objects.equals(poolerMode, that.poolerMode)
        && Objects.equals(projectId, that.projectId)
        && Objects.equals(settings, that.settings)
        && Objects.equals(startTime, that.startTime)
        && Objects.equals(suspendTime, that.suspendTime)
        && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration)
        && Objects.equals(typeValue, that.typeValue)
        && Objects.equals(updateTime, that.updateTime);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        autoscalingLimitMaxCu,
        autoscalingLimitMinCu,
        branchId,
        createTime,
        currentState,
        disabled,
        endpointId,
        host,
        lastActiveTime,
        pendingState,
        poolerMode,
        projectId,
        settings,
        startTime,
        suspendTime,
        suspendTimeoutDuration,
        typeValue,
        updateTime);
  }

  @Override
  public String toString() {
    return new ToStringer(DatabaseEndpoint.class)
        .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu)
        .add("autoscalingLimitMinCu", autoscalingLimitMinCu)
        .add("branchId", branchId)
        .add("createTime", createTime)
        .add("currentState", currentState)
        .add("disabled", disabled)
        .add("endpointId", endpointId)
        .add("host", host)
        .add("lastActiveTime", lastActiveTime)
        .add("pendingState", pendingState)
        .add("poolerMode", poolerMode)
        .add("projectId", projectId)
        .add("settings", settings)
        .add("startTime", startTime)
        .add("suspendTime", suspendTime)
        .add("suspendTimeoutDuration", suspendTimeoutDuration)
        .add("typeValue", typeValue)
        .add("updateTime", updateTime)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java
new file mode 100755
index 000000000..09bbcc3c1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
/** The connection pooler mode. Lakebase supports PgBouncer in `transaction` mode only. */
@Generated
public enum DatabaseEndpointPoolerMode {
  /** PgBouncer "transaction" pooling mode (the only supported mode). */
  TRANSACTION,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java
new file mode 100755
index 000000000..dd6cfac29
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Map;
+import java.util.Objects;
+
+/** A collection of settings for a compute endpoint */
+@Generated
+public class DatabaseEndpointSettings {
+ /** A raw representation of Postgres settings. */
+ @JsonProperty("pg_settings")
+ private Map pgSettings;
+
+ /** A raw representation of PgBouncer settings. */
+ @JsonProperty("pgbouncer_settings")
+ private Map pgbouncerSettings;
+
+ public DatabaseEndpointSettings setPgSettings(Map pgSettings) {
+ this.pgSettings = pgSettings;
+ return this;
+ }
+
+ public Map getPgSettings() {
+ return pgSettings;
+ }
+
+ public DatabaseEndpointSettings setPgbouncerSettings(Map pgbouncerSettings) {
+ this.pgbouncerSettings = pgbouncerSettings;
+ return this;
+ }
+
+ public Map getPgbouncerSettings() {
+ return pgbouncerSettings;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseEndpointSettings that = (DatabaseEndpointSettings) o;
+ return Objects.equals(pgSettings, that.pgSettings)
+ && Objects.equals(pgbouncerSettings, that.pgbouncerSettings);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pgSettings, pgbouncerSettings);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseEndpointSettings.class)
+ .add("pgSettings", pgSettings)
+ .add("pgbouncerSettings", pgbouncerSettings)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java
new file mode 100755
index 000000000..9227e98ad
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
/** The state of the compute endpoint. Carried in `current_state` and `pending_state`. */
@Generated
public enum DatabaseEndpointState {
  ACTIVE,
  IDLE,
  INIT,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java
new file mode 100755
index 000000000..937c6db36
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
/** The compute endpoint type. Either `read_write` or `read_only`. */
@Generated
public enum DatabaseEndpointType {
  READ_ONLY,
  READ_WRITE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
index f58d3a0bc..57faa5809 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
@@ -156,6 +156,20 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request)
}
}
+ @Override
+ public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) {
+ String path = String.format("/api/2.0/database/instances/%s/failover", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseInstance.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
String path = "/api/2.0/database/instances:findByUid";
@@ -335,6 +349,25 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
}
}
+ @Override
+ public DatabaseInstanceRole updateDatabaseInstanceRole(
+ UpdateDatabaseInstanceRoleRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/instances/%s/roles/%s",
+ request.getInstanceName(), request.getName());
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getDatabaseInstanceRole()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseInstanceRole.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
String path = String.format("/api/2.0/database/synced_tables/%s", request.getName());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
index 077608170..f52760290 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
@@ -29,10 +29,21 @@ public class DatabaseInstance {
@JsonProperty("creator")
private String creator;
+ /**
+ * Custom tags associated with the instance. This field is only included on create and update
+ * responses.
+ */
+ @JsonProperty("custom_tags")
+ private Collection customTags;
+
/** Deprecated. The sku of the instance; this field will always match the value of capacity. */
@JsonProperty("effective_capacity")
private String effectiveCapacity;
+ /** The recorded custom tags associated with the instance. */
+ @JsonProperty("effective_custom_tags")
+ private Collection effectiveCustomTags;
+
/** Whether the instance has PG native password login enabled. */
@JsonProperty("effective_enable_pg_native_login")
private Boolean effectiveEnablePgNativeLogin;
@@ -59,6 +70,10 @@ public class DatabaseInstance {
@JsonProperty("effective_stopped")
private Boolean effectiveStopped;
+ /** The policy that is applied to the instance. */
+ @JsonProperty("effective_usage_policy_id")
+ private String effectiveUsagePolicyId;
+
/** Whether to enable PG native password login on the instance. Defaults to false. */
@JsonProperty("enable_pg_native_login")
private Boolean enablePgNativeLogin;
@@ -121,6 +136,10 @@ public class DatabaseInstance {
@JsonProperty("uid")
private String uid;
+ /** The desired usage policy to associate with the instance. */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
public DatabaseInstance setCapacity(String capacity) {
this.capacity = capacity;
return this;
@@ -157,6 +176,15 @@ public String getCreator() {
return creator;
}
+ public DatabaseInstance setCustomTags(Collection customTags) {
+ this.customTags = customTags;
+ return this;
+ }
+
+ public Collection getCustomTags() {
+ return customTags;
+ }
+
public DatabaseInstance setEffectiveCapacity(String effectiveCapacity) {
this.effectiveCapacity = effectiveCapacity;
return this;
@@ -166,6 +194,15 @@ public String getEffectiveCapacity() {
return effectiveCapacity;
}
+ public DatabaseInstance setEffectiveCustomTags(Collection effectiveCustomTags) {
+ this.effectiveCustomTags = effectiveCustomTags;
+ return this;
+ }
+
+ public Collection getEffectiveCustomTags() {
+ return effectiveCustomTags;
+ }
+
public DatabaseInstance setEffectiveEnablePgNativeLogin(Boolean effectiveEnablePgNativeLogin) {
this.effectiveEnablePgNativeLogin = effectiveEnablePgNativeLogin;
return this;
@@ -212,6 +249,15 @@ public Boolean getEffectiveStopped() {
return effectiveStopped;
}
+ public DatabaseInstance setEffectiveUsagePolicyId(String effectiveUsagePolicyId) {
+ this.effectiveUsagePolicyId = effectiveUsagePolicyId;
+ return this;
+ }
+
+ public String getEffectiveUsagePolicyId() {
+ return effectiveUsagePolicyId;
+ }
+
public DatabaseInstance setEnablePgNativeLogin(Boolean enablePgNativeLogin) {
this.enablePgNativeLogin = enablePgNativeLogin;
return this;
@@ -320,6 +366,15 @@ public String getUid() {
return uid;
}
+ public DatabaseInstance setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -329,13 +384,16 @@ public boolean equals(Object o) {
&& Objects.equals(childInstanceRefs, that.childInstanceRefs)
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(creator, that.creator)
+ && Objects.equals(customTags, that.customTags)
&& Objects.equals(effectiveCapacity, that.effectiveCapacity)
+ && Objects.equals(effectiveCustomTags, that.effectiveCustomTags)
&& Objects.equals(effectiveEnablePgNativeLogin, that.effectiveEnablePgNativeLogin)
&& Objects.equals(
effectiveEnableReadableSecondaries, that.effectiveEnableReadableSecondaries)
&& Objects.equals(effectiveNodeCount, that.effectiveNodeCount)
&& Objects.equals(effectiveRetentionWindowInDays, that.effectiveRetentionWindowInDays)
&& Objects.equals(effectiveStopped, that.effectiveStopped)
+ && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId)
&& Objects.equals(enablePgNativeLogin, that.enablePgNativeLogin)
&& Objects.equals(enableReadableSecondaries, that.enableReadableSecondaries)
&& Objects.equals(name, that.name)
@@ -347,7 +405,8 @@ public boolean equals(Object o) {
&& Objects.equals(retentionWindowInDays, that.retentionWindowInDays)
&& Objects.equals(state, that.state)
&& Objects.equals(stopped, that.stopped)
- && Objects.equals(uid, that.uid);
+ && Objects.equals(uid, that.uid)
+ && Objects.equals(usagePolicyId, that.usagePolicyId);
}
@Override
@@ -357,12 +416,15 @@ public int hashCode() {
childInstanceRefs,
creationTime,
creator,
+ customTags,
effectiveCapacity,
+ effectiveCustomTags,
effectiveEnablePgNativeLogin,
effectiveEnableReadableSecondaries,
effectiveNodeCount,
effectiveRetentionWindowInDays,
effectiveStopped,
+ effectiveUsagePolicyId,
enablePgNativeLogin,
enableReadableSecondaries,
name,
@@ -374,7 +436,8 @@ public int hashCode() {
retentionWindowInDays,
state,
stopped,
- uid);
+ uid,
+ usagePolicyId);
}
@Override
@@ -384,12 +447,15 @@ public String toString() {
.add("childInstanceRefs", childInstanceRefs)
.add("creationTime", creationTime)
.add("creator", creator)
+ .add("customTags", customTags)
.add("effectiveCapacity", effectiveCapacity)
+ .add("effectiveCustomTags", effectiveCustomTags)
.add("effectiveEnablePgNativeLogin", effectiveEnablePgNativeLogin)
.add("effectiveEnableReadableSecondaries", effectiveEnableReadableSecondaries)
.add("effectiveNodeCount", effectiveNodeCount)
.add("effectiveRetentionWindowInDays", effectiveRetentionWindowInDays)
.add("effectiveStopped", effectiveStopped)
+ .add("effectiveUsagePolicyId", effectiveUsagePolicyId)
.add("enablePgNativeLogin", enablePgNativeLogin)
.add("enableReadableSecondaries", enableReadableSecondaries)
.add("name", name)
@@ -402,6 +468,7 @@ public String toString() {
.add("state", state)
.add("stopped", stopped)
.add("uid", uid)
+ .add("usagePolicyId", usagePolicyId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java
index cc9b99b4c..9f0c7010a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java
@@ -10,14 +10,22 @@
/** A DatabaseInstanceRole represents a Postgres role in a database instance. */
@Generated
public class DatabaseInstanceRole {
- /** API-exposed Postgres role attributes */
+ /** The desired API-exposed Postgres role attribute to associate with the role. Optional. */
@JsonProperty("attributes")
private DatabaseInstanceRoleAttributes attributes;
+ /** The attributes that are applied to the role. */
+ @JsonProperty("effective_attributes")
+ private DatabaseInstanceRoleAttributes effectiveAttributes;
+
/** The type of the role. */
@JsonProperty("identity_type")
private DatabaseInstanceRoleIdentityType identityType;
+ /** */
+ @JsonProperty("instance_name")
+ private String instanceName;
+
/** An enum value for a standard role that this role is a member of. */
@JsonProperty("membership_role")
private DatabaseInstanceRoleMembershipRole membershipRole;
@@ -35,6 +43,16 @@ public DatabaseInstanceRoleAttributes getAttributes() {
return attributes;
}
+ public DatabaseInstanceRole setEffectiveAttributes(
+ DatabaseInstanceRoleAttributes effectiveAttributes) {
+ this.effectiveAttributes = effectiveAttributes;
+ return this;
+ }
+
+ public DatabaseInstanceRoleAttributes getEffectiveAttributes() {
+ return effectiveAttributes;
+ }
+
public DatabaseInstanceRole setIdentityType(DatabaseInstanceRoleIdentityType identityType) {
this.identityType = identityType;
return this;
@@ -44,6 +62,15 @@ public DatabaseInstanceRoleIdentityType getIdentityType() {
return identityType;
}
+ public DatabaseInstanceRole setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
public DatabaseInstanceRole setMembershipRole(DatabaseInstanceRoleMembershipRole membershipRole) {
this.membershipRole = membershipRole;
return this;
@@ -68,21 +95,26 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
DatabaseInstanceRole that = (DatabaseInstanceRole) o;
return Objects.equals(attributes, that.attributes)
+ && Objects.equals(effectiveAttributes, that.effectiveAttributes)
&& Objects.equals(identityType, that.identityType)
+ && Objects.equals(instanceName, that.instanceName)
&& Objects.equals(membershipRole, that.membershipRole)
&& Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(attributes, identityType, membershipRole, name);
+ return Objects.hash(
+ attributes, effectiveAttributes, identityType, instanceName, membershipRole, name);
}
@Override
public String toString() {
return new ToStringer(DatabaseInstanceRole.class)
.add("attributes", attributes)
+ .add("effectiveAttributes", effectiveAttributes)
.add("identityType", identityType)
+ .add("instanceName", instanceName)
.add("membershipRole", membershipRole)
.add("name", name)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java
new file mode 100755
index 000000000..cf2969c40
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java
@@ -0,0 +1,347 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DatabaseProject {
+ /** The logical size limit for a branch. */
+ @JsonProperty("branch_logical_size_limit_bytes")
+ private Long branchLogicalSizeLimitBytes;
+
+ /**
+ * The desired budget policy to associate with the project. This field is only returned on
+ * create/update responses, and represents the customer provided budget policy. See
+ * effective_budget_policy_id for the policy that is actually applied to the project.
+ */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
+ /** The most recent time when any endpoint of this project was active. */
+ @JsonProperty("compute_last_active_time")
+ private String computeLastActiveTime;
+
+ /** A timestamp indicating when the project was created. */
+ @JsonProperty("create_time")
+ private String createTime;
+
+ /** Custom tags associated with the project. */
+ @JsonProperty("custom_tags")
+ private Collection<DatabaseProjectCustomTag> customTags;
+
+ /** */
+ @JsonProperty("default_endpoint_settings")
+ private DatabaseProjectDefaultEndpointSettings defaultEndpointSettings;
+
+ /** Human-readable project name. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** The policy that is applied to the project. */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
+ /** */
+ @JsonProperty("effective_default_endpoint_settings")
+ private DatabaseProjectDefaultEndpointSettings effectiveDefaultEndpointSettings;
+
+ /** */
+ @JsonProperty("effective_display_name")
+ private String effectiveDisplayName;
+
+ /** */
+ @JsonProperty("effective_history_retention_duration")
+ private String effectiveHistoryRetentionDuration;
+
+ /** */
+ @JsonProperty("effective_pg_version")
+ private Long effectivePgVersion;
+
+ /** */
+ @JsonProperty("effective_settings")
+ private DatabaseProjectSettings effectiveSettings;
+
+ /**
+ * The number of seconds to retain the shared history for point in time recovery for all branches
+ * in this project.
+ */
+ @JsonProperty("history_retention_duration")
+ private String historyRetentionDuration;
+
+ /** The major Postgres version number. */
+ @JsonProperty("pg_version")
+ private Long pgVersion;
+
+ /** */
+ @JsonProperty("project_id")
+ private String projectId;
+
+ /** */
+ @JsonProperty("settings")
+ private DatabaseProjectSettings settings;
+
+ /**
+ * The current space occupied by the project in storage. Synthetic storage size combines the
+ * logical data size and Write-Ahead Log (WAL) size for all branches in a project.
+ */
+ @JsonProperty("synthetic_storage_size_bytes")
+ private Long syntheticStorageSizeBytes;
+
+ /** A timestamp indicating when the project was last updated. */
+ @JsonProperty("update_time")
+ private String updateTime;
+
+ public DatabaseProject setBranchLogicalSizeLimitBytes(Long branchLogicalSizeLimitBytes) {
+ this.branchLogicalSizeLimitBytes = branchLogicalSizeLimitBytes;
+ return this;
+ }
+
+ public Long getBranchLogicalSizeLimitBytes() {
+ return branchLogicalSizeLimitBytes;
+ }
+
+ public DatabaseProject setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
+ public DatabaseProject setComputeLastActiveTime(String computeLastActiveTime) {
+ this.computeLastActiveTime = computeLastActiveTime;
+ return this;
+ }
+
+ public String getComputeLastActiveTime() {
+ return computeLastActiveTime;
+ }
+
+ public DatabaseProject setCreateTime(String createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public String getCreateTime() {
+ return createTime;
+ }
+
+ public DatabaseProject setCustomTags(Collection<DatabaseProjectCustomTag> customTags) {
+ this.customTags = customTags;
+ return this;
+ }
+
+ public Collection<DatabaseProjectCustomTag> getCustomTags() {
+ return customTags;
+ }
+
+ public DatabaseProject setDefaultEndpointSettings(
+ DatabaseProjectDefaultEndpointSettings defaultEndpointSettings) {
+ this.defaultEndpointSettings = defaultEndpointSettings;
+ return this;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings getDefaultEndpointSettings() {
+ return defaultEndpointSettings;
+ }
+
+ public DatabaseProject setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public DatabaseProject setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
+ public DatabaseProject setEffectiveDefaultEndpointSettings(
+ DatabaseProjectDefaultEndpointSettings effectiveDefaultEndpointSettings) {
+ this.effectiveDefaultEndpointSettings = effectiveDefaultEndpointSettings;
+ return this;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings getEffectiveDefaultEndpointSettings() {
+ return effectiveDefaultEndpointSettings;
+ }
+
+ public DatabaseProject setEffectiveDisplayName(String effectiveDisplayName) {
+ this.effectiveDisplayName = effectiveDisplayName;
+ return this;
+ }
+
+ public String getEffectiveDisplayName() {
+ return effectiveDisplayName;
+ }
+
+ public DatabaseProject setEffectiveHistoryRetentionDuration(
+ String effectiveHistoryRetentionDuration) {
+ this.effectiveHistoryRetentionDuration = effectiveHistoryRetentionDuration;
+ return this;
+ }
+
+ public String getEffectiveHistoryRetentionDuration() {
+ return effectiveHistoryRetentionDuration;
+ }
+
+ public DatabaseProject setEffectivePgVersion(Long effectivePgVersion) {
+ this.effectivePgVersion = effectivePgVersion;
+ return this;
+ }
+
+ public Long getEffectivePgVersion() {
+ return effectivePgVersion;
+ }
+
+ public DatabaseProject setEffectiveSettings(DatabaseProjectSettings effectiveSettings) {
+ this.effectiveSettings = effectiveSettings;
+ return this;
+ }
+
+ public DatabaseProjectSettings getEffectiveSettings() {
+ return effectiveSettings;
+ }
+
+ public DatabaseProject setHistoryRetentionDuration(String historyRetentionDuration) {
+ this.historyRetentionDuration = historyRetentionDuration;
+ return this;
+ }
+
+ public String getHistoryRetentionDuration() {
+ return historyRetentionDuration;
+ }
+
+ public DatabaseProject setPgVersion(Long pgVersion) {
+ this.pgVersion = pgVersion;
+ return this;
+ }
+
+ public Long getPgVersion() {
+ return pgVersion;
+ }
+
+ public DatabaseProject setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ public DatabaseProject setSettings(DatabaseProjectSettings settings) {
+ this.settings = settings;
+ return this;
+ }
+
+ public DatabaseProjectSettings getSettings() {
+ return settings;
+ }
+
+ public DatabaseProject setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) {
+ this.syntheticStorageSizeBytes = syntheticStorageSizeBytes;
+ return this;
+ }
+
+ public Long getSyntheticStorageSizeBytes() {
+ return syntheticStorageSizeBytes;
+ }
+
+ public DatabaseProject setUpdateTime(String updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public String getUpdateTime() {
+ return updateTime;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseProject that = (DatabaseProject) o;
+ return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes)
+ && Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(computeLastActiveTime, that.computeLastActiveTime)
+ && Objects.equals(createTime, that.createTime)
+ && Objects.equals(customTags, that.customTags)
+ && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
+ && Objects.equals(effectiveDefaultEndpointSettings, that.effectiveDefaultEndpointSettings)
+ && Objects.equals(effectiveDisplayName, that.effectiveDisplayName)
+ && Objects.equals(effectiveHistoryRetentionDuration, that.effectiveHistoryRetentionDuration)
+ && Objects.equals(effectivePgVersion, that.effectivePgVersion)
+ && Objects.equals(effectiveSettings, that.effectiveSettings)
+ && Objects.equals(historyRetentionDuration, that.historyRetentionDuration)
+ && Objects.equals(pgVersion, that.pgVersion)
+ && Objects.equals(projectId, that.projectId)
+ && Objects.equals(settings, that.settings)
+ && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes)
+ && Objects.equals(updateTime, that.updateTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ branchLogicalSizeLimitBytes,
+ budgetPolicyId,
+ computeLastActiveTime,
+ createTime,
+ customTags,
+ defaultEndpointSettings,
+ displayName,
+ effectiveBudgetPolicyId,
+ effectiveDefaultEndpointSettings,
+ effectiveDisplayName,
+ effectiveHistoryRetentionDuration,
+ effectivePgVersion,
+ effectiveSettings,
+ historyRetentionDuration,
+ pgVersion,
+ projectId,
+ settings,
+ syntheticStorageSizeBytes,
+ updateTime);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseProject.class)
+ .add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes)
+ .add("budgetPolicyId", budgetPolicyId)
+ .add("computeLastActiveTime", computeLastActiveTime)
+ .add("createTime", createTime)
+ .add("customTags", customTags)
+ .add("defaultEndpointSettings", defaultEndpointSettings)
+ .add("displayName", displayName)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
+ .add("effectiveDefaultEndpointSettings", effectiveDefaultEndpointSettings)
+ .add("effectiveDisplayName", effectiveDisplayName)
+ .add("effectiveHistoryRetentionDuration", effectiveHistoryRetentionDuration)
+ .add("effectivePgVersion", effectivePgVersion)
+ .add("effectiveSettings", effectiveSettings)
+ .add("historyRetentionDuration", historyRetentionDuration)
+ .add("pgVersion", pgVersion)
+ .add("projectId", projectId)
+ .add("settings", settings)
+ .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes)
+ .add("updateTime", updateTime)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java
new file mode 100755
index 000000000..90d1c45b8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java
@@ -0,0 +1,184 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Database Projects provide access to a database via REST API or direct SQL. */
+@Generated
+public class DatabaseProjectAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DatabaseProjectAPI.class);
+
+ private final DatabaseProjectService impl;
+
+ /** Regular-use constructor */
+ public DatabaseProjectAPI(ApiClient apiClient) {
+ impl = new DatabaseProjectImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DatabaseProjectAPI(DatabaseProjectService mock) {
+ impl = mock;
+ }
+
+ /** Create a Database Branch. */
+ public DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest request) {
+ return impl.createDatabaseBranch(request);
+ }
+
+ /** Create a Database Endpoint. */
+ public DatabaseEndpoint createDatabaseEndpoint(CreateDatabaseEndpointRequest request) {
+ return impl.createDatabaseEndpoint(request);
+ }
+
+ /** Create a Database Project. */
+ public DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest request) {
+ return impl.createDatabaseProject(request);
+ }
+
+ public void deleteDatabaseBranch(String projectId, String branchId) {
+ deleteDatabaseBranch(
+ new DeleteDatabaseBranchRequest().setProjectId(projectId).setBranchId(branchId));
+ }
+
+ /** Delete a Database Branch. */
+ public void deleteDatabaseBranch(DeleteDatabaseBranchRequest request) {
+ impl.deleteDatabaseBranch(request);
+ }
+
+ public void deleteDatabaseEndpoint(String projectId, String branchId, String endpointId) {
+ deleteDatabaseEndpoint(
+ new DeleteDatabaseEndpointRequest()
+ .setProjectId(projectId)
+ .setBranchId(branchId)
+ .setEndpointId(endpointId));
+ }
+
+ /** Delete a Database Endpoint. */
+ public void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest request) {
+ impl.deleteDatabaseEndpoint(request);
+ }
+
+ public void deleteDatabaseProject(String projectId) {
+ deleteDatabaseProject(new DeleteDatabaseProjectRequest().setProjectId(projectId));
+ }
+
+ /** Delete a Database Project. */
+ public void deleteDatabaseProject(DeleteDatabaseProjectRequest request) {
+ impl.deleteDatabaseProject(request);
+ }
+
+ public DatabaseBranch getDatabaseBranch(String projectId, String branchId) {
+ return getDatabaseBranch(
+ new GetDatabaseBranchRequest().setProjectId(projectId).setBranchId(branchId));
+ }
+
+ /** Get a Database Branch. */
+ public DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest request) {
+ return impl.getDatabaseBranch(request);
+ }
+
+ public DatabaseEndpoint getDatabaseEndpoint(
+ String projectId, String branchId, String endpointId) {
+ return getDatabaseEndpoint(
+ new GetDatabaseEndpointRequest()
+ .setProjectId(projectId)
+ .setBranchId(branchId)
+ .setEndpointId(endpointId));
+ }
+
+ /** Get a Database Endpoint. */
+ public DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest request) {
+ return impl.getDatabaseEndpoint(request);
+ }
+
+ public DatabaseProject getDatabaseProject(String projectId) {
+ return getDatabaseProject(new GetDatabaseProjectRequest().setProjectId(projectId));
+ }
+
+ /** Get a Database Project. */
+ public DatabaseProject getDatabaseProject(GetDatabaseProjectRequest request) {
+ return impl.getDatabaseProject(request);
+ }
+
+ public Iterable<DatabaseBranch> listDatabaseBranches(String projectId) {
+ return listDatabaseBranches(new ListDatabaseBranchesRequest().setProjectId(projectId));
+ }
+
+ /** List Database Branches. */
+ public Iterable<DatabaseBranch> listDatabaseBranches(ListDatabaseBranchesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDatabaseBranches,
+ ListDatabaseBranchesResponse::getDatabaseBranches,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public Iterable<DatabaseEndpoint> listDatabaseEndpoints(String projectId, String branchId) {
+ return listDatabaseEndpoints(
+ new ListDatabaseEndpointsRequest().setProjectId(projectId).setBranchId(branchId));
+ }
+
+ /** List Database Endpoints. */
+ public Iterable<DatabaseEndpoint> listDatabaseEndpoints(ListDatabaseEndpointsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDatabaseEndpoints,
+ ListDatabaseEndpointsResponse::getDatabaseEndpoints,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** List Database Projects. */
+ public Iterable<DatabaseProject> listDatabaseProjects(ListDatabaseProjectsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDatabaseProjects,
+ ListDatabaseProjectsResponse::getDatabaseProjects,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** Restart a Database Endpoint. TODO: should return databricks.longrunning.Operation */
+ public DatabaseEndpoint restartDatabaseEndpoint(RestartDatabaseEndpointRequest request) {
+ return impl.restartDatabaseEndpoint(request);
+ }
+
+ /** Update a Database Branch. */
+ public DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest request) {
+ return impl.updateDatabaseBranch(request);
+ }
+
+ /** Update a Database Endpoint. TODO: should return databricks.longrunning.Operation */
+ public DatabaseEndpoint updateDatabaseEndpoint(UpdateDatabaseEndpointRequest request) {
+ return impl.updateDatabaseEndpoint(request);
+ }
+
+ /** Update a Database Project. */
+ public DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest request) {
+ return impl.updateDatabaseProject(request);
+ }
+
+ public DatabaseProjectService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java
new file mode 100755
index 000000000..715cb3229
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DatabaseProjectCustomTag {
+ /** The key of the custom tag. */
+ @JsonProperty("key")
+ private String key;
+
+ /** The value of the custom tag. */
+ @JsonProperty("value")
+ private String value;
+
+ public DatabaseProjectCustomTag setKey(String key) {
+ this.key = key;
+ return this;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public DatabaseProjectCustomTag setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseProjectCustomTag that = (DatabaseProjectCustomTag) o;
+ return Objects.equals(key, that.key) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(key, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseProjectCustomTag.class)
+ .add("key", key)
+ .add("value", value)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java
new file mode 100755
index 000000000..699992cae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java
@@ -0,0 +1,115 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Map;
+import java.util.Objects;
+
+/** A collection of settings for a database endpoint. */
+@Generated
+public class DatabaseProjectDefaultEndpointSettings {
+ /** The maximum number of Compute Units. */
+ @JsonProperty("autoscaling_limit_max_cu")
+ private Double autoscalingLimitMaxCu;
+
+ /** The minimum number of Compute Units. */
+ @JsonProperty("autoscaling_limit_min_cu")
+ private Double autoscalingLimitMinCu;
+
+ /** A raw representation of Postgres settings. */
+ @JsonProperty("pg_settings")
+ private Map pgSettings;
+
+ /** A raw representation of PgBouncer settings. */
+ @JsonProperty("pgbouncer_settings")
+ private Map pgbouncerSettings;
+
+ /** Duration of inactivity after which the compute endpoint is automatically suspended. */
+ @JsonProperty("suspend_timeout_duration")
+ private String suspendTimeoutDuration;
+
+ public DatabaseProjectDefaultEndpointSettings setAutoscalingLimitMaxCu(
+ Double autoscalingLimitMaxCu) {
+ this.autoscalingLimitMaxCu = autoscalingLimitMaxCu;
+ return this;
+ }
+
+ public Double getAutoscalingLimitMaxCu() {
+ return autoscalingLimitMaxCu;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings setAutoscalingLimitMinCu(
+ Double autoscalingLimitMinCu) {
+ this.autoscalingLimitMinCu = autoscalingLimitMinCu;
+ return this;
+ }
+
+ public Double getAutoscalingLimitMinCu() {
+ return autoscalingLimitMinCu;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings setPgSettings(Map pgSettings) {
+ this.pgSettings = pgSettings;
+ return this;
+ }
+
+ public Map getPgSettings() {
+ return pgSettings;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings setPgbouncerSettings(
+ Map pgbouncerSettings) {
+ this.pgbouncerSettings = pgbouncerSettings;
+ return this;
+ }
+
+ public Map getPgbouncerSettings() {
+ return pgbouncerSettings;
+ }
+
+ public DatabaseProjectDefaultEndpointSettings setSuspendTimeoutDuration(
+ String suspendTimeoutDuration) {
+ this.suspendTimeoutDuration = suspendTimeoutDuration;
+ return this;
+ }
+
+ public String getSuspendTimeoutDuration() {
+ return suspendTimeoutDuration;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseProjectDefaultEndpointSettings that = (DatabaseProjectDefaultEndpointSettings) o;
+ return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu)
+ && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu)
+ && Objects.equals(pgSettings, that.pgSettings)
+ && Objects.equals(pgbouncerSettings, that.pgbouncerSettings)
+ && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ autoscalingLimitMaxCu,
+ autoscalingLimitMinCu,
+ pgSettings,
+ pgbouncerSettings,
+ suspendTimeoutDuration);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseProjectDefaultEndpointSettings.class)
+ .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu)
+ .add("autoscalingLimitMinCu", autoscalingLimitMinCu)
+ .add("pgSettings", pgSettings)
+ .add("pgbouncerSettings", pgbouncerSettings)
+ .add("suspendTimeoutDuration", suspendTimeoutDuration)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java
new file mode 100755
index 000000000..bf9ff3aa4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java
@@ -0,0 +1,260 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of DatabaseProject */
+@Generated
+class DatabaseProjectImpl implements DatabaseProjectService {
+ private final ApiClient apiClient;
+
+ public DatabaseProjectImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest request) {
+ String path = String.format("/api/2.0/database/projects/%s/branches", request.getProjectId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseBranch()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseBranch.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseEndpoint createDatabaseEndpoint(CreateDatabaseEndpointRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints",
+ request.getProjectId(), request.getBranchId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseEndpoint()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseEndpoint.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest request) {
+ String path = "/api/2.0/database/projects";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseProject()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseProject.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteDatabaseBranch(DeleteDatabaseBranchRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s",
+ request.getProjectId(), request.getBranchId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints/%s",
+ request.getProjectId(), request.getBranchId(), request.getEndpointId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteDatabaseProject(DeleteDatabaseProjectRequest request) {
+ String path = String.format("/api/2.0/database/projects/%s", request.getProjectId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s",
+ request.getProjectId(), request.getBranchId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DatabaseBranch.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints/%s",
+ request.getProjectId(), request.getBranchId(), request.getEndpointId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DatabaseEndpoint.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseProject getDatabaseProject(GetDatabaseProjectRequest request) {
+ String path = String.format("/api/2.0/database/projects/%s", request.getProjectId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DatabaseProject.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListDatabaseBranchesResponse listDatabaseBranches(ListDatabaseBranchesRequest request) {
+ String path = String.format("/api/2.0/database/projects/%s/branches", request.getProjectId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDatabaseBranchesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListDatabaseEndpointsResponse listDatabaseEndpoints(ListDatabaseEndpointsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints",
+ request.getProjectId(), request.getBranchId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDatabaseEndpointsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListDatabaseProjectsResponse listDatabaseProjects(ListDatabaseProjectsRequest request) {
+ String path = "/api/2.0/database/projects";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDatabaseProjectsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseEndpoint restartDatabaseEndpoint(RestartDatabaseEndpointRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints/%s/restart",
+ request.getProjectId(), request.getBranchId(), request.getEndpointId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseEndpoint.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s",
+ request.getProjectId(), request.getBranchId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseBranch()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseBranch.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseEndpoint updateDatabaseEndpoint(UpdateDatabaseEndpointRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/database/projects/%s/branches/%s/endpoints/%s",
+ request.getProjectId(), request.getBranchId(), request.getEndpointId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseEndpoint()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseEndpoint.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest request) {
+ String path = String.format("/api/2.0/database/projects/%s", request.getProjectId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseProject()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseProject.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java
new file mode 100755
index 000000000..cd2a49bd0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Database Projects provide access to a database via REST API or direct SQL.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DatabaseProjectService {
+ /** Create a Database Branch. */
+ DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest createDatabaseBranchRequest);
+
+ /** Create a Database Endpoint. */
+ DatabaseEndpoint createDatabaseEndpoint(
+ CreateDatabaseEndpointRequest createDatabaseEndpointRequest);
+
+ /** Create a Database Project. */
+ DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest createDatabaseProjectRequest);
+
+ /** Delete a Database Branch. */
+ void deleteDatabaseBranch(DeleteDatabaseBranchRequest deleteDatabaseBranchRequest);
+
+ /** Delete a Database Endpoint. */
+ void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest deleteDatabaseEndpointRequest);
+
+ /** Delete a Database Project. */
+ void deleteDatabaseProject(DeleteDatabaseProjectRequest deleteDatabaseProjectRequest);
+
+ /** Get a Database Branch. */
+ DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest getDatabaseBranchRequest);
+
+ /** Get a Database Endpoint. */
+ DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest getDatabaseEndpointRequest);
+
+ /** Get a Database Project. */
+ DatabaseProject getDatabaseProject(GetDatabaseProjectRequest getDatabaseProjectRequest);
+
+ /** List Database Branches. */
+ ListDatabaseBranchesResponse listDatabaseBranches(
+ ListDatabaseBranchesRequest listDatabaseBranchesRequest);
+
+ /** List Database Endpoints. */
+ ListDatabaseEndpointsResponse listDatabaseEndpoints(
+ ListDatabaseEndpointsRequest listDatabaseEndpointsRequest);
+
+  /** List Database Projects. */
+ ListDatabaseProjectsResponse listDatabaseProjects(
+ ListDatabaseProjectsRequest listDatabaseProjectsRequest);
+
+ /** Restart a Database Endpoint. TODO: should return databricks.longrunning.Operation */
+ DatabaseEndpoint restartDatabaseEndpoint(
+ RestartDatabaseEndpointRequest restartDatabaseEndpointRequest);
+
+ /** Update a Database Branch. */
+ DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest updateDatabaseBranchRequest);
+
+  /** Update a Database Endpoint. TODO: should return databricks.longrunning.Operation */
+ DatabaseEndpoint updateDatabaseEndpoint(
+ UpdateDatabaseEndpointRequest updateDatabaseEndpointRequest);
+
+ /** Update a Database Project. */
+ DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest updateDatabaseProjectRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java
new file mode 100755
index 000000000..bf18dcd42
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DatabaseProjectSettings {
+ /**
+ * Sets wal_level=logical for all compute endpoints in this project. All active endpoints will be
+ * suspended. Once enabled, logical replication cannot be disabled.
+ */
+ @JsonProperty("enable_logical_replication")
+ private Boolean enableLogicalReplication;
+
+ public DatabaseProjectSettings setEnableLogicalReplication(Boolean enableLogicalReplication) {
+ this.enableLogicalReplication = enableLogicalReplication;
+ return this;
+ }
+
+ public Boolean getEnableLogicalReplication() {
+ return enableLogicalReplication;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseProjectSettings that = (DatabaseProjectSettings) o;
+ return Objects.equals(enableLogicalReplication, that.enableLogicalReplication);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(enableLogicalReplication);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseProjectSettings.class)
+ .add("enableLogicalReplication", enableLogicalReplication)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
index 4f6d40276..182ba0435 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
@@ -49,6 +49,10 @@ void deleteDatabaseInstanceRole(
/** Delete a Synced Database Table. */
void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest);
+ /** Failover the primary node of a Database Instance to a secondary. */
+ DatabaseInstance failoverDatabaseInstance(
+ FailoverDatabaseInstanceRequest failoverDatabaseInstanceRequest);
+
/** Find a Database Instance by uid. */
DatabaseInstance findDatabaseInstanceByUid(
FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest);
@@ -102,6 +106,10 @@ ListSyncedDatabaseTablesResponse listSyncedDatabaseTables(
DatabaseInstance updateDatabaseInstance(
UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest);
+ /** Update a role for a Database Instance. */
+ DatabaseInstanceRole updateDatabaseInstanceRole(
+ UpdateDatabaseInstanceRoleRequest updateDatabaseInstanceRoleRequest);
+
/** This API is currently unimplemented, but exposed for Terraform support. */
SyncedDatabaseTable updateSyncedDatabaseTable(
UpdateSyncedDatabaseTableRequest updateSyncedDatabaseTableRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
index 8bae3d07a..42e127417 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
@@ -37,6 +37,10 @@ public class DatabaseTable {
@JsonProperty("name")
private String name;
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
this.databaseInstanceName = databaseInstanceName;
return this;
@@ -64,6 +68,15 @@ public String getName() {
return name;
}
+ public DatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -71,12 +84,13 @@ public boolean equals(Object o) {
DatabaseTable that = (DatabaseTable) o;
return Objects.equals(databaseInstanceName, that.databaseInstanceName)
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
- && Objects.equals(name, that.name);
+ && Objects.equals(name, that.name)
+ && Objects.equals(tableServingUrl, that.tableServingUrl);
}
@Override
public int hashCode() {
- return Objects.hash(databaseInstanceName, logicalDatabaseName, name);
+ return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl);
}
@Override
@@ -85,6 +99,7 @@ public String toString() {
.add("databaseInstanceName", databaseInstanceName)
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
+ .add("tableServingUrl", tableServingUrl)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java
new file mode 100755
index 000000000..895faa2a1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseBranchRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public DeleteDatabaseBranchRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public DeleteDatabaseBranchRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseBranchRequest that = (DeleteDatabaseBranchRequest) o;
+ return Objects.equals(branchId, that.branchId) && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseBranchRequest.class)
+ .add("branchId", branchId)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java
new file mode 100755
index 000000000..4d123c6f4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseEndpointRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonIgnore private String endpointId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public DeleteDatabaseEndpointRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public DeleteDatabaseEndpointRequest setEndpointId(String endpointId) {
+ this.endpointId = endpointId;
+ return this;
+ }
+
+ public String getEndpointId() {
+ return endpointId;
+ }
+
+ public DeleteDatabaseEndpointRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseEndpointRequest that = (DeleteDatabaseEndpointRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(endpointId, that.endpointId)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, endpointId, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseEndpointRequest.class)
+ .add("branchId", branchId)
+ .add("endpointId", endpointId)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java
new file mode 100755
index 000000000..557cef9f7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseProjectRequest {
+ /** */
+ @JsonIgnore private String projectId;
+
+ public DeleteDatabaseProjectRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseProjectRequest that = (DeleteDatabaseProjectRequest) o;
+ return Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseProjectRequest.class)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java
new file mode 100755
index 000000000..24cda6829
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class FailoverDatabaseInstanceRequest {
+ /** */
+ @JsonProperty("failover_target_database_instance_name")
+ private String failoverTargetDatabaseInstanceName;
+
+ /** Name of the instance to failover. */
+ @JsonIgnore private String name;
+
+ public FailoverDatabaseInstanceRequest setFailoverTargetDatabaseInstanceName(
+ String failoverTargetDatabaseInstanceName) {
+ this.failoverTargetDatabaseInstanceName = failoverTargetDatabaseInstanceName;
+ return this;
+ }
+
+ public String getFailoverTargetDatabaseInstanceName() {
+ return failoverTargetDatabaseInstanceName;
+ }
+
+ public FailoverDatabaseInstanceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FailoverDatabaseInstanceRequest that = (FailoverDatabaseInstanceRequest) o;
+ return Objects.equals(
+ failoverTargetDatabaseInstanceName, that.failoverTargetDatabaseInstanceName)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failoverTargetDatabaseInstanceName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FailoverDatabaseInstanceRequest.class)
+ .add("failoverTargetDatabaseInstanceName", failoverTargetDatabaseInstanceName)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java
new file mode 100755
index 000000000..d695581dc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDatabaseBranchRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public GetDatabaseBranchRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public GetDatabaseBranchRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDatabaseBranchRequest that = (GetDatabaseBranchRequest) o;
+ return Objects.equals(branchId, that.branchId) && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDatabaseBranchRequest.class)
+ .add("branchId", branchId)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java
new file mode 100755
index 000000000..a0c8e44a3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDatabaseEndpointRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonIgnore private String endpointId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public GetDatabaseEndpointRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public GetDatabaseEndpointRequest setEndpointId(String endpointId) {
+ this.endpointId = endpointId;
+ return this;
+ }
+
+ public String getEndpointId() {
+ return endpointId;
+ }
+
+ public GetDatabaseEndpointRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDatabaseEndpointRequest that = (GetDatabaseEndpointRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(endpointId, that.endpointId)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, endpointId, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDatabaseEndpointRequest.class)
+ .add("branchId", branchId)
+ .add("endpointId", endpointId)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java
new file mode 100755
index 000000000..21c5cc5b5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDatabaseProjectRequest {
+ /** */
+ @JsonIgnore private String projectId;
+
+ public GetDatabaseProjectRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDatabaseProjectRequest that = (GetDatabaseProjectRequest) o;
+ return Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDatabaseProjectRequest.class).add("projectId", projectId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java
new file mode 100755
index 000000000..15d795165
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseBranchesRequest {
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of Database Branches. Requests first page if absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public ListDatabaseBranchesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseBranchesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListDatabaseBranchesRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseBranchesRequest that = (ListDatabaseBranchesRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseBranchesRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java
new file mode 100755
index 000000000..6ffedd7a8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseBranchesResponse {
+ /** List of branches. */
+ @JsonProperty("database_branches")
+ private Collection<DatabaseBranch> databaseBranches;
+
+ /** Pagination token to request the next page of instances. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDatabaseBranchesResponse setDatabaseBranches(
+ Collection<DatabaseBranch> databaseBranches) {
+ this.databaseBranches = databaseBranches;
+ return this;
+ }
+
+ public Collection<DatabaseBranch> getDatabaseBranches() {
+ return databaseBranches;
+ }
+
+ public ListDatabaseBranchesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseBranchesResponse that = (ListDatabaseBranchesResponse) o;
+ return Objects.equals(databaseBranches, that.databaseBranches)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseBranches, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseBranchesResponse.class)
+ .add("databaseBranches", databaseBranches)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java
new file mode 100755
index 000000000..7702ea0b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java
@@ -0,0 +1,92 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseEndpointsRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** Upper bound for items returned. If specified must be at least 10. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of Database Endpoints. Requests first page if absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public ListDatabaseEndpointsRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public ListDatabaseEndpointsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseEndpointsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListDatabaseEndpointsRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseEndpointsRequest that = (ListDatabaseEndpointsRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, pageSize, pageToken, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseEndpointsRequest.class)
+ .add("branchId", branchId)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java
new file mode 100755
index 000000000..331b1615a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseEndpointsResponse {
+ /** List of endpoints. */
+ @JsonProperty("database_endpoints")
+ private Collection<DatabaseEndpoint> databaseEndpoints;
+
+ /** Pagination token to request the next page of instances. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDatabaseEndpointsResponse setDatabaseEndpoints(
+ Collection<DatabaseEndpoint> databaseEndpoints) {
+ this.databaseEndpoints = databaseEndpoints;
+ return this;
+ }
+
+ public Collection<DatabaseEndpoint> getDatabaseEndpoints() {
+ return databaseEndpoints;
+ }
+
+ public ListDatabaseEndpointsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseEndpointsResponse that = (ListDatabaseEndpointsResponse) o;
+ return Objects.equals(databaseEndpoints, that.databaseEndpoints)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseEndpoints, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseEndpointsResponse.class)
+ .add("databaseEndpoints", databaseEndpoints)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java
new file mode 100755
index 000000000..e29f7d6e4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseProjectsRequest {
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of Database Projects. Requests first page if absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDatabaseProjectsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseProjectsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseProjectsRequest that = (ListDatabaseProjectsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseProjectsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java
new file mode 100755
index 000000000..ab8825496
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseProjectsResponse {
+ /** List of projects. */
+ @JsonProperty("database_projects")
+ private Collection<DatabaseProject> databaseProjects;
+
+ /** Pagination token to request the next page of instances. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDatabaseProjectsResponse setDatabaseProjects(
+ Collection<DatabaseProject> databaseProjects) {
+ this.databaseProjects = databaseProjects;
+ return this;
+ }
+
+ public Collection<DatabaseProject> getDatabaseProjects() {
+ return databaseProjects;
+ }
+
+ public ListDatabaseProjectsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseProjectsResponse that = (ListDatabaseProjectsResponse) o;
+ return Objects.equals(databaseProjects, that.databaseProjects)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseProjects, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseProjectsResponse.class)
+ .add("databaseProjects", databaseProjects)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
index f3a3befad..8b9f1fd19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
@@ -13,6 +13,10 @@
*/
@Generated
public class NewPipelineSpec {
+ /** Budget policy of this pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* This field needs to be specified if the destination catalog is a managed postgres catalog.
*
@@ -31,6 +35,15 @@ public class NewPipelineSpec {
@JsonProperty("storage_schema")
private String storageSchema;
+ public NewPipelineSpec setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public NewPipelineSpec setStorageCatalog(String storageCatalog) {
this.storageCatalog = storageCatalog;
return this;
@@ -54,18 +67,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
NewPipelineSpec that = (NewPipelineSpec) o;
- return Objects.equals(storageCatalog, that.storageCatalog)
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(storageCatalog, that.storageCatalog)
&& Objects.equals(storageSchema, that.storageSchema);
}
@Override
public int hashCode() {
- return Objects.hash(storageCatalog, storageSchema);
+ return Objects.hash(budgetPolicyId, storageCatalog, storageSchema);
}
@Override
public String toString() {
return new ToStringer(NewPipelineSpec.class)
+ .add("budgetPolicyId", budgetPolicyId)
.add("storageCatalog", storageCatalog)
.add("storageSchema", storageSchema)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java
new file mode 100755
index 000000000..88ebd0a59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class RestartDatabaseEndpointRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonIgnore private String endpointId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ public RestartDatabaseEndpointRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public RestartDatabaseEndpointRequest setEndpointId(String endpointId) {
+ this.endpointId = endpointId;
+ return this;
+ }
+
+ public String getEndpointId() {
+ return endpointId;
+ }
+
+ public RestartDatabaseEndpointRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RestartDatabaseEndpointRequest that = (RestartDatabaseEndpointRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(endpointId, that.endpointId)
+ && Objects.equals(projectId, that.projectId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, endpointId, projectId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestartDatabaseEndpointRequest.class)
+ .add("branchId", branchId)
+ .add("endpointId", endpointId)
+ .add("projectId", projectId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
index 090724e72..adbd3bece 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
@@ -7,13 +7,17 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Next field marker: 14 */
+/** Next field marker: 18 */
@Generated
public class SyncedDatabaseTable {
/** Synced Table data synchronization status */
@JsonProperty("data_synchronization_status")
private SyncedTableStatus dataSynchronizationStatus;
+ /** The branch_id of the database branch associated with the table. */
+ @JsonProperty("database_branch_id")
+ private String databaseBranchId;
+
/**
* Name of the target database instance. This is required when creating synced database tables in
* standard catalogs. This is optional when creating synced database tables in registered
@@ -24,6 +28,14 @@ public class SyncedDatabaseTable {
@JsonProperty("database_instance_name")
private String databaseInstanceName;
+ /** The project_id of the database project associated with the table. */
+ @JsonProperty("database_project_id")
+ private String databaseProjectId;
+
+ /** The branch_id of the database branch associated with the table. */
+ @JsonProperty("effective_database_branch_id")
+ private String effectiveDatabaseBranchId;
+
/**
* The name of the database instance that this table is registered to. This field is always
* returned, and for tables inside database catalogs is inferred database instance associated with
@@ -32,6 +44,10 @@ public class SyncedDatabaseTable {
@JsonProperty("effective_database_instance_name")
private String effectiveDatabaseInstanceName;
+ /** The project_id of the database project associated with the table. */
+ @JsonProperty("effective_database_project_id")
+ private String effectiveDatabaseProjectId;
+
/** The name of the logical database that this table is registered to. */
@JsonProperty("effective_logical_database_name")
private String effectiveLogicalDatabaseName;
@@ -59,6 +75,10 @@ public class SyncedDatabaseTable {
@JsonProperty("spec")
private SyncedTableSpec spec;
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
/**
* The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
* state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
@@ -77,6 +97,15 @@ public SyncedTableStatus getDataSynchronizationStatus() {
return dataSynchronizationStatus;
}
+ public SyncedDatabaseTable setDatabaseBranchId(String databaseBranchId) {
+ this.databaseBranchId = databaseBranchId;
+ return this;
+ }
+
+ public String getDatabaseBranchId() {
+ return databaseBranchId;
+ }
+
public SyncedDatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
this.databaseInstanceName = databaseInstanceName;
return this;
@@ -86,6 +115,24 @@ public String getDatabaseInstanceName() {
return databaseInstanceName;
}
+ public SyncedDatabaseTable setDatabaseProjectId(String databaseProjectId) {
+ this.databaseProjectId = databaseProjectId;
+ return this;
+ }
+
+ public String getDatabaseProjectId() {
+ return databaseProjectId;
+ }
+
+ public SyncedDatabaseTable setEffectiveDatabaseBranchId(String effectiveDatabaseBranchId) {
+ this.effectiveDatabaseBranchId = effectiveDatabaseBranchId;
+ return this;
+ }
+
+ public String getEffectiveDatabaseBranchId() {
+ return effectiveDatabaseBranchId;
+ }
+
public SyncedDatabaseTable setEffectiveDatabaseInstanceName(
String effectiveDatabaseInstanceName) {
this.effectiveDatabaseInstanceName = effectiveDatabaseInstanceName;
@@ -96,6 +143,15 @@ public String getEffectiveDatabaseInstanceName() {
return effectiveDatabaseInstanceName;
}
+ public SyncedDatabaseTable setEffectiveDatabaseProjectId(String effectiveDatabaseProjectId) {
+ this.effectiveDatabaseProjectId = effectiveDatabaseProjectId;
+ return this;
+ }
+
+ public String getEffectiveDatabaseProjectId() {
+ return effectiveDatabaseProjectId;
+ }
+
public SyncedDatabaseTable setEffectiveLogicalDatabaseName(String effectiveLogicalDatabaseName) {
this.effectiveLogicalDatabaseName = effectiveLogicalDatabaseName;
return this;
@@ -132,6 +188,15 @@ public SyncedTableSpec getSpec() {
return spec;
}
+ public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
public SyncedDatabaseTable setUnityCatalogProvisioningState(
ProvisioningInfoState unityCatalogProvisioningState) {
this.unityCatalogProvisioningState = unityCatalogProvisioningState;
@@ -148,12 +213,17 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
SyncedDatabaseTable that = (SyncedDatabaseTable) o;
return Objects.equals(dataSynchronizationStatus, that.dataSynchronizationStatus)
+ && Objects.equals(databaseBranchId, that.databaseBranchId)
&& Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(databaseProjectId, that.databaseProjectId)
+ && Objects.equals(effectiveDatabaseBranchId, that.effectiveDatabaseBranchId)
&& Objects.equals(effectiveDatabaseInstanceName, that.effectiveDatabaseInstanceName)
+ && Objects.equals(effectiveDatabaseProjectId, that.effectiveDatabaseProjectId)
&& Objects.equals(effectiveLogicalDatabaseName, that.effectiveLogicalDatabaseName)
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
&& Objects.equals(name, that.name)
&& Objects.equals(spec, that.spec)
+ && Objects.equals(tableServingUrl, that.tableServingUrl)
&& Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
}
@@ -161,12 +231,17 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
dataSynchronizationStatus,
+ databaseBranchId,
databaseInstanceName,
+ databaseProjectId,
+ effectiveDatabaseBranchId,
effectiveDatabaseInstanceName,
+ effectiveDatabaseProjectId,
effectiveLogicalDatabaseName,
logicalDatabaseName,
name,
spec,
+ tableServingUrl,
unityCatalogProvisioningState);
}
@@ -174,12 +249,17 @@ public int hashCode() {
public String toString() {
return new ToStringer(SyncedDatabaseTable.class)
.add("dataSynchronizationStatus", dataSynchronizationStatus)
+ .add("databaseBranchId", databaseBranchId)
.add("databaseInstanceName", databaseInstanceName)
+ .add("databaseProjectId", databaseProjectId)
+ .add("effectiveDatabaseBranchId", effectiveDatabaseBranchId)
.add("effectiveDatabaseInstanceName", effectiveDatabaseInstanceName)
+ .add("effectiveDatabaseProjectId", effectiveDatabaseProjectId)
.add("effectiveLogicalDatabaseName", effectiveLogicalDatabaseName)
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
.add("spec", spec)
+ .add("tableServingUrl", tableServingUrl)
.add("unityCatalogProvisioningState", unityCatalogProvisioningState)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java
new file mode 100755
index 000000000..c907a5a08
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseBranchRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonProperty("database_branch")
+ private DatabaseBranch databaseBranch;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ /** The list of fields to update. If unspecified, all fields will be updated when possible. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateDatabaseBranchRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public UpdateDatabaseBranchRequest setDatabaseBranch(DatabaseBranch databaseBranch) {
+ this.databaseBranch = databaseBranch;
+ return this;
+ }
+
+ public DatabaseBranch getDatabaseBranch() {
+ return databaseBranch;
+ }
+
+ public UpdateDatabaseBranchRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ public UpdateDatabaseBranchRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseBranchRequest that = (UpdateDatabaseBranchRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(databaseBranch, that.databaseBranch)
+ && Objects.equals(projectId, that.projectId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, databaseBranch, projectId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseBranchRequest.class)
+ .add("branchId", branchId)
+ .add("databaseBranch", databaseBranch)
+ .add("projectId", projectId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java
new file mode 100755
index 000000000..a679de9f0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseEndpointRequest {
+ /** */
+ @JsonIgnore private String branchId;
+
+ /** */
+ @JsonProperty("database_endpoint")
+ private DatabaseEndpoint databaseEndpoint;
+
+ /** */
+ @JsonIgnore private String endpointId;
+
+ /** */
+ @JsonIgnore private String projectId;
+
+ /** The list of fields to update. If unspecified, all fields will be updated when possible. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateDatabaseEndpointRequest setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
+ public UpdateDatabaseEndpointRequest setDatabaseEndpoint(DatabaseEndpoint databaseEndpoint) {
+ this.databaseEndpoint = databaseEndpoint;
+ return this;
+ }
+
+ public DatabaseEndpoint getDatabaseEndpoint() {
+ return databaseEndpoint;
+ }
+
+ public UpdateDatabaseEndpointRequest setEndpointId(String endpointId) {
+ this.endpointId = endpointId;
+ return this;
+ }
+
+ public String getEndpointId() {
+ return endpointId;
+ }
+
+ public UpdateDatabaseEndpointRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ public UpdateDatabaseEndpointRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseEndpointRequest that = (UpdateDatabaseEndpointRequest) o;
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(databaseEndpoint, that.databaseEndpoint)
+ && Objects.equals(endpointId, that.endpointId)
+ && Objects.equals(projectId, that.projectId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchId, databaseEndpoint, endpointId, projectId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseEndpointRequest.class)
+ .add("branchId", branchId)
+ .add("databaseEndpoint", databaseEndpoint)
+ .add("endpointId", endpointId)
+ .add("projectId", projectId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java
new file mode 100755
index 000000000..bc2ad46c3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java
@@ -0,0 +1,91 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseInstanceRoleRequest {
+ /** The name of the database instance, sent as the `database_instance_name` query parameter. */
+ @JsonIgnore
+ @QueryParam("database_instance_name")
+ private String databaseInstanceName;
+
+ /** The role payload carrying the updated field values. */
+ @JsonProperty("database_instance_role")
+ private DatabaseInstanceRole databaseInstanceRole;
+
+ /** The instance the role belongs to; not serialized into the request body (presumably a path parameter — confirm against the API). */
+ @JsonIgnore private String instanceName;
+
+ /** The name of the role. This is the unique identifier for the role in an instance. */
+ @JsonIgnore private String name;
+
+ public UpdateDatabaseInstanceRoleRequest setDatabaseInstanceName(String databaseInstanceName) {
+ this.databaseInstanceName = databaseInstanceName;
+ return this;
+ }
+
+ public String getDatabaseInstanceName() {
+ return databaseInstanceName;
+ }
+
+ public UpdateDatabaseInstanceRoleRequest setDatabaseInstanceRole(
+ DatabaseInstanceRole databaseInstanceRole) {
+ this.databaseInstanceRole = databaseInstanceRole;
+ return this;
+ }
+
+ public DatabaseInstanceRole getDatabaseInstanceRole() {
+ return databaseInstanceRole;
+ }
+
+ public UpdateDatabaseInstanceRoleRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public UpdateDatabaseInstanceRoleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseInstanceRoleRequest that = (UpdateDatabaseInstanceRoleRequest) o;
+ return Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(databaseInstanceRole, that.databaseInstanceRole)
+ && Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseInstanceName, databaseInstanceRole, instanceName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseInstanceRoleRequest.class)
+ .add("databaseInstanceName", databaseInstanceName)
+ .add("databaseInstanceRole", databaseInstanceRole)
+ .add("instanceName", instanceName)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java
new file mode 100755
index 000000000..60b4844da
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseProjectRequest {
+ /** The database project payload carrying the updated field values. */
+ @JsonProperty("database_project")
+ private DatabaseProject databaseProject;
+
+ /** The ID of the project to update; not serialized into the request body (presumably a path parameter — confirm against the API). */
+ @JsonIgnore private String projectId;
+
+ /** The list of fields to update. If unspecified, all fields will be updated when possible. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateDatabaseProjectRequest setDatabaseProject(DatabaseProject databaseProject) {
+ this.databaseProject = databaseProject;
+ return this;
+ }
+
+ public DatabaseProject getDatabaseProject() {
+ return databaseProject;
+ }
+
+ public UpdateDatabaseProjectRequest setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
+ public UpdateDatabaseProjectRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseProjectRequest that = (UpdateDatabaseProjectRequest) o;
+ return Objects.equals(databaseProject, that.databaseProject)
+ && Objects.equals(projectId, that.projectId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseProject, projectId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseProjectRequest.class)
+ .add("databaseProject", databaseProject)
+ .add("projectId", projectId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java
new file mode 100755
index 000000000..1bff8d647
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/** The granularity for aggregating data into time windows based on their timestamp. Window sizes range from 5 minutes up to 1 year. */
+@Generated
+public enum AggregationGranularity {
+ AGGREGATION_GRANULARITY_1_DAY,
+ AGGREGATION_GRANULARITY_1_HOUR,
+ AGGREGATION_GRANULARITY_1_MONTH,
+ AGGREGATION_GRANULARITY_1_WEEK,
+ AGGREGATION_GRANULARITY_1_YEAR,
+ AGGREGATION_GRANULARITY_2_WEEKS,
+ AGGREGATION_GRANULARITY_30_MINUTES,
+ AGGREGATION_GRANULARITY_3_WEEKS,
+ AGGREGATION_GRANULARITY_4_WEEKS,
+ AGGREGATION_GRANULARITY_5_MINUTES,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java
new file mode 100755
index 000000000..6b99c71fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Anomaly Detection Configurations. */
+@Generated
+public class AnomalyDetectionConfig {
+ /**
+ * The id of the workflow that detects the anomaly. This field will only be returned in the
+ * Get/Update response, if the request comes from the workspace where this anomaly detection job
+ * is created.
+ */
+ @JsonProperty("anomaly_detection_workflow_id")
+ private Long anomalyDetectionWorkflowId;
+
+ /** The type of the last run of the workflow. */
+ @JsonProperty("job_type")
+ private AnomalyDetectionJobType jobType;
+
+ /** Whether the health indicator should be shown. */
+ @JsonProperty("publish_health_indicator")
+ private Boolean publishHealthIndicator;
+
+ public AnomalyDetectionConfig setAnomalyDetectionWorkflowId(Long anomalyDetectionWorkflowId) {
+ this.anomalyDetectionWorkflowId = anomalyDetectionWorkflowId;
+ return this;
+ }
+
+ public Long getAnomalyDetectionWorkflowId() {
+ return anomalyDetectionWorkflowId;
+ }
+
+ public AnomalyDetectionConfig setJobType(AnomalyDetectionJobType jobType) {
+ this.jobType = jobType;
+ return this;
+ }
+
+ public AnomalyDetectionJobType getJobType() {
+ return jobType;
+ }
+
+ public AnomalyDetectionConfig setPublishHealthIndicator(Boolean publishHealthIndicator) {
+ this.publishHealthIndicator = publishHealthIndicator;
+ return this;
+ }
+
+ public Boolean getPublishHealthIndicator() {
+ return publishHealthIndicator;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AnomalyDetectionConfig that = (AnomalyDetectionConfig) o;
+ return Objects.equals(anomalyDetectionWorkflowId, that.anomalyDetectionWorkflowId)
+ && Objects.equals(jobType, that.jobType)
+ && Objects.equals(publishHealthIndicator, that.publishHealthIndicator);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(anomalyDetectionWorkflowId, jobType, publishHealthIndicator);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AnomalyDetectionConfig.class)
+ .add("anomalyDetectionWorkflowId", anomalyDetectionWorkflowId)
+ .add("jobType", jobType)
+ .add("publishHealthIndicator", publishHealthIndicator)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java
new file mode 100755
index 000000000..8176bb473
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/** The type of an anomaly detection job run: normal, or internal/hidden. */
+@Generated
+public enum AnomalyDetectionJobType {
+ ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN,
+ ANOMALY_DETECTION_JOB_TYPE_NORMAL,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java
new file mode 100755
index 000000000..7d24f64ad
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java
@@ -0,0 +1,72 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Request to cancel a refresh operation. */
+@Generated
+public class CancelRefreshRequest {
+ /** The UUID of the monitored object. For example, the schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ /** Unique id of the refresh operation. */
+ @JsonIgnore private Long refreshId;
+
+ public CancelRefreshRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public CancelRefreshRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public CancelRefreshRequest setRefreshId(Long refreshId) {
+ this.refreshId = refreshId;
+ return this;
+ }
+
+ public Long getRefreshId() {
+ return refreshId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelRefreshRequest that = (CancelRefreshRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(refreshId, that.refreshId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, refreshId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelRefreshRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("refreshId", refreshId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java
new file mode 100755
index 000000000..3f81c4845
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Response to cancelling a refresh. */
+@Generated
+public class CancelRefreshResponse {
+ /** The refresh operation that cancellation was requested for. */
+ @JsonProperty("refresh")
+ private Refresh refresh;
+
+ public CancelRefreshResponse setRefresh(Refresh refresh) {
+ this.refresh = refresh;
+ return this;
+ }
+
+ public Refresh getRefresh() {
+ return refresh;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelRefreshResponse that = (CancelRefreshResponse) o;
+ return Objects.equals(refresh, that.refresh);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(refresh);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelRefreshResponse.class).add("refresh", refresh).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java
new file mode 100755
index 000000000..b98cb431c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateMonitorRequest {
+ /** The data quality monitor to create. */
+ @JsonProperty("monitor")
+ private Monitor monitor;
+
+ public CreateMonitorRequest setMonitor(Monitor monitor) {
+ this.monitor = monitor;
+ return this;
+ }
+
+ public Monitor getMonitor() {
+ return monitor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateMonitorRequest that = (CreateMonitorRequest) o;
+ return Objects.equals(monitor, that.monitor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(monitor);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateMonitorRequest.class).add("monitor", monitor).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java
new file mode 100755
index 000000000..25ba06bc9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java
@@ -0,0 +1,73 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateRefreshRequest {
+ /** The UUID of the monitored object. For example, the table id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ /** The refresh to create. */
+ @JsonProperty("refresh")
+ private Refresh refresh;
+
+ public CreateRefreshRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public CreateRefreshRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public CreateRefreshRequest setRefresh(Refresh refresh) {
+ this.refresh = refresh;
+ return this;
+ }
+
+ public Refresh getRefresh() {
+ return refresh;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateRefreshRequest that = (CreateRefreshRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(refresh, that.refresh);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, refresh);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateRefreshRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("refresh", refresh)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java
new file mode 100755
index 000000000..72f07e684
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The data quality monitoring workflow cron schedule. */
+@Generated
+public class CronSchedule {
+ /** Read only field that indicates whether the schedule is paused or not. */
+ @JsonProperty("pause_status")
+ private CronSchedulePauseStatus pauseStatus;
+
+ /**
+ * The expression that determines when to run the monitor. See [examples].
+ *
+ * [examples]:
+ * https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html
+ */
+ @JsonProperty("quartz_cron_expression")
+ private String quartzCronExpression;
+
+ /**
+ * A Java timezone id. The schedule for a job will be resolved with respect to this timezone. See
+ * `Java TimeZone <https://docs.oracle.com/javase/8/docs/api/java/util/TimeZone.html>`_ for
+ * details. The timezone id (e.g., ``America/Los_Angeles``) in which to evaluate the quartz
+ * expression.
+ */
+ @JsonProperty("timezone_id")
+ private String timezoneId;
+
+ public CronSchedule setPauseStatus(CronSchedulePauseStatus pauseStatus) {
+ this.pauseStatus = pauseStatus;
+ return this;
+ }
+
+ public CronSchedulePauseStatus getPauseStatus() {
+ return pauseStatus;
+ }
+
+ public CronSchedule setQuartzCronExpression(String quartzCronExpression) {
+ this.quartzCronExpression = quartzCronExpression;
+ return this;
+ }
+
+ public String getQuartzCronExpression() {
+ return quartzCronExpression;
+ }
+
+ public CronSchedule setTimezoneId(String timezoneId) {
+ this.timezoneId = timezoneId;
+ return this;
+ }
+
+ public String getTimezoneId() {
+ return timezoneId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CronSchedule that = (CronSchedule) o;
+ return Objects.equals(pauseStatus, that.pauseStatus)
+ && Objects.equals(quartzCronExpression, that.quartzCronExpression)
+ && Objects.equals(timezoneId, that.timezoneId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pauseStatus, quartzCronExpression, timezoneId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CronSchedule.class)
+ .add("pauseStatus", pauseStatus)
+ .add("quartzCronExpression", quartzCronExpression)
+ .add("timezoneId", timezoneId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java
new file mode 100755
index 000000000..55d5db475
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/** Whether the data quality monitoring workflow cron schedule is paused. */
+@Generated
+public enum CronSchedulePauseStatus {
+ CRON_SCHEDULE_PAUSE_STATUS_PAUSED,
+ CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java
new file mode 100755
index 000000000..24c38e762
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java
@@ -0,0 +1,373 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Data Profiling Configurations. */
+@Generated
+public class DataProfilingConfig {
+ /**
+ * Field for specifying the absolute path to a custom directory to store data-monitoring assets.
+ * Normally prepopulated to a default user location via UI and Python APIs.
+ */
+ @JsonProperty("assets_dir")
+ private String assetsDir;
+
+ /**
+ * Baseline table name. Baseline data is used to compute drift from the data in the monitored
+ * `table_name`. The baseline table and the monitored table shall have the same schema.
+ */
+ @JsonProperty("baseline_table_name")
+ private String baselineTableName;
+
+ /** Custom metrics. */
+ @JsonProperty("custom_metrics")
+ private Collection<DataProfilingCustomMetric> customMetrics;
+
+ /**
+ * Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in
+ * PENDING state.
+ */
+ @JsonProperty("dashboard_id")
+ private String dashboardId;
+
+ /** Table that stores drift metrics data. Format: `catalog.schema.table_name`. */
+ @JsonProperty("drift_metrics_table_name")
+ private String driftMetricsTableName;
+
+ /** The warehouse for dashboard creation */
+ @JsonProperty("effective_warehouse_id")
+ private String effectiveWarehouseId;
+
+ /** Configuration for monitoring inference log tables. */
+ @JsonProperty("inference_log")
+ private InferenceLogConfig inferenceLog;
+
+ /** The latest error message for a monitor failure. */
+ @JsonProperty("latest_monitor_failure_message")
+ private String latestMonitorFailureMessage;
+
+ /**
+ * Represents the current monitor configuration version in use. The version will be represented in
+ * a numeric fashion (1,2,3...). The field has flexibility to take on negative values, which can
+ * indicate corrupted monitor_version numbers.
+ */
+ @JsonProperty("monitor_version")
+ private Long monitorVersion;
+
+ /** Unity Catalog table to monitor. Format: `catalog.schema.table_name` */
+ @JsonProperty("monitored_table_name")
+ private String monitoredTableName;
+
+ /** Field for specifying notification settings. */
+ @JsonProperty("notification_settings")
+ private NotificationSettings notificationSettings;
+
+ /** ID of the schema where output tables are created. */
+ @JsonProperty("output_schema_id")
+ private String outputSchemaId;
+
+ /** Table that stores profile metrics data. Format: `catalog.schema.table_name`. */
+ @JsonProperty("profile_metrics_table_name")
+ private String profileMetricsTableName;
+
+ /** The cron schedule. */
+ @JsonProperty("schedule")
+ private CronSchedule schedule;
+
+ /** Whether to skip creating a default dashboard summarizing data quality metrics. */
+ @JsonProperty("skip_builtin_dashboard")
+ private Boolean skipBuiltinDashboard;
+
+ /**
+ * List of column expressions to slice data with for targeted analysis. The data is grouped by
+ * each expression independently, resulting in a separate slice for each predicate and its
+ * complements. For example `slicing_exprs=[“col_1”, “col_2 > 10”]` will generate the following
+ * slices: two slices for `col_2 > 10` (True and False), and one slice per unique value in `col1`.
+ * For high-cardinality columns, only the top 100 unique values by frequency will generate slices.
+ */
+ @JsonProperty("slicing_exprs")
+ private Collection<String> slicingExprs;
+
+ /** Configuration for monitoring snapshot tables. */
+ @JsonProperty("snapshot")
+ private SnapshotConfig snapshot;
+
+ /** The data profiling monitor status. */
+ @JsonProperty("status")
+ private DataProfilingStatus status;
+
+ /** Configuration for monitoring time series tables. */
+ @JsonProperty("time_series")
+ private TimeSeriesConfig timeSeries;
+
+ /**
+ * Optional argument to specify the warehouse for dashboard creation. If not specified, the first
+ * running warehouse will be used.
+ */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public DataProfilingConfig setAssetsDir(String assetsDir) {
+ this.assetsDir = assetsDir;
+ return this;
+ }
+
+ public String getAssetsDir() {
+ return assetsDir;
+ }
+
+ public DataProfilingConfig setBaselineTableName(String baselineTableName) {
+ this.baselineTableName = baselineTableName;
+ return this;
+ }
+
+ public String getBaselineTableName() {
+ return baselineTableName;
+ }
+
+ public DataProfilingConfig setCustomMetrics(Collection<DataProfilingCustomMetric> customMetrics) {
+ this.customMetrics = customMetrics;
+ return this;
+ }
+
+ public Collection<DataProfilingCustomMetric> getCustomMetrics() {
+ return customMetrics;
+ }
+
+ public DataProfilingConfig setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ public DataProfilingConfig setDriftMetricsTableName(String driftMetricsTableName) {
+ this.driftMetricsTableName = driftMetricsTableName;
+ return this;
+ }
+
+ public String getDriftMetricsTableName() {
+ return driftMetricsTableName;
+ }
+
+ public DataProfilingConfig setEffectiveWarehouseId(String effectiveWarehouseId) {
+ this.effectiveWarehouseId = effectiveWarehouseId;
+ return this;
+ }
+
+ public String getEffectiveWarehouseId() {
+ return effectiveWarehouseId;
+ }
+
+ public DataProfilingConfig setInferenceLog(InferenceLogConfig inferenceLog) {
+ this.inferenceLog = inferenceLog;
+ return this;
+ }
+
+ public InferenceLogConfig getInferenceLog() {
+ return inferenceLog;
+ }
+
+ public DataProfilingConfig setLatestMonitorFailureMessage(String latestMonitorFailureMessage) {
+ this.latestMonitorFailureMessage = latestMonitorFailureMessage;
+ return this;
+ }
+
+ public String getLatestMonitorFailureMessage() {
+ return latestMonitorFailureMessage;
+ }
+
+ public DataProfilingConfig setMonitorVersion(Long monitorVersion) {
+ this.monitorVersion = monitorVersion;
+ return this;
+ }
+
+ public Long getMonitorVersion() {
+ return monitorVersion;
+ }
+
+ public DataProfilingConfig setMonitoredTableName(String monitoredTableName) {
+ this.monitoredTableName = monitoredTableName;
+ return this;
+ }
+
+ public String getMonitoredTableName() {
+ return monitoredTableName;
+ }
+
+ public DataProfilingConfig setNotificationSettings(NotificationSettings notificationSettings) {
+ this.notificationSettings = notificationSettings;
+ return this;
+ }
+
+ public NotificationSettings getNotificationSettings() {
+ return notificationSettings;
+ }
+
+ public DataProfilingConfig setOutputSchemaId(String outputSchemaId) {
+ this.outputSchemaId = outputSchemaId;
+ return this;
+ }
+
+ public String getOutputSchemaId() {
+ return outputSchemaId;
+ }
+
+ public DataProfilingConfig setProfileMetricsTableName(String profileMetricsTableName) {
+ this.profileMetricsTableName = profileMetricsTableName;
+ return this;
+ }
+
+ public String getProfileMetricsTableName() {
+ return profileMetricsTableName;
+ }
+
+ public DataProfilingConfig setSchedule(CronSchedule schedule) {
+ this.schedule = schedule;
+ return this;
+ }
+
+ public CronSchedule getSchedule() {
+ return schedule;
+ }
+
+ public DataProfilingConfig setSkipBuiltinDashboard(Boolean skipBuiltinDashboard) {
+ this.skipBuiltinDashboard = skipBuiltinDashboard;
+ return this;
+ }
+
+ public Boolean getSkipBuiltinDashboard() {
+ return skipBuiltinDashboard;
+ }
+
+ public DataProfilingConfig setSlicingExprs(Collection<String> slicingExprs) {
+ this.slicingExprs = slicingExprs;
+ return this;
+ }
+
+ public Collection<String> getSlicingExprs() {
+ return slicingExprs;
+ }
+
+ public DataProfilingConfig setSnapshot(SnapshotConfig snapshot) {
+ this.snapshot = snapshot;
+ return this;
+ }
+
+ public SnapshotConfig getSnapshot() {
+ return snapshot;
+ }
+
+ public DataProfilingConfig setStatus(DataProfilingStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public DataProfilingStatus getStatus() {
+ return status;
+ }
+
+ public DataProfilingConfig setTimeSeries(TimeSeriesConfig timeSeries) {
+ this.timeSeries = timeSeries;
+ return this;
+ }
+
+ public TimeSeriesConfig getTimeSeries() {
+ return timeSeries;
+ }
+
+ public DataProfilingConfig setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DataProfilingConfig that = (DataProfilingConfig) o;
+ return Objects.equals(assetsDir, that.assetsDir)
+ && Objects.equals(baselineTableName, that.baselineTableName)
+ && Objects.equals(customMetrics, that.customMetrics)
+ && Objects.equals(dashboardId, that.dashboardId)
+ && Objects.equals(driftMetricsTableName, that.driftMetricsTableName)
+ && Objects.equals(effectiveWarehouseId, that.effectiveWarehouseId)
+ && Objects.equals(inferenceLog, that.inferenceLog)
+ && Objects.equals(latestMonitorFailureMessage, that.latestMonitorFailureMessage)
+ && Objects.equals(monitorVersion, that.monitorVersion)
+ && Objects.equals(monitoredTableName, that.monitoredTableName)
+ && Objects.equals(notificationSettings, that.notificationSettings)
+ && Objects.equals(outputSchemaId, that.outputSchemaId)
+ && Objects.equals(profileMetricsTableName, that.profileMetricsTableName)
+ && Objects.equals(schedule, that.schedule)
+ && Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard)
+ && Objects.equals(slicingExprs, that.slicingExprs)
+ && Objects.equals(snapshot, that.snapshot)
+ && Objects.equals(status, that.status)
+ && Objects.equals(timeSeries, that.timeSeries)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ assetsDir,
+ baselineTableName,
+ customMetrics,
+ dashboardId,
+ driftMetricsTableName,
+ effectiveWarehouseId,
+ inferenceLog,
+ latestMonitorFailureMessage,
+ monitorVersion,
+ monitoredTableName,
+ notificationSettings,
+ outputSchemaId,
+ profileMetricsTableName,
+ schedule,
+ skipBuiltinDashboard,
+ slicingExprs,
+ snapshot,
+ status,
+ timeSeries,
+ warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DataProfilingConfig.class)
+ .add("assetsDir", assetsDir)
+ .add("baselineTableName", baselineTableName)
+ .add("customMetrics", customMetrics)
+ .add("dashboardId", dashboardId)
+ .add("driftMetricsTableName", driftMetricsTableName)
+ .add("effectiveWarehouseId", effectiveWarehouseId)
+ .add("inferenceLog", inferenceLog)
+ .add("latestMonitorFailureMessage", latestMonitorFailureMessage)
+ .add("monitorVersion", monitorVersion)
+ .add("monitoredTableName", monitoredTableName)
+ .add("notificationSettings", notificationSettings)
+ .add("outputSchemaId", outputSchemaId)
+ .add("profileMetricsTableName", profileMetricsTableName)
+ .add("schedule", schedule)
+ .add("skipBuiltinDashboard", skipBuiltinDashboard)
+ .add("slicingExprs", slicingExprs)
+ .add("snapshot", snapshot)
+ .add("status", status)
+ .add("timeSeries", timeSeries)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java
new file mode 100755
index 000000000..0fd468a79
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java
@@ -0,0 +1,115 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Custom metric definition. */
+@Generated
+public class DataProfilingCustomMetric {
+ /**
+ * Jinja template for a SQL expression that specifies how to compute the metric. See [create
+ * metric definition].
+ *
+ * [create metric definition]:
+ * https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition
+ */
+ @JsonProperty("definition")
+ private String definition;
+
+ /**
+ * A list of column names in the input table the metric should be computed for. Can use
+ * ``":table"`` to indicate that the metric needs information from multiple columns.
+ */
+ @JsonProperty("input_columns")
+  private Collection<String> inputColumns;
+
+ /** Name of the metric in the output tables. */
+ @JsonProperty("name")
+ private String name;
+
+ /** The output type of the custom metric. */
+ @JsonProperty("output_data_type")
+ private String outputDataType;
+
+ /** The type of the custom metric. */
+ @JsonProperty("type")
+ private DataProfilingCustomMetricType typeValue;
+
+ public DataProfilingCustomMetric setDefinition(String definition) {
+ this.definition = definition;
+ return this;
+ }
+
+ public String getDefinition() {
+ return definition;
+ }
+
+  public DataProfilingCustomMetric setInputColumns(Collection<String> inputColumns) {
+ this.inputColumns = inputColumns;
+ return this;
+ }
+
+  public Collection<String> getInputColumns() {
+ return inputColumns;
+ }
+
+ public DataProfilingCustomMetric setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DataProfilingCustomMetric setOutputDataType(String outputDataType) {
+ this.outputDataType = outputDataType;
+ return this;
+ }
+
+ public String getOutputDataType() {
+ return outputDataType;
+ }
+
+ public DataProfilingCustomMetric setType(DataProfilingCustomMetricType typeValue) {
+ this.typeValue = typeValue;
+ return this;
+ }
+
+ public DataProfilingCustomMetricType getType() {
+ return typeValue;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DataProfilingCustomMetric that = (DataProfilingCustomMetric) o;
+ return Objects.equals(definition, that.definition)
+ && Objects.equals(inputColumns, that.inputColumns)
+ && Objects.equals(name, that.name)
+ && Objects.equals(outputDataType, that.outputDataType)
+ && Objects.equals(typeValue, that.typeValue);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(definition, inputColumns, name, outputDataType, typeValue);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DataProfilingCustomMetric.class)
+ .add("definition", definition)
+ .add("inputColumns", inputColumns)
+ .add("name", name)
+ .add("outputDataType", outputDataType)
+ .add("typeValue", typeValue)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java
new file mode 100755
index 000000000..63122073c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/** The custom metric type. */
+@Generated
+public enum DataProfilingCustomMetricType {
+ DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE,
+ DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED,
+ DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java
new file mode 100755
index 000000000..d49a4406d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/** The status of the data profiling monitor. */
+@Generated
+public enum DataProfilingStatus {
+ DATA_PROFILING_STATUS_ACTIVE,
+ DATA_PROFILING_STATUS_DELETE_PENDING,
+ DATA_PROFILING_STATUS_ERROR,
+ DATA_PROFILING_STATUS_FAILED,
+ DATA_PROFILING_STATUS_PENDING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
new file mode 100755
index 000000000..0226e6c69
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
@@ -0,0 +1,197 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */
+@Generated
+public class DataQualityAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DataQualityAPI.class);
+
+ private final DataQualityService impl;
+
+ /** Regular-use constructor */
+ public DataQualityAPI(ApiClient apiClient) {
+ impl = new DataQualityImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DataQualityAPI(DataQualityService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`.
+ */
+ public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) {
+ return impl.cancelRefresh(request);
+ }
+
+ /**
+ * Create a data quality monitor on a Unity Catalog object. The caller must provide either
+ * `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor.
+ *
+ * For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the
+ * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent
+ * schema, and have **SELECT** access on the table. 3. have the following permissions: -
+ * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+ * be an owner of the table.
+ *
+   *
+   * <p>Workspace assets, such as the dashboard, will be created in the workspace where this call
+ * was made.
+ */
+ public Monitor createMonitor(CreateMonitorRequest request) {
+ return impl.createMonitor(request);
+ }
+
+ /**
+ * Creates a refresh. Currently only supported for the `table` `object_type`.
+ *
+   *
+   * <p>The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
+ * on the table's parent catalog and be an owner of the table's parent schema 3. have the
+ * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the
+ * table's parent schema - be an owner of the table
+ */
+ public Refresh createRefresh(CreateRefreshRequest request) {
+ return impl.createRefresh(request);
+ }
+
+ public void deleteMonitor(String objectType, String objectId) {
+ deleteMonitor(new DeleteMonitorRequest().setObjectType(objectType).setObjectId(objectId));
+ }
+
+ /**
+ * Delete a data quality monitor on Unity Catalog object.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ *
+   *
+   * <p>Note that the metric tables and dashboard will not be deleted as part of this call; those
+ * assets must be manually cleaned up (if desired).
+ */
+ public void deleteMonitor(DeleteMonitorRequest request) {
+ impl.deleteMonitor(request);
+ }
+
+ public void deleteRefresh(String objectType, String objectId, long refreshId) {
+ deleteRefresh(
+ new DeleteRefreshRequest()
+ .setObjectType(objectType)
+ .setObjectId(objectId)
+ .setRefreshId(refreshId));
+ }
+
+ /** (Unimplemented) Delete a refresh */
+ public void deleteRefresh(DeleteRefreshRequest request) {
+ impl.deleteRefresh(request);
+ }
+
+ public Monitor getMonitor(String objectType, String objectId) {
+ return getMonitor(new GetMonitorRequest().setObjectType(objectType).setObjectId(objectId));
+ }
+
+ /**
+ * Read a data quality monitor on Unity Catalog object.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ *
+   *
+   * <p>The returned information includes configuration values, as well as information on assets
+ * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is
+ * in a different workspace than where the monitor was created.
+ */
+ public Monitor getMonitor(GetMonitorRequest request) {
+ return impl.getMonitor(request);
+ }
+
+ public Refresh getRefresh(String objectType, String objectId, long refreshId) {
+ return getRefresh(
+ new GetRefreshRequest()
+ .setObjectType(objectType)
+ .setObjectId(objectId)
+ .setRefreshId(refreshId));
+ }
+
+ /**
+ * Get data quality monitor refresh.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ */
+ public Refresh getRefresh(GetRefreshRequest request) {
+ return impl.getRefresh(request);
+ }
+
+ /** (Unimplemented) List data quality monitors. */
+  public Iterable<Monitor> listMonitor(ListMonitorRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listMonitor,
+ ListMonitorResponse::getMonitors,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+  public Iterable<Refresh> listRefresh(String objectType, String objectId) {
+ return listRefresh(new ListRefreshRequest().setObjectType(objectType).setObjectId(objectId));
+ }
+
+ /**
+ * List data quality monitor refreshes.
+ *
+ * For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ */
+  public Iterable<Refresh> listRefresh(ListRefreshRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listRefresh,
+ ListRefreshResponse::getRefreshes,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /**
+ * Update a data quality monitor on Unity Catalog object.
+ *
+ * For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ */
+ public Monitor updateMonitor(UpdateMonitorRequest request) {
+ return impl.updateMonitor(request);
+ }
+
+ /** (Unimplemented) Update a refresh */
+ public Refresh updateRefresh(UpdateRefreshRequest request) {
+ return impl.updateRefresh(request);
+ }
+
+ public DataQualityService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java
new file mode 100755
index 000000000..7411d79f5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java
@@ -0,0 +1,190 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of DataQuality */
+@Generated
+class DataQualityImpl implements DataQualityService {
+ private final ApiClient apiClient;
+
+ public DataQualityImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes/%s/cancel",
+ request.getObjectType(), request.getObjectId(), request.getRefreshId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, CancelRefreshResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Monitor createMonitor(CreateMonitorRequest request) {
+ String path = "/api/data-quality/v1/monitors";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getMonitor()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Monitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Refresh createRefresh(CreateRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes",
+ request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getRefresh()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Refresh.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteMonitor(DeleteMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteRefresh(DeleteRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes/%s",
+ request.getObjectType(), request.getObjectId(), request.getRefreshId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Monitor getMonitor(GetMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Monitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Refresh getRefresh(GetRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes/%s",
+ request.getObjectType(), request.getObjectId(), request.getRefreshId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Refresh.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListMonitorResponse listMonitor(ListMonitorRequest request) {
+ String path = "/api/data-quality/v1/monitors";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListMonitorResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListRefreshResponse listRefresh(ListRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes",
+ request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListRefreshResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Monitor updateMonitor(UpdateMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getMonitor()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Monitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Refresh updateRefresh(UpdateRefreshRequest request) {
+ String path =
+ String.format(
+ "/api/data-quality/v1/monitors/%s/%s/refreshes/%s",
+ request.getObjectType(), request.getObjectId(), request.getRefreshId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getRefresh()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Refresh.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java
new file mode 100755
index 000000000..1e5487768
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java
@@ -0,0 +1,111 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)
+ *
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DataQualityService {
+ /**
+ * Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`.
+ */
+ CancelRefreshResponse cancelRefresh(CancelRefreshRequest cancelRefreshRequest);
+
+ /**
+ * Create a data quality monitor on a Unity Catalog object. The caller must provide either
+ * `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the
+ * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent
+ * schema, and have **SELECT** access on the table. 3. have the following permissions: -
+ * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+ * be an owner of the table.
+ *
+   *
+   * <p>Workspace assets, such as the dashboard, will be created in the workspace where this call
+ * was made.
+ */
+ Monitor createMonitor(CreateMonitorRequest createMonitorRequest);
+
+ /**
+ * Creates a refresh. Currently only supported for the `table` `object_type`.
+ *
+   *
+   * <p>The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
+ * on the table's parent catalog and be an owner of the table's parent schema 3. have the
+ * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the
+ * table's parent schema - be an owner of the table
+ */
+ Refresh createRefresh(CreateRefreshRequest createRefreshRequest);
+
+ /**
+ * Delete a data quality monitor on Unity Catalog object.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ *
+   *
+   * <p>Note that the metric tables and dashboard will not be deleted as part of this call; those
+ * assets must be manually cleaned up (if desired).
+ */
+ void deleteMonitor(DeleteMonitorRequest deleteMonitorRequest);
+
+ /** (Unimplemented) Delete a refresh */
+ void deleteRefresh(DeleteRefreshRequest deleteRefreshRequest);
+
+ /**
+ * Read a data quality monitor on Unity Catalog object.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ *
+   *
+   * <p>The returned information includes configuration values, as well as information on assets
+ * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is
+ * in a different workspace than where the monitor was created.
+ */
+ Monitor getMonitor(GetMonitorRequest getMonitorRequest);
+
+ /**
+ * Get data quality monitor refresh.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ */
+ Refresh getRefresh(GetRefreshRequest getRefreshRequest);
+
+ /** (Unimplemented) List data quality monitors. */
+ ListMonitorResponse listMonitor(ListMonitorRequest listMonitorRequest);
+
+ /**
+ * List data quality monitor refreshes.
+ *
+   *
+   * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ */
+ ListRefreshResponse listRefresh(ListRefreshRequest listRefreshRequest);
+
+ /**
+ * Update a data quality monitor on Unity Catalog object.
+ *
+   *
+   * <p>For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent
+ * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+ * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
+ * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ */
+ Monitor updateMonitor(UpdateMonitorRequest updateMonitorRequest);
+
+ /** (Unimplemented) Update a refresh */
+ Refresh updateRefresh(UpdateRefreshRequest updateRefreshRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java
new file mode 100755
index 000000000..0479ce355
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteMonitorRequest {
+ /** The UUID of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ public DeleteMonitorRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public DeleteMonitorRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteMonitorRequest that = (DeleteMonitorRequest) o;
+ return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteMonitorRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java
new file mode 100755
index 000000000..6ec839ce9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteRefreshRequest {
+ /** The UUID of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ /** Unique id of the refresh operation. */
+ @JsonIgnore private Long refreshId;
+
+ public DeleteRefreshRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public DeleteRefreshRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public DeleteRefreshRequest setRefreshId(Long refreshId) {
+ this.refreshId = refreshId;
+ return this;
+ }
+
+ public Long getRefreshId() {
+ return refreshId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteRefreshRequest that = (DeleteRefreshRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(refreshId, that.refreshId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, refreshId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteRefreshRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("refreshId", refreshId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java
new file mode 100755
index 000000000..cdb1e5136
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetMonitorRequest {
+ /** The UUID of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ public GetMonitorRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public GetMonitorRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetMonitorRequest that = (GetMonitorRequest) o;
+ return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetMonitorRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java
new file mode 100755
index 000000000..9280dce0f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetRefreshRequest {
+ /** The UUID of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ /** Unique id of the refresh operation. */
+ @JsonIgnore private Long refreshId;
+
+ public GetRefreshRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public GetRefreshRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public GetRefreshRequest setRefreshId(Long refreshId) {
+ this.refreshId = refreshId;
+ return this;
+ }
+
+ public Long getRefreshId() {
+ return refreshId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetRefreshRequest that = (GetRefreshRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(refreshId, that.refreshId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, refreshId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetRefreshRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("refreshId", refreshId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java
new file mode 100755
index 000000000..0411b284a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java
@@ -0,0 +1,145 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Inference log configuration. */
+@Generated
+public class InferenceLogConfig {
+ /**
+ * List of granularities to use when aggregating data into time windows based on their timestamp.
+ */
+ @JsonProperty("granularities")
+ private Collection granularities;
+
+ /** Column for the label. */
+ @JsonProperty("label_column")
+ private String labelColumn;
+
+ /** Column for the model identifier. */
+ @JsonProperty("model_id_column")
+ private String modelIdColumn;
+
+ /** Column for the prediction. */
+ @JsonProperty("prediction_column")
+ private String predictionColumn;
+
+ /** Column for prediction probabilities */
+ @JsonProperty("prediction_probability_column")
+ private String predictionProbabilityColumn;
+
+ /** Problem type the model aims to solve. */
+ @JsonProperty("problem_type")
+ private InferenceProblemType problemType;
+
+ /** Column for the timestamp. */
+ @JsonProperty("timestamp_column")
+ private String timestampColumn;
+
+ public InferenceLogConfig setGranularities(Collection granularities) {
+ this.granularities = granularities;
+ return this;
+ }
+
+ public Collection getGranularities() {
+ return granularities;
+ }
+
+ public InferenceLogConfig setLabelColumn(String labelColumn) {
+ this.labelColumn = labelColumn;
+ return this;
+ }
+
+ public String getLabelColumn() {
+ return labelColumn;
+ }
+
+ public InferenceLogConfig setModelIdColumn(String modelIdColumn) {
+ this.modelIdColumn = modelIdColumn;
+ return this;
+ }
+
+ public String getModelIdColumn() {
+ return modelIdColumn;
+ }
+
+ public InferenceLogConfig setPredictionColumn(String predictionColumn) {
+ this.predictionColumn = predictionColumn;
+ return this;
+ }
+
+ public String getPredictionColumn() {
+ return predictionColumn;
+ }
+
+ public InferenceLogConfig setPredictionProbabilityColumn(String predictionProbabilityColumn) {
+ this.predictionProbabilityColumn = predictionProbabilityColumn;
+ return this;
+ }
+
+ public String getPredictionProbabilityColumn() {
+ return predictionProbabilityColumn;
+ }
+
+ public InferenceLogConfig setProblemType(InferenceProblemType problemType) {
+ this.problemType = problemType;
+ return this;
+ }
+
+ public InferenceProblemType getProblemType() {
+ return problemType;
+ }
+
+ public InferenceLogConfig setTimestampColumn(String timestampColumn) {
+ this.timestampColumn = timestampColumn;
+ return this;
+ }
+
+ public String getTimestampColumn() {
+ return timestampColumn;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ InferenceLogConfig that = (InferenceLogConfig) o;
+ return Objects.equals(granularities, that.granularities)
+ && Objects.equals(labelColumn, that.labelColumn)
+ && Objects.equals(modelIdColumn, that.modelIdColumn)
+ && Objects.equals(predictionColumn, that.predictionColumn)
+ && Objects.equals(predictionProbabilityColumn, that.predictionProbabilityColumn)
+ && Objects.equals(problemType, that.problemType)
+ && Objects.equals(timestampColumn, that.timestampColumn);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ granularities,
+ labelColumn,
+ modelIdColumn,
+ predictionColumn,
+ predictionProbabilityColumn,
+ problemType,
+ timestampColumn);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(InferenceLogConfig.class)
+ .add("granularities", granularities)
+ .add("labelColumn", labelColumn)
+ .add("modelIdColumn", modelIdColumn)
+ .add("predictionColumn", predictionColumn)
+ .add("predictionProbabilityColumn", predictionProbabilityColumn)
+ .add("problemType", problemType)
+ .add("timestampColumn", timestampColumn)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java
new file mode 100755
index 000000000..3adad7d38
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
/** Inference problem type the model aims to solve. */
@Generated
public enum InferenceProblemType {
  // Classification problem (predicting a discrete class label).
  INFERENCE_PROBLEM_TYPE_CLASSIFICATION,
  // Regression problem (predicting a continuous value).
  INFERENCE_PROBLEM_TYPE_REGRESSION,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java
new file mode 100755
index 000000000..5a7cc3b3a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListMonitorRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListMonitorRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListMonitorRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListMonitorRequest that = (ListMonitorRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListMonitorRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java
new file mode 100755
index 000000000..ad6f2650b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response for listing Monitors. */
+@Generated
+public class ListMonitorResponse {
+ /** */
+ @JsonProperty("monitors")
+ private Collection monitors;
+
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListMonitorResponse setMonitors(Collection monitors) {
+ this.monitors = monitors;
+ return this;
+ }
+
+ public Collection getMonitors() {
+ return monitors;
+ }
+
+ public ListMonitorResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListMonitorResponse that = (ListMonitorResponse) o;
+ return Objects.equals(monitors, that.monitors)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(monitors, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListMonitorResponse.class)
+ .add("monitors", monitors)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java
new file mode 100755
index 000000000..e86705d05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListRefreshRequest {
+ /** The UUID of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonIgnore private String objectType;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListRefreshRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public ListRefreshRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public ListRefreshRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListRefreshRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListRefreshRequest that = (ListRefreshRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListRefreshRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java
new file mode 100755
index 000000000..d05dce54b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response for listing refreshes. */
+@Generated
+public class ListRefreshResponse {
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("refreshes")
+ private Collection refreshes;
+
+ public ListRefreshResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListRefreshResponse setRefreshes(Collection refreshes) {
+ this.refreshes = refreshes;
+ return this;
+ }
+
+ public Collection getRefreshes() {
+ return refreshes;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListRefreshResponse that = (ListRefreshResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(refreshes, that.refreshes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, refreshes);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListRefreshResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("refreshes", refreshes)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java
new file mode 100755
index 000000000..da034b6b9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Monitor for the data quality of unity catalog entities such as schema or table. */
+@Generated
+public class Monitor {
+ /** Anomaly Detection Configuration, applicable to `schema` object types. */
+ @JsonProperty("anomaly_detection_config")
+ private AnomalyDetectionConfig anomalyDetectionConfig;
+
+ /** Data Profiling Configuration, applicable to `table` object types */
+ @JsonProperty("data_profiling_config")
+ private DataProfilingConfig dataProfilingConfig;
+
+ /** The UUID of the request object. For example, schema id. */
+ @JsonProperty("object_id")
+ private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+ @JsonProperty("object_type")
+ private String objectType;
+
+ public Monitor setAnomalyDetectionConfig(AnomalyDetectionConfig anomalyDetectionConfig) {
+ this.anomalyDetectionConfig = anomalyDetectionConfig;
+ return this;
+ }
+
+ public AnomalyDetectionConfig getAnomalyDetectionConfig() {
+ return anomalyDetectionConfig;
+ }
+
+ public Monitor setDataProfilingConfig(DataProfilingConfig dataProfilingConfig) {
+ this.dataProfilingConfig = dataProfilingConfig;
+ return this;
+ }
+
+ public DataProfilingConfig getDataProfilingConfig() {
+ return dataProfilingConfig;
+ }
+
+ public Monitor setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public Monitor setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Monitor that = (Monitor) o;
+ return Objects.equals(anomalyDetectionConfig, that.anomalyDetectionConfig)
+ && Objects.equals(dataProfilingConfig, that.dataProfilingConfig)
+ && Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(anomalyDetectionConfig, dataProfilingConfig, objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Monitor.class)
+ .add("anomalyDetectionConfig", anomalyDetectionConfig)
+ .add("dataProfilingConfig", dataProfilingConfig)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java
new file mode 100755
index 000000000..5a5a920aa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java
@@ -0,0 +1,49 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Destination of the data quality monitoring notification. */
+@Generated
+public class NotificationDestination {
+ /**
+ * The list of email addresses to send the notification to. A maximum of 5 email addresses is
+ * supported.
+ */
+ @JsonProperty("email_addresses")
+ private Collection emailAddresses;
+
+ public NotificationDestination setEmailAddresses(Collection emailAddresses) {
+ this.emailAddresses = emailAddresses;
+ return this;
+ }
+
+ public Collection getEmailAddresses() {
+ return emailAddresses;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NotificationDestination that = (NotificationDestination) o;
+ return Objects.equals(emailAddresses, that.emailAddresses);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(emailAddresses);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NotificationDestination.class)
+ .add("emailAddresses", emailAddresses)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java
new file mode 100755
index 000000000..6f3b950b5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Settings for sending notifications on the data quality monitoring. */
+@Generated
+public class NotificationSettings {
+ /** Destinations to send notifications on failure/timeout. */
+ @JsonProperty("on_failure")
+ private NotificationDestination onFailure;
+
+ public NotificationSettings setOnFailure(NotificationDestination onFailure) {
+ this.onFailure = onFailure;
+ return this;
+ }
+
+ public NotificationDestination getOnFailure() {
+ return onFailure;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NotificationSettings that = (NotificationSettings) o;
+ return Objects.equals(onFailure, that.onFailure);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(onFailure);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NotificationSettings.class).add("onFailure", onFailure).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java
new file mode 100755
index 000000000..d2e0fb6b9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java
@@ -0,0 +1,154 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
/** The Refresh object gives information on a refresh of the data quality monitoring pipeline. */
@Generated
public class Refresh {
  /** Time when the refresh ended (milliseconds since 1/1/1970 UTC). */
  @JsonProperty("end_time_ms")
  private Long endTimeMs;

  /**
   * An optional message to give insight into the current state of the refresh (e.g. FAILURE
   * messages).
   */
  @JsonProperty("message")
  private String message;

  /** The UUID of the request object. For example, table id. */
  @JsonProperty("object_id")
  private String objectId;

  /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
  @JsonProperty("object_type")
  private String objectType;

  /** Unique id of the refresh operation. */
  @JsonProperty("refresh_id")
  private Long refreshId;

  /** Time when the refresh started (milliseconds since 1/1/1970 UTC). */
  @JsonProperty("start_time_ms")
  private Long startTimeMs;

  /** The current state of the refresh. */
  @JsonProperty("state")
  private RefreshState state;

  /** What triggered the refresh. */
  @JsonProperty("trigger")
  private RefreshTrigger trigger;

  public Refresh setEndTimeMs(Long endTimeMs) {
    this.endTimeMs = endTimeMs;
    return this;
  }

  public Long getEndTimeMs() {
    return endTimeMs;
  }

  public Refresh setMessage(String message) {
    this.message = message;
    return this;
  }

  public String getMessage() {
    return message;
  }

  public Refresh setObjectId(String objectId) {
    this.objectId = objectId;
    return this;
  }

  public String getObjectId() {
    return objectId;
  }

  public Refresh setObjectType(String objectType) {
    this.objectType = objectType;
    return this;
  }

  public String getObjectType() {
    return objectType;
  }

  public Refresh setRefreshId(Long refreshId) {
    this.refreshId = refreshId;
    return this;
  }

  public Long getRefreshId() {
    return refreshId;
  }

  public Refresh setStartTimeMs(Long startTimeMs) {
    this.startTimeMs = startTimeMs;
    return this;
  }

  public Long getStartTimeMs() {
    return startTimeMs;
  }

  public Refresh setState(RefreshState state) {
    this.state = state;
    return this;
  }

  public RefreshState getState() {
    return state;
  }

  public Refresh setTrigger(RefreshTrigger trigger) {
    this.trigger = trigger;
    return this;
  }

  public RefreshTrigger getTrigger() {
    return trigger;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Refresh that = (Refresh) o;
    return Objects.equals(endTimeMs, that.endTimeMs)
        && Objects.equals(message, that.message)
        && Objects.equals(objectId, that.objectId)
        && Objects.equals(objectType, that.objectType)
        && Objects.equals(refreshId, that.refreshId)
        && Objects.equals(startTimeMs, that.startTimeMs)
        && Objects.equals(state, that.state)
        && Objects.equals(trigger, that.trigger);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        endTimeMs, message, objectId, objectType, refreshId, startTimeMs, state, trigger);
  }

  @Override
  public String toString() {
    return new ToStringer(Refresh.class)
        .add("endTimeMs", endTimeMs)
        .add("message", message)
        .add("objectId", objectId)
        .add("objectType", objectType)
        .add("refreshId", refreshId)
        .add("startTimeMs", startTimeMs)
        .add("state", state)
        .add("trigger", trigger)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java
new file mode 100755
index 000000000..d69055c0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java
@@ -0,0 +1,16 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
/** The state of the refresh. */
@Generated
public enum RefreshState {
  // Refresh was canceled.
  MONITOR_REFRESH_STATE_CANCELED,
  // Refresh failed.
  MONITOR_REFRESH_STATE_FAILED,
  // Refresh is queued but has not started running.
  MONITOR_REFRESH_STATE_PENDING,
  // Refresh is currently running.
  MONITOR_REFRESH_STATE_RUNNING,
  // Refresh completed successfully.
  MONITOR_REFRESH_STATE_SUCCESS,
  // State could not be determined.
  MONITOR_REFRESH_STATE_UNKNOWN,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java
new file mode 100755
index 000000000..f40549424
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+
/** The trigger of the refresh. */
@Generated
public enum RefreshTrigger {
  // Refresh initiated by a change in the underlying data.
  MONITOR_REFRESH_TRIGGER_DATA_CHANGE,
  // Refresh initiated manually by a user.
  MONITOR_REFRESH_TRIGGER_MANUAL,
  // Refresh initiated by a configured schedule.
  MONITOR_REFRESH_TRIGGER_SCHEDULE,
  // Trigger could not be determined.
  MONITOR_REFRESH_TRIGGER_UNKNOWN,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java
new file mode 100755
index 000000000..de4158cf0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Snapshot analysis configuration. */
+@Generated
+public class SnapshotConfig {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SnapshotConfig.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java
new file mode 100755
index 000000000..eb7d0402e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Time series analysis configuration. */
+@Generated
+public class TimeSeriesConfig {
+ /**
+ * List of granularities to use when aggregating data into time windows based on their timestamp.
+ */
+ @JsonProperty("granularities")
+ private Collection granularities;
+
+ /** Column for the timestamp. */
+ @JsonProperty("timestamp_column")
+ private String timestampColumn;
+
+ public TimeSeriesConfig setGranularities(Collection granularities) {
+ this.granularities = granularities;
+ return this;
+ }
+
+ public Collection getGranularities() {
+ return granularities;
+ }
+
+ public TimeSeriesConfig setTimestampColumn(String timestampColumn) {
+ this.timestampColumn = timestampColumn;
+ return this;
+ }
+
+ public String getTimestampColumn() {
+ return timestampColumn;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TimeSeriesConfig that = (TimeSeriesConfig) o;
+ return Objects.equals(granularities, that.granularities)
+ && Objects.equals(timestampColumn, that.timestampColumn);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(granularities, timestampColumn);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TimeSeriesConfig.class)
+ .add("granularities", granularities)
+ .add("timestampColumn", timestampColumn)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java
new file mode 100755
index 000000000..014f74350
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java
@@ -0,0 +1,93 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Request to partially update a data-quality monitor. */
+@Generated
+public class UpdateMonitorRequest {
+  /** The monitor to update. */
+  @JsonProperty("monitor")
+  private Monitor monitor;
+
+  /** The UUID of the request object. For example, schema id. */
+  @JsonIgnore private String objectId;
+
+  /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+  @JsonIgnore private String objectType;
+
+  /**
+   * The field mask to specify which fields to update as a comma-separated list. Example value:
+   * `data_profiling_config.custom_metrics,data_profiling_config.schedule.quartz_cron_expression`
+   */
+  @JsonIgnore
+  @QueryParam("update_mask")
+  private String updateMask;
+
+  /** Sets the monitor payload; returns {@code this} for chaining. */
+  public UpdateMonitorRequest setMonitor(Monitor monitor) {
+    this.monitor = monitor;
+    return this;
+  }
+
+  /** Returns the monitor payload, or {@code null} if unset. */
+  public Monitor getMonitor() {
+    return monitor;
+  }
+
+  /** Sets the object UUID; returns {@code this} for chaining. */
+  public UpdateMonitorRequest setObjectId(String objectId) {
+    this.objectId = objectId;
+    return this;
+  }
+
+  /** Returns the object UUID, or {@code null} if unset. */
+  public String getObjectId() {
+    return objectId;
+  }
+
+  /** Sets the monitored-object type; returns {@code this} for chaining. */
+  public UpdateMonitorRequest setObjectType(String objectType) {
+    this.objectType = objectType;
+    return this;
+  }
+
+  /** Returns the monitored-object type, or {@code null} if unset. */
+  public String getObjectType() {
+    return objectType;
+  }
+
+  /** Sets the update field mask; returns {@code this} for chaining. */
+  public UpdateMonitorRequest setUpdateMask(String updateMask) {
+    this.updateMask = updateMask;
+    return this;
+  }
+
+  /** Returns the update field mask, or {@code null} if unset. */
+  public String getUpdateMask() {
+    return updateMask;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    UpdateMonitorRequest other = (UpdateMonitorRequest) o;
+    return Objects.equals(monitor, other.monitor)
+        && Objects.equals(objectId, other.objectId)
+        && Objects.equals(objectType, other.objectType)
+        && Objects.equals(updateMask, other.updateMask);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(monitor, objectId, objectType, updateMask);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(UpdateMonitorRequest.class)
+        .add("monitor", monitor)
+        .add("objectId", objectId)
+        .add("objectType", objectType)
+        .add("updateMask", updateMask)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java
new file mode 100755
index 000000000..057ac4706
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataquality;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Request to partially update a monitor refresh operation. */
+@Generated
+public class UpdateRefreshRequest {
+  /** The UUID of the request object. For example, schema id. */
+  @JsonIgnore private String objectId;
+
+  /** The type of the monitored object. Can be one of the following: `schema` or `table`. */
+  @JsonIgnore private String objectType;
+
+  /** The refresh to update. */
+  @JsonProperty("refresh")
+  private Refresh refresh;
+
+  /** Unique id of the refresh operation. */
+  @JsonIgnore private Long refreshId;
+
+  /** The field mask to specify which fields to update. */
+  @JsonIgnore
+  @QueryParam("update_mask")
+  private String updateMask;
+
+  /** Sets the object UUID; returns {@code this} for chaining. */
+  public UpdateRefreshRequest setObjectId(String objectId) {
+    this.objectId = objectId;
+    return this;
+  }
+
+  /** Returns the object UUID, or {@code null} if unset. */
+  public String getObjectId() {
+    return objectId;
+  }
+
+  /** Sets the monitored-object type; returns {@code this} for chaining. */
+  public UpdateRefreshRequest setObjectType(String objectType) {
+    this.objectType = objectType;
+    return this;
+  }
+
+  /** Returns the monitored-object type, or {@code null} if unset. */
+  public String getObjectType() {
+    return objectType;
+  }
+
+  /** Sets the refresh payload; returns {@code this} for chaining. */
+  public UpdateRefreshRequest setRefresh(Refresh refresh) {
+    this.refresh = refresh;
+    return this;
+  }
+
+  /** Returns the refresh payload, or {@code null} if unset. */
+  public Refresh getRefresh() {
+    return refresh;
+  }
+
+  /** Sets the refresh operation id; returns {@code this} for chaining. */
+  public UpdateRefreshRequest setRefreshId(Long refreshId) {
+    this.refreshId = refreshId;
+    return this;
+  }
+
+  /** Returns the refresh operation id, or {@code null} if unset. */
+  public Long getRefreshId() {
+    return refreshId;
+  }
+
+  /** Sets the update field mask; returns {@code this} for chaining. */
+  public UpdateRefreshRequest setUpdateMask(String updateMask) {
+    this.updateMask = updateMask;
+    return this;
+  }
+
+  /** Returns the update field mask, or {@code null} if unset. */
+  public String getUpdateMask() {
+    return updateMask;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    UpdateRefreshRequest other = (UpdateRefreshRequest) o;
+    return Objects.equals(objectId, other.objectId)
+        && Objects.equals(objectType, other.objectType)
+        && Objects.equals(refresh, other.refresh)
+        && Objects.equals(refreshId, other.refreshId)
+        && Objects.equals(updateMask, other.updateMask);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(objectId, objectType, refresh, refreshId, updateMask);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(UpdateRefreshRequest.class)
+        .add("objectId", objectId)
+        .add("objectType", objectType)
+        .add("refresh", refresh)
+        .add("refreshId", refreshId)
+        .add("updateMask", updateMask)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
index 5aacd813f..8ae58f6a5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
@@ -15,7 +15,7 @@ public class GetPermissionLevelsRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
+ * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
* serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
index 9ee5386f6..801a423e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
@@ -15,7 +15,7 @@ public class GetPermissionRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
+ * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
* serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
index 188f7fda0..25ba32997 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
@@ -21,7 +21,7 @@ public class SetObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
+ * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
* serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
index 442fbf40d..b7ea0195f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
@@ -21,7 +21,7 @@ public class UpdateObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
* clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
+ * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
* serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
index 431df6742..883361917 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java
@@ -26,6 +26,93 @@ public AccountIamV2API(AccountIamV2Service mock) {
impl = mock;
}
+ /** Creates a group in the Databricks account; returns the created group. */
+ public Group createGroup(CreateGroupRequest request) {
+ return impl.createGroup(request);
+ }
+
+ /** Creates a service principal in the account; returns the created principal. */
+ public ServicePrincipal createServicePrincipal(CreateServicePrincipalRequest request) {
+ return impl.createServicePrincipal(request);
+ }
+
+ /** Creates a user in the account; returns the created user. */
+ public User createUser(CreateUserRequest request) {
+ return impl.createUser(request);
+ }
+
+ /** Creates workspace access details for a principal in a workspace. */
+ public WorkspaceAccessDetail createWorkspaceAccessDetail(
+ CreateWorkspaceAccessDetailRequest request) {
+ return impl.createWorkspaceAccessDetail(request);
+ }
+
+ /** Convenience overload of {@link #deleteGroup(DeleteGroupRequest)} keyed by internal id. */
+ public void deleteGroup(long internalId) {
+ deleteGroup(new DeleteGroupRequest().setInternalId(internalId));
+ }
+
+ /** Deletes the group identified by its internal id. */
+ public void deleteGroup(DeleteGroupRequest request) {
+ impl.deleteGroup(request);
+ }
+
+ /** Convenience overload of {@link #deleteServicePrincipal(DeleteServicePrincipalRequest)}. */
+ public void deleteServicePrincipal(long internalId) {
+ deleteServicePrincipal(new DeleteServicePrincipalRequest().setInternalId(internalId));
+ }
+
+ /** Deletes the service principal identified by its internal id. */
+ public void deleteServicePrincipal(DeleteServicePrincipalRequest request) {
+ impl.deleteServicePrincipal(request);
+ }
+
+ /** Convenience overload of {@link #deleteUser(DeleteUserRequest)} keyed by internal id. */
+ public void deleteUser(long internalId) {
+ deleteUser(new DeleteUserRequest().setInternalId(internalId));
+ }
+
+ /** Deletes the user identified by its internal id. */
+ public void deleteUser(DeleteUserRequest request) {
+ impl.deleteUser(request);
+ }
+
+ /** Convenience overload keyed by workspace id and principal id. */
+ public void deleteWorkspaceAccessDetail(long workspaceId, long principalId) {
+ deleteWorkspaceAccessDetail(
+ new DeleteWorkspaceAccessDetailRequest()
+ .setWorkspaceId(workspaceId)
+ .setPrincipalId(principalId));
+ }
+
+ /** Deletes the workspace access detail for a principal in a workspace. */
+ public void deleteWorkspaceAccessDetail(DeleteWorkspaceAccessDetailRequest request) {
+ impl.deleteWorkspaceAccessDetail(request);
+ }
+
+ /** Convenience overload of {@link #getGroup(GetGroupRequest)} keyed by internal id. */
+ public Group getGroup(long internalId) {
+ return getGroup(new GetGroupRequest().setInternalId(internalId));
+ }
+
+ /** Fetches the group identified by its internal id. */
+ public Group getGroup(GetGroupRequest request) {
+ return impl.getGroup(request);
+ }
+
+ /** Convenience overload of {@link #getServicePrincipal(GetServicePrincipalRequest)}. */
+ public ServicePrincipal getServicePrincipal(long internalId) {
+ return getServicePrincipal(new GetServicePrincipalRequest().setInternalId(internalId));
+ }
+
+ /** Fetches the service principal identified by its internal id. */
+ public ServicePrincipal getServicePrincipal(GetServicePrincipalRequest request) {
+ return impl.getServicePrincipal(request);
+ }
+
+ /** Convenience overload of {@link #getUser(GetUserRequest)} keyed by internal id. */
+ public User getUser(long internalId) {
+ return getUser(new GetUserRequest().setInternalId(internalId));
+ }
+
+ /** Fetches the user identified by its internal id. */
+ public User getUser(GetUserRequest request) {
+ return impl.getUser(request);
+ }
+
public WorkspaceAccessDetail getWorkspaceAccessDetail(long workspaceId, long principalId) {
return getWorkspaceAccessDetail(
new GetWorkspaceAccessDetailRequest()
@@ -44,6 +131,32 @@ public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRe
return impl.getWorkspaceAccessDetail(request);
}
+ /** Lists groups in the account; delegates to the service implementation. */
+ public ListGroupsResponse listGroups(ListGroupsRequest request) {
+ return impl.listGroups(request);
+ }
+
+ /** Lists service principals in the account; delegates to the service implementation. */
+ public ListServicePrincipalsResponse listServicePrincipals(ListServicePrincipalsRequest request) {
+ return impl.listServicePrincipals(request);
+ }
+
+ /** Lists users in the account; delegates to the service implementation. */
+ public ListUsersResponse listUsers(ListUsersRequest request) {
+ return impl.listUsers(request);
+ }
+
+ /** Convenience overload keyed by workspace id. */
+ public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails(long workspaceId) {
+ return listWorkspaceAccessDetails(
+ new ListWorkspaceAccessDetailsRequest().setWorkspaceId(workspaceId));
+ }
+
+ /** Lists workspace access details for principals in the given workspace. */
+ public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails(
+ ListWorkspaceAccessDetailsRequest request) {
+ return impl.listWorkspaceAccessDetails(request);
+ }
+
/**
* Resolves a group with the given external ID from the customer's IdP. If the group does not
* exist, it will be created in the account. If the customer is not onboarded onto Automatic
@@ -72,6 +185,27 @@ public ResolveUserResponse resolveUser(ResolveUserRequest request) {
return impl.resolveUser(request);
}
+ /** Partially updates the group identified in the request; returns the updated group. */
+ public Group updateGroup(UpdateGroupRequest request) {
+ return impl.updateGroup(request);
+ }
+
+ /** Partially updates the identified service principal; returns the updated principal. */
+ public ServicePrincipal updateServicePrincipal(UpdateServicePrincipalRequest request) {
+ return impl.updateServicePrincipal(request);
+ }
+
+ /** Partially updates the identified user; returns the updated user. */
+ public User updateUser(UpdateUserRequest request) {
+ return impl.updateUser(request);
+ }
+
+ /** Partially updates the workspace access detail for a principal in a workspace. */
+ public WorkspaceAccessDetail updateWorkspaceAccessDetail(
+ UpdateWorkspaceAccessDetailRequest request) {
+ return impl.updateWorkspaceAccessDetail(request);
+ }
+
public AccountIamV2Service impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java
index c887fd39b..f43782568 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java
@@ -16,6 +16,183 @@ public AccountIamV2Impl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ /** Creates a group: POST /api/2.0/identity/accounts/{account_id}/groups. */
+ @Override
+ public Group createGroup(CreateGroupRequest request) {
+ String path =
+ String.format("/api/2.0/identity/accounts/%s/groups", apiClient.configuredAccountID());
+ try {
+ // Only the nested group payload is serialized as the request body.
+ Request req = new Request("POST", path, apiClient.serialize(request.getGroup()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Creates a service principal: POST /api/2.0/identity/accounts/{account_id}/servicePrincipals. */
+ @Override
+ public ServicePrincipal createServicePrincipal(CreateServicePrincipalRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/servicePrincipals", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getServicePrincipal()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Creates a user: POST /api/2.0/identity/accounts/{account_id}/users. */
+ @Override
+ public User createUser(CreateUserRequest request) {
+ String path =
+ String.format("/api/2.0/identity/accounts/%s/users", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getUser()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Creates a workspace access detail:
+ * POST /api/2.0/identity/accounts/{account_id}/workspaces/{parent}/workspaceAccessDetails.
+ * The workspace segment comes from {@code request.getParent()}.
+ */
+ @Override
+ public WorkspaceAccessDetail createWorkspaceAccessDetail(
+ CreateWorkspaceAccessDetailRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails",
+ apiClient.configuredAccountID(), request.getParent());
+ try {
+ Request req =
+ new Request("POST", path, apiClient.serialize(request.getWorkspaceAccessDetail()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, WorkspaceAccessDetail.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Deletes a group: DELETE /api/2.0/identity/accounts/{account_id}/groups/{internal_id}. */
+ @Override
+ public void deleteGroup(DeleteGroupRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/groups/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Deletes a service principal: DELETE .../servicePrincipals/{internal_id}. */
+ @Override
+ public void deleteServicePrincipal(DeleteServicePrincipalRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/servicePrincipals/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Deletes a user: DELETE /api/2.0/identity/accounts/{account_id}/users/{internal_id}. */
+ @Override
+ public void deleteUser(DeleteUserRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/users/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Deletes a workspace access detail:
+ * DELETE .../workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}.
+ */
+ @Override
+ public void deleteWorkspaceAccessDetail(DeleteWorkspaceAccessDetailRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails/%s",
+ apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Fetches a group: GET /api/2.0/identity/accounts/{account_id}/groups/{internal_id}. */
+ @Override
+ public Group getGroup(GetGroupRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/groups/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Fetches a service principal: GET .../servicePrincipals/{internal_id}. */
+ @Override
+ public ServicePrincipal getServicePrincipal(GetServicePrincipalRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/servicePrincipals/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Fetches a user: GET /api/2.0/identity/accounts/{account_id}/users/{internal_id}. */
+ @Override
+ public User getUser(GetUserRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/users/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRequest request) {
String path =
@@ -32,6 +209,66 @@ public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRe
}
}
+ /** Lists groups: GET /api/2.0/identity/accounts/{account_id}/groups. */
+ @Override
+ public ListGroupsResponse listGroups(ListGroupsRequest request) {
+ String path =
+ String.format("/api/2.0/identity/accounts/%s/groups", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListGroupsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Lists service principals: GET /api/2.0/identity/accounts/{account_id}/servicePrincipals. */
+ @Override
+ public ListServicePrincipalsResponse listServicePrincipals(ListServicePrincipalsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/servicePrincipals", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListServicePrincipalsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Lists users: GET /api/2.0/identity/accounts/{account_id}/users. */
+ @Override
+ public ListUsersResponse listUsers(ListUsersRequest request) {
+ String path =
+ String.format("/api/2.0/identity/accounts/%s/users", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListUsersResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Lists workspace access details: GET .../workspaces/{workspace_id}/workspaceAccessDetails. */
+ @Override
+ public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails(
+ ListWorkspaceAccessDetailsRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails",
+ apiClient.configuredAccountID(), request.getWorkspaceId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListWorkspaceAccessDetailsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ResolveGroupResponse resolveGroup(ResolveGroupRequest request) {
String path =
@@ -83,4 +320,74 @@ public ResolveUserResponse resolveUser(ResolveUserRequest request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ /** Updates a group: PATCH /api/2.0/identity/accounts/{account_id}/groups/{internal_id}. */
+ @Override
+ public Group updateGroup(UpdateGroupRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/groups/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ // Only the nested group payload is sent; the update mask travels as a query parameter.
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getGroup()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Updates a service principal: PATCH .../servicePrincipals/{internal_id}. */
+ @Override
+ public ServicePrincipal updateServicePrincipal(UpdateServicePrincipalRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/servicePrincipals/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getServicePrincipal()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /** Updates a user: PATCH /api/2.0/identity/accounts/{account_id}/users/{internal_id}. */
+ @Override
+ public User updateUser(UpdateUserRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/users/%s",
+ apiClient.configuredAccountID(), request.getInternalId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getUser()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Updates a workspace access detail:
+ * PATCH .../workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}.
+ */
+ @Override
+ public WorkspaceAccessDetail updateWorkspaceAccessDetail(
+ UpdateWorkspaceAccessDetailRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails/%s",
+ apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId());
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getWorkspaceAccessDetail()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, WorkspaceAccessDetail.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java
index 2a56ad630..aad7cd81e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java
@@ -13,6 +13,42 @@
*/
@Generated
public interface AccountIamV2Service {
+ /** Creates a group in the account. */
+ Group createGroup(CreateGroupRequest createGroupRequest);
+
+ /** Creates a service principal in the account. */
+ ServicePrincipal createServicePrincipal(
+ CreateServicePrincipalRequest createServicePrincipalRequest);
+
+ /** Creates a user in the account. */
+ User createUser(CreateUserRequest createUserRequest);
+
+ /** Creates workspace access details for a principal in a workspace. */
+ WorkspaceAccessDetail createWorkspaceAccessDetail(
+ CreateWorkspaceAccessDetailRequest createWorkspaceAccessDetailRequest);
+
+ /** Deletes the group identified by its internal id. */
+ void deleteGroup(DeleteGroupRequest deleteGroupRequest);
+
+ /** Deletes the service principal identified by its internal id. */
+ void deleteServicePrincipal(DeleteServicePrincipalRequest deleteServicePrincipalRequest);
+
+ /** Deletes the user identified by its internal id. */
+ void deleteUser(DeleteUserRequest deleteUserRequest);
+
+ /** Deletes the workspace access detail for a principal in a workspace. */
+ void deleteWorkspaceAccessDetail(
+ DeleteWorkspaceAccessDetailRequest deleteWorkspaceAccessDetailRequest);
+
+ /** Fetches the group identified by its internal id. */
+ Group getGroup(GetGroupRequest getGroupRequest);
+
+ /** Fetches the service principal identified by its internal id. */
+ ServicePrincipal getServicePrincipal(GetServicePrincipalRequest getServicePrincipalRequest);
+
+ /** Fetches the user identified by its internal id. */
+ User getUser(GetUserRequest getUserRequest);
+
+
/**
* Returns the access details for a principal in a workspace. Allows for checking access details
* for any provisioned principal (user, service principal, or group) in a workspace. * Provisioned
@@ -23,6 +59,20 @@ public interface AccountIamV2Service {
WorkspaceAccessDetail getWorkspaceAccessDetail(
GetWorkspaceAccessDetailRequest getWorkspaceAccessDetailRequest);
+ /** Lists groups in the account. */
+ ListGroupsResponse listGroups(ListGroupsRequest listGroupsRequest);
+
+ /** Lists service principals in the account. */
+ ListServicePrincipalsResponse listServicePrincipals(
+ ListServicePrincipalsRequest listServicePrincipalsRequest);
+
+ /** Lists users in the account. */
+ ListUsersResponse listUsers(ListUsersRequest listUsersRequest);
+
+ /** Lists workspace access details for principals in a workspace. */
+ ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails(
+ ListWorkspaceAccessDetailsRequest listWorkspaceAccessDetailsRequest);
+
/**
* Resolves a group with the given external ID from the customer's IdP. If the group does not
* exist, it will be created in the account. If the customer is not onboarded onto Automatic
@@ -44,4 +94,18 @@ ResolveServicePrincipalResponse resolveServicePrincipal(
* this will return an error.
*/
ResolveUserResponse resolveUser(ResolveUserRequest resolveUserRequest);
+
+ /** Partially updates the group identified in the request. */
+ Group updateGroup(UpdateGroupRequest updateGroupRequest);
+
+ /** Partially updates the service principal identified in the request. */
+ ServicePrincipal updateServicePrincipal(
+ UpdateServicePrincipalRequest updateServicePrincipalRequest);
+
+ /** Partially updates the user identified in the request. */
+ User updateUser(UpdateUserRequest updateUserRequest);
+
+ /** Partially updates the workspace access detail for a principal in a workspace. */
+ WorkspaceAccessDetail updateWorkspaceAccessDetail(
+ UpdateWorkspaceAccessDetailRequest updateWorkspaceAccessDetailRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java
new file mode 100755
index 000000000..3b3b114de
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateGroupProxyRequest {
+ /** Required. The group to be created. */
+ @JsonProperty("group")
+ private Group group;
+
+ public CreateGroupProxyRequest setGroup(Group group) {
+ this.group = group;
+ return this;
+ }
+
+ public Group getGroup() {
+ return group;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateGroupProxyRequest that = (CreateGroupProxyRequest) o;
+ return Objects.equals(group, that.group);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(group);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateGroupProxyRequest.class).add("group", group).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java
new file mode 100755
index 000000000..62c97720d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateGroupRequest {
+ /** Required. The group to be created. */
+ @JsonProperty("group")
+ private Group group;
+
+ public CreateGroupRequest setGroup(Group group) {
+ this.group = group;
+ return this;
+ }
+
+ public Group getGroup() {
+ return group;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateGroupRequest that = (CreateGroupRequest) o;
+ return Objects.equals(group, that.group);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(group);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateGroupRequest.class).add("group", group).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java
new file mode 100755
index 000000000..43953b199
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateServicePrincipalProxyRequest {
+ /** Required. The service principal to be created. */
+ @JsonProperty("service_principal")
+ private ServicePrincipal servicePrincipal;
+
+ public CreateServicePrincipalProxyRequest setServicePrincipal(ServicePrincipal servicePrincipal) {
+ this.servicePrincipal = servicePrincipal;
+ return this;
+ }
+
+ public ServicePrincipal getServicePrincipal() {
+ return servicePrincipal;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateServicePrincipalProxyRequest that = (CreateServicePrincipalProxyRequest) o;
+ return Objects.equals(servicePrincipal, that.servicePrincipal);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(servicePrincipal);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateServicePrincipalProxyRequest.class)
+ .add("servicePrincipal", servicePrincipal)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java
new file mode 100755
index 000000000..fadf9a546
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateServicePrincipalRequest {
+ /** Required. The service principal to be created. */
+ @JsonProperty("service_principal")
+ private ServicePrincipal servicePrincipal;
+
+ public CreateServicePrincipalRequest setServicePrincipal(ServicePrincipal servicePrincipal) {
+ this.servicePrincipal = servicePrincipal;
+ return this;
+ }
+
+ public ServicePrincipal getServicePrincipal() {
+ return servicePrincipal;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateServicePrincipalRequest that = (CreateServicePrincipalRequest) o;
+ return Objects.equals(servicePrincipal, that.servicePrincipal);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(servicePrincipal);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateServicePrincipalRequest.class)
+ .add("servicePrincipal", servicePrincipal)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java
new file mode 100755
index 000000000..b35fec791
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateUserProxyRequest {
+ /** Required. The user to be created. */
+ @JsonProperty("user")
+ private User user;
+
+ public CreateUserProxyRequest setUser(User user) {
+ this.user = user;
+ return this;
+ }
+
+ public User getUser() {
+ return user;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateUserProxyRequest that = (CreateUserProxyRequest) o;
+ return Objects.equals(user, that.user);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(user);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateUserProxyRequest.class).add("user", user).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java
new file mode 100755
index 000000000..0e85e104b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateUserRequest {
+ /** Required. The user to be created. */
+ @JsonProperty("user")
+ private User user;
+
+ public CreateUserRequest setUser(User user) {
+ this.user = user;
+ return this;
+ }
+
+ public User getUser() {
+ return user;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateUserRequest that = (CreateUserRequest) o;
+ return Objects.equals(user, that.user);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(user);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateUserRequest.class).add("user", user).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java
new file mode 100755
index 000000000..4b00b8dd9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateWorkspaceAccessDetailLocalRequest {
+ /** Required. The workspace access detail to be created. */
+ @JsonProperty("workspace_access_detail")
+ private WorkspaceAccessDetail workspaceAccessDetail;
+
+ public CreateWorkspaceAccessDetailLocalRequest setWorkspaceAccessDetail(
+ WorkspaceAccessDetail workspaceAccessDetail) {
+ this.workspaceAccessDetail = workspaceAccessDetail;
+ return this;
+ }
+
+ public WorkspaceAccessDetail getWorkspaceAccessDetail() {
+ return workspaceAccessDetail;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateWorkspaceAccessDetailLocalRequest that = (CreateWorkspaceAccessDetailLocalRequest) o;
+ return Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(workspaceAccessDetail);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateWorkspaceAccessDetailLocalRequest.class)
+ .add("workspaceAccessDetail", workspaceAccessDetail)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java
new file mode 100755
index 000000000..985550754
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateWorkspaceAccessDetailRequest {
+ /** Required. The parent path for the workspace access detail. */
+ @JsonIgnore private String parent;
+
+ /** Required. The workspace access detail to be created. */
+ @JsonProperty("workspace_access_detail")
+ private WorkspaceAccessDetail workspaceAccessDetail;
+
+ public CreateWorkspaceAccessDetailRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ public CreateWorkspaceAccessDetailRequest setWorkspaceAccessDetail(
+ WorkspaceAccessDetail workspaceAccessDetail) {
+ this.workspaceAccessDetail = workspaceAccessDetail;
+ return this;
+ }
+
+ public WorkspaceAccessDetail getWorkspaceAccessDetail() {
+ return workspaceAccessDetail;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateWorkspaceAccessDetailRequest that = (CreateWorkspaceAccessDetailRequest) o;
+ return Objects.equals(parent, that.parent)
+ && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(parent, workspaceAccessDetail);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateWorkspaceAccessDetailRequest.class)
+ .add("parent", parent)
+ .add("workspaceAccessDetail", workspaceAccessDetail)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java
new file mode 100755
index 000000000..ec0d9d788
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteGroupProxyRequest {
+ /** Required. Databricks-internal numeric ID of the group to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteGroupProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteGroupProxyRequest that = (DeleteGroupProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteGroupProxyRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java
new file mode 100755
index 000000000..ab0f0db5c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteGroupRequest {
+ /** Required. Databricks-internal numeric ID of the group to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteGroupRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteGroupRequest that = (DeleteGroupRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteGroupRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java
new file mode 100755
index 000000000..1698be7de
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteServicePrincipalProxyRequest {
+ /** Required. Databricks-internal numeric ID of the service principal to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteServicePrincipalProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteServicePrincipalProxyRequest that = (DeleteServicePrincipalProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteServicePrincipalProxyRequest.class)
+ .add("internalId", internalId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java
new file mode 100755
index 000000000..be9589b99
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteServicePrincipalRequest {
+ /** Required. Databricks-internal numeric ID of the service principal to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteServicePrincipalRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteServicePrincipalRequest that = (DeleteServicePrincipalRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteServicePrincipalRequest.class)
+ .add("internalId", internalId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java
new file mode 100755
index 000000000..f0e6a092b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteUserProxyRequest {
+ /** Required. Databricks-internal numeric ID of the user to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteUserProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteUserProxyRequest that = (DeleteUserProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteUserProxyRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java
new file mode 100755
index 000000000..efe69331b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteUserRequest {
+ /** Required. Databricks-internal numeric ID of the user to delete. */
+ @JsonIgnore private Long internalId;
+
+ public DeleteUserRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteUserRequest that = (DeleteUserRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteUserRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java
new file mode 100755
index 000000000..d0f737f5a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteWorkspaceAccessDetailLocalRequest {
+ /** Required. ID of the principal in Databricks whose workspace access detail is deleted. */
+ @JsonIgnore private Long principalId;
+
+ public DeleteWorkspaceAccessDetailLocalRequest setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteWorkspaceAccessDetailLocalRequest that = (DeleteWorkspaceAccessDetailLocalRequest) o;
+ return Objects.equals(principalId, that.principalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(principalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteWorkspaceAccessDetailLocalRequest.class)
+ .add("principalId", principalId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java
new file mode 100755
index 000000000..2ff0e7ba2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java
@@ -0,0 +1,57 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteWorkspaceAccessDetailRequest {
+ /** Required. ID of the principal in Databricks to delete workspace access for. */
+ @JsonIgnore private Long principalId;
+
+ /** The ID of the workspace in which the principal has access. */
+ @JsonIgnore private Long workspaceId;
+
+ public DeleteWorkspaceAccessDetailRequest setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
+ public DeleteWorkspaceAccessDetailRequest setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ return this;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteWorkspaceAccessDetailRequest that = (DeleteWorkspaceAccessDetailRequest) o;
+ return Objects.equals(principalId, that.principalId)
+ && Objects.equals(workspaceId, that.workspaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(principalId, workspaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteWorkspaceAccessDetailRequest.class)
+ .add("principalId", principalId)
+ .add("workspaceId", workspaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java
new file mode 100755
index 000000000..a634eb259
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetGroupProxyRequest {
+ /** Required. Databricks-internal numeric ID of the group to fetch. */
+ @JsonIgnore private Long internalId;
+
+ public GetGroupProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetGroupProxyRequest that = (GetGroupProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetGroupProxyRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java
new file mode 100755
index 000000000..ba4ceffb6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetGroupRequest {
+ /** Required. Databricks-internal numeric ID of the group to fetch. */
+ @JsonIgnore private Long internalId;
+
+ public GetGroupRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetGroupRequest that = (GetGroupRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetGroupRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java
new file mode 100755
index 000000000..675796d4e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetServicePrincipalProxyRequest {
+ /** Required. Databricks-internal numeric ID of the service principal to fetch. */
+ @JsonIgnore private Long internalId;
+
+ public GetServicePrincipalProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetServicePrincipalProxyRequest that = (GetServicePrincipalProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetServicePrincipalProxyRequest.class)
+ .add("internalId", internalId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java
new file mode 100755
index 000000000..e51607786
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetServicePrincipalRequest {
+ /** Required. Databricks-internal numeric ID of the service principal to fetch. */
+ @JsonIgnore private Long internalId;
+
+ public GetServicePrincipalRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetServicePrincipalRequest that = (GetServicePrincipalRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetServicePrincipalRequest.class)
+ .add("internalId", internalId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java
new file mode 100755
index 000000000..681dc1b3d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetUserProxyRequest {
+ /** Required. Internal ID of the user in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ public GetUserProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetUserProxyRequest that = (GetUserProxyRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetUserProxyRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java
new file mode 100755
index 000000000..8cfc0a484
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetUserRequest {
+ /** Required. Internal ID of the user in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ public GetUserRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetUserRequest that = (GetUserRequest) o;
+ return Objects.equals(internalId, that.internalId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetUserRequest.class).add("internalId", internalId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java
new file mode 100755
index 000000000..53b964703
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListGroupsProxyRequest {
+ /** The maximum number of groups to return. The service may return fewer than this value. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent
+ * page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListGroupsProxyRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListGroupsProxyRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListGroupsProxyRequest that = (ListGroupsProxyRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListGroupsProxyRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java
new file mode 100755
index 000000000..6d64f2102
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListGroupsRequest {
+ /** The maximum number of groups to return. The service may return fewer than this value. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent
+ * page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListGroupsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListGroupsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListGroupsRequest that = (ListGroupsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListGroupsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java
new file mode 100755
index 000000000..c2ca63976
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response containing a page of groups and an optional token for retrieving the next page. */
+@Generated
+public class ListGroupsResponse {
+ /** */
+ @JsonProperty("groups")
+ private Collection groups;
+
+ /**
+ * A token, which can be sent as page_token to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListGroupsResponse setGroups(Collection groups) {
+ this.groups = groups;
+ return this;
+ }
+
+ public Collection getGroups() {
+ return groups;
+ }
+
+ public ListGroupsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListGroupsResponse that = (ListGroupsResponse) o;
+ return Objects.equals(groups, that.groups) && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(groups, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListGroupsResponse.class)
+ .add("groups", groups)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java
new file mode 100755
index 000000000..df9b6119e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListServicePrincipalsProxyRequest {
+ /** The maximum number of SPs to return. The service may return fewer than this value. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the
+ * subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListServicePrincipalsProxyRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListServicePrincipalsProxyRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListServicePrincipalsProxyRequest that = (ListServicePrincipalsProxyRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListServicePrincipalsProxyRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java
new file mode 100755
index 000000000..ac839e4f6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListServicePrincipalsRequest {
+ /**
+ * The maximum number of service principals to return. The service may return fewer than this
+ * value.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the
+ * subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListServicePrincipalsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListServicePrincipalsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListServicePrincipalsRequest that = (ListServicePrincipalsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListServicePrincipalsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java
new file mode 100755
index 000000000..81c7f957f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response containing a page of service principals and an optional token for the next page. */
+@Generated
+public class ListServicePrincipalsResponse {
+ /**
+ * A token, which can be sent as page_token to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("service_principals")
+ private Collection servicePrincipals;
+
+ public ListServicePrincipalsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListServicePrincipalsResponse setServicePrincipals(
+ Collection servicePrincipals) {
+ this.servicePrincipals = servicePrincipals;
+ return this;
+ }
+
+ public Collection getServicePrincipals() {
+ return servicePrincipals;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListServicePrincipalsResponse that = (ListServicePrincipalsResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(servicePrincipals, that.servicePrincipals);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, servicePrincipals);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListServicePrincipalsResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("servicePrincipals", servicePrincipals)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java
new file mode 100755
index 000000000..4f0ba814a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListUsersProxyRequest {
+ /** The maximum number of users to return. The service may return fewer than this value. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent
+ * page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListUsersProxyRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListUsersProxyRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListUsersProxyRequest that = (ListUsersProxyRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListUsersProxyRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java
new file mode 100755
index 000000000..05ab3394f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListUsersRequest {
+ /** The maximum number of users to return. The service may return fewer than this value. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent
+ * page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListUsersRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListUsersRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListUsersRequest that = (ListUsersRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListUsersRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java
new file mode 100755
index 000000000..6cd3c5e21
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response containing a page of users and an optional token for retrieving the next page. */
+@Generated
+public class ListUsersResponse {
+ /**
+ * A token, which can be sent as page_token to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("users")
+ private Collection users;
+
+ public ListUsersResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListUsersResponse setUsers(Collection users) {
+ this.users = users;
+ return this;
+ }
+
+ public Collection getUsers() {
+ return users;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListUsersResponse that = (ListUsersResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(users, that.users);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, users);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListUsersResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("users", users)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java
new file mode 100755
index 000000000..8a32d473c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListWorkspaceAccessDetailsLocalRequest {
+ /**
+ * The maximum number of workspace access details to return. The service may return fewer than
+ * this value.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to
+ * retrieve the subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListWorkspaceAccessDetailsLocalRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListWorkspaceAccessDetailsLocalRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListWorkspaceAccessDetailsLocalRequest that = (ListWorkspaceAccessDetailsLocalRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListWorkspaceAccessDetailsLocalRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java
new file mode 100755
index 000000000..108638b0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListWorkspaceAccessDetailsRequest {
+ /**
+ * The maximum number of workspace access details to return. The service may return fewer than
+ * this value.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to
+ * retrieve the subsequent page.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The workspace ID for which the workspace access details are being fetched. */
+ @JsonIgnore private Long workspaceId;
+
+ public ListWorkspaceAccessDetailsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListWorkspaceAccessDetailsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListWorkspaceAccessDetailsRequest setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ return this;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListWorkspaceAccessDetailsRequest that = (ListWorkspaceAccessDetailsRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(workspaceId, that.workspaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, workspaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListWorkspaceAccessDetailsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("workspaceId", workspaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java
new file mode 100755
index 000000000..8853de896
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response containing a page of workspace access details and an optional next-page token. */
+@Generated
+public class ListWorkspaceAccessDetailsResponse {
+ /**
+ * A token, which can be sent as page_token to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("workspace_access_details")
+ private Collection workspaceAccessDetails;
+
+ public ListWorkspaceAccessDetailsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListWorkspaceAccessDetailsResponse setWorkspaceAccessDetails(
+ Collection workspaceAccessDetails) {
+ this.workspaceAccessDetails = workspaceAccessDetails;
+ return this;
+ }
+
+ public Collection getWorkspaceAccessDetails() {
+ return workspaceAccessDetails;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListWorkspaceAccessDetailsResponse that = (ListWorkspaceAccessDetailsResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(workspaceAccessDetails, that.workspaceAccessDetails);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, workspaceAccessDetails);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListWorkspaceAccessDetailsResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("workspaceAccessDetails", workspaceAccessDetails)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java
new file mode 100755
index 000000000..e580f65ae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateGroupProxyRequest {
+ /** Required. Group to be updated. */
+ @JsonProperty("group")
+ private Group group;
+
+ /** Required. Internal ID of the group in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateGroupProxyRequest setGroup(Group group) {
+ this.group = group;
+ return this;
+ }
+
+ public Group getGroup() {
+ return group;
+ }
+
+ public UpdateGroupProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateGroupProxyRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateGroupProxyRequest that = (UpdateGroupProxyRequest) o;
+ return Objects.equals(group, that.group)
+ && Objects.equals(internalId, that.internalId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(group, internalId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateGroupProxyRequest.class)
+ .add("group", group)
+ .add("internalId", internalId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java
new file mode 100755
index 000000000..b8b0c5850
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateGroupRequest {
+ /** Required. The group to be updated. */
+ @JsonProperty("group")
+ private Group group;
+
+ /** Required. Internal ID of the group in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateGroupRequest setGroup(Group group) {
+ this.group = group;
+ return this;
+ }
+
+ public Group getGroup() {
+ return group;
+ }
+
+ public UpdateGroupRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateGroupRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateGroupRequest that = (UpdateGroupRequest) o;
+ return Objects.equals(group, that.group)
+ && Objects.equals(internalId, that.internalId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(group, internalId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateGroupRequest.class)
+ .add("group", group)
+ .add("internalId", internalId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java
new file mode 100755
index 000000000..a09930fb3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateServicePrincipalProxyRequest {
+ /** Required. Internal ID of the service principal in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Required. The service principal to be updated. */
+ @JsonProperty("service_principal")
+ private ServicePrincipal servicePrincipal;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateServicePrincipalProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateServicePrincipalProxyRequest setServicePrincipal(ServicePrincipal servicePrincipal) {
+ this.servicePrincipal = servicePrincipal;
+ return this;
+ }
+
+ public ServicePrincipal getServicePrincipal() {
+ return servicePrincipal;
+ }
+
+ public UpdateServicePrincipalProxyRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateServicePrincipalProxyRequest that = (UpdateServicePrincipalProxyRequest) o;
+ return Objects.equals(internalId, that.internalId)
+ && Objects.equals(servicePrincipal, that.servicePrincipal)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId, servicePrincipal, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateServicePrincipalProxyRequest.class)
+ .add("internalId", internalId)
+ .add("servicePrincipal", servicePrincipal)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java
new file mode 100755
index 000000000..362ac7daa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateServicePrincipalRequest {
+ /** Required. Internal ID of the service principal in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Required. The service principal to be updated. */
+ @JsonProperty("service_principal")
+ private ServicePrincipal servicePrincipal;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateServicePrincipalRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateServicePrincipalRequest setServicePrincipal(ServicePrincipal servicePrincipal) {
+ this.servicePrincipal = servicePrincipal;
+ return this;
+ }
+
+ public ServicePrincipal getServicePrincipal() {
+ return servicePrincipal;
+ }
+
+ public UpdateServicePrincipalRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateServicePrincipalRequest that = (UpdateServicePrincipalRequest) o;
+ return Objects.equals(internalId, that.internalId)
+ && Objects.equals(servicePrincipal, that.servicePrincipal)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId, servicePrincipal, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateServicePrincipalRequest.class)
+ .add("internalId", internalId)
+ .add("servicePrincipal", servicePrincipal)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java
new file mode 100755
index 000000000..5526d8149
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateUserProxyRequest {
+ /** Required. Internal ID of the user in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ /** Required. The user to be updated. */
+ @JsonProperty("user")
+ private User user;
+
+ public UpdateUserProxyRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateUserProxyRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ public UpdateUserProxyRequest setUser(User user) {
+ this.user = user;
+ return this;
+ }
+
+ public User getUser() {
+ return user;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateUserProxyRequest that = (UpdateUserProxyRequest) o;
+ return Objects.equals(internalId, that.internalId)
+ && Objects.equals(updateMask, that.updateMask)
+ && Objects.equals(user, that.user);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId, updateMask, user);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateUserProxyRequest.class)
+ .add("internalId", internalId)
+ .add("updateMask", updateMask)
+ .add("user", user)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java
new file mode 100755
index 000000000..4a75d8d0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateUserRequest {
+ /** Required. Internal ID of the user in Databricks. */
+ @JsonIgnore private Long internalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ /** Required. The user to be updated. */
+ @JsonProperty("user")
+ private User user;
+
+ public UpdateUserRequest setInternalId(Long internalId) {
+ this.internalId = internalId;
+ return this;
+ }
+
+ public Long getInternalId() {
+ return internalId;
+ }
+
+ public UpdateUserRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ public UpdateUserRequest setUser(User user) {
+ this.user = user;
+ return this;
+ }
+
+ public User getUser() {
+ return user;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateUserRequest that = (UpdateUserRequest) o;
+ return Objects.equals(internalId, that.internalId)
+ && Objects.equals(updateMask, that.updateMask)
+ && Objects.equals(user, that.user);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internalId, updateMask, user);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateUserRequest.class)
+ .add("internalId", internalId)
+ .add("updateMask", updateMask)
+ .add("user", user)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java
new file mode 100755
index 000000000..3d88bd1f1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateWorkspaceAccessDetailLocalRequest {
+ /** Required. ID of the principal in Databricks. */
+ @JsonIgnore private Long principalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ /** Required. The workspace access detail to be updated. */
+ @JsonProperty("workspace_access_detail")
+ private WorkspaceAccessDetail workspaceAccessDetail;
+
+ public UpdateWorkspaceAccessDetailLocalRequest setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
+ public UpdateWorkspaceAccessDetailLocalRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ public UpdateWorkspaceAccessDetailLocalRequest setWorkspaceAccessDetail(
+ WorkspaceAccessDetail workspaceAccessDetail) {
+ this.workspaceAccessDetail = workspaceAccessDetail;
+ return this;
+ }
+
+ public WorkspaceAccessDetail getWorkspaceAccessDetail() {
+ return workspaceAccessDetail;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateWorkspaceAccessDetailLocalRequest that = (UpdateWorkspaceAccessDetailLocalRequest) o;
+ return Objects.equals(principalId, that.principalId)
+ && Objects.equals(updateMask, that.updateMask)
+ && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(principalId, updateMask, workspaceAccessDetail);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateWorkspaceAccessDetailLocalRequest.class)
+ .add("principalId", principalId)
+ .add("updateMask", updateMask)
+ .add("workspaceAccessDetail", workspaceAccessDetail)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java
new file mode 100755
index 000000000..d8f77804b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java
@@ -0,0 +1,91 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iamv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateWorkspaceAccessDetailRequest {
+ /** Required. ID of the principal in Databricks. */
+ @JsonIgnore private Long principalId;
+
+ /** Optional. The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ /** Required. The workspace access detail to be updated. */
+ @JsonProperty("workspace_access_detail")
+ private WorkspaceAccessDetail workspaceAccessDetail;
+
+ /** Required. The workspace ID for which the workspace access detail is being updated. */
+ @JsonIgnore private Long workspaceId;
+
+ public UpdateWorkspaceAccessDetailRequest setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
+ public UpdateWorkspaceAccessDetailRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ public UpdateWorkspaceAccessDetailRequest setWorkspaceAccessDetail(
+ WorkspaceAccessDetail workspaceAccessDetail) {
+ this.workspaceAccessDetail = workspaceAccessDetail;
+ return this;
+ }
+
+ public WorkspaceAccessDetail getWorkspaceAccessDetail() {
+ return workspaceAccessDetail;
+ }
+
+ public UpdateWorkspaceAccessDetailRequest setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ return this;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateWorkspaceAccessDetailRequest that = (UpdateWorkspaceAccessDetailRequest) o;
+ return Objects.equals(principalId, that.principalId)
+ && Objects.equals(updateMask, that.updateMask)
+ && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail)
+ && Objects.equals(workspaceId, that.workspaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(principalId, updateMask, workspaceAccessDetail, workspaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateWorkspaceAccessDetailRequest.class)
+ .add("principalId", principalId)
+ .add("updateMask", updateMask)
+ .add("workspaceAccessDetail", workspaceAccessDetail)
+ .add("workspaceId", workspaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
index a9c53bcdf..d5c8f351e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java
@@ -26,6 +26,92 @@ public WorkspaceIamV2API(WorkspaceIamV2Service mock) {
impl = mock;
}
+ /** TODO: Write description later when this method is implemented */
+ public Group createGroupProxy(CreateGroupProxyRequest request) {
+ return impl.createGroupProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ServicePrincipal createServicePrincipalProxy(CreateServicePrincipalProxyRequest request) {
+ return impl.createServicePrincipalProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public User createUserProxy(CreateUserProxyRequest request) {
+ return impl.createUserProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public WorkspaceAccessDetail createWorkspaceAccessDetailLocal(
+ CreateWorkspaceAccessDetailLocalRequest request) {
+ return impl.createWorkspaceAccessDetailLocal(request);
+ }
+
+ public void deleteGroupProxy(long internalId) {
+ deleteGroupProxy(new DeleteGroupProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public void deleteGroupProxy(DeleteGroupProxyRequest request) {
+ impl.deleteGroupProxy(request);
+ }
+
+ public void deleteServicePrincipalProxy(long internalId) {
+ deleteServicePrincipalProxy(new DeleteServicePrincipalProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public void deleteServicePrincipalProxy(DeleteServicePrincipalProxyRequest request) {
+ impl.deleteServicePrincipalProxy(request);
+ }
+
+ public void deleteUserProxy(long internalId) {
+ deleteUserProxy(new DeleteUserProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public void deleteUserProxy(DeleteUserProxyRequest request) {
+ impl.deleteUserProxy(request);
+ }
+
+ public void deleteWorkspaceAccessDetailLocal(long principalId) {
+ deleteWorkspaceAccessDetailLocal(
+ new DeleteWorkspaceAccessDetailLocalRequest().setPrincipalId(principalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public void deleteWorkspaceAccessDetailLocal(DeleteWorkspaceAccessDetailLocalRequest request) {
+ impl.deleteWorkspaceAccessDetailLocal(request);
+ }
+
+ public Group getGroupProxy(long internalId) {
+ return getGroupProxy(new GetGroupProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public Group getGroupProxy(GetGroupProxyRequest request) {
+ return impl.getGroupProxy(request);
+ }
+
+ public ServicePrincipal getServicePrincipalProxy(long internalId) {
+ return getServicePrincipalProxy(
+ new GetServicePrincipalProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ServicePrincipal getServicePrincipalProxy(GetServicePrincipalProxyRequest request) {
+ return impl.getServicePrincipalProxy(request);
+ }
+
+ public User getUserProxy(long internalId) {
+ return getUserProxy(new GetUserProxyRequest().setInternalId(internalId));
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public User getUserProxy(GetUserProxyRequest request) {
+ return impl.getUserProxy(request);
+ }
+
public WorkspaceAccessDetail getWorkspaceAccessDetailLocal(long principalId) {
return getWorkspaceAccessDetailLocal(
new GetWorkspaceAccessDetailLocalRequest().setPrincipalId(principalId));
@@ -43,6 +129,28 @@ public WorkspaceAccessDetail getWorkspaceAccessDetailLocal(
return impl.getWorkspaceAccessDetailLocal(request);
}
+ /** TODO: Write description later when this method is implemented */
+ public ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest request) {
+ return impl.listGroupsProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ListServicePrincipalsResponse listServicePrincipalsProxy(
+ ListServicePrincipalsProxyRequest request) {
+ return impl.listServicePrincipalsProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ListUsersResponse listUsersProxy(ListUsersProxyRequest request) {
+ return impl.listUsersProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal(
+ ListWorkspaceAccessDetailsLocalRequest request) {
+ return impl.listWorkspaceAccessDetailsLocal(request);
+ }
+
/**
* Resolves a group with the given external ID from the customer's IdP. If the group does not
* exist, it will be created in the account. If the customer is not onboarded onto Automatic
@@ -71,6 +179,27 @@ public ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest request) {
return impl.resolveUserProxy(request);
}
+ /** TODO: Write description later when this method is implemented */
+ public Group updateGroupProxy(UpdateGroupProxyRequest request) {
+ return impl.updateGroupProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public ServicePrincipal updateServicePrincipalProxy(UpdateServicePrincipalProxyRequest request) {
+ return impl.updateServicePrincipalProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public User updateUserProxy(UpdateUserProxyRequest request) {
+ return impl.updateUserProxy(request);
+ }
+
+ /** TODO: Write description later when this method is implemented */
+ public WorkspaceAccessDetail updateWorkspaceAccessDetailLocal(
+ UpdateWorkspaceAccessDetailLocalRequest request) {
+ return impl.updateWorkspaceAccessDetailLocal(request);
+ }
+
public WorkspaceIamV2Service impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java
index 91f88bc33..1903e2849 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java
@@ -16,6 +16,156 @@ public WorkspaceIamV2Impl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public Group createGroupProxy(CreateGroupProxyRequest request) {
+ String path = "/api/2.0/identity/groups";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getGroup()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ServicePrincipal createServicePrincipalProxy(CreateServicePrincipalProxyRequest request) {
+ String path = "/api/2.0/identity/servicePrincipals";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getServicePrincipal()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public User createUserProxy(CreateUserProxyRequest request) {
+ String path = "/api/2.0/identity/users";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getUser()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public WorkspaceAccessDetail createWorkspaceAccessDetailLocal(
+ CreateWorkspaceAccessDetailLocalRequest request) {
+ String path = "/api/2.0/identity/workspaceAccessDetails";
+ try {
+ Request req =
+ new Request("POST", path, apiClient.serialize(request.getWorkspaceAccessDetail()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, WorkspaceAccessDetail.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteGroupProxy(DeleteGroupProxyRequest request) {
+ String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteServicePrincipalProxy(DeleteServicePrincipalProxyRequest request) {
+ String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteUserProxy(DeleteUserProxyRequest request) {
+ String path = String.format("/api/2.0/identity/users/%s", request.getInternalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteWorkspaceAccessDetailLocal(DeleteWorkspaceAccessDetailLocalRequest request) {
+ String path =
+ String.format("/api/2.0/identity/workspaceAccessDetails/%s", request.getPrincipalId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Group getGroupProxy(GetGroupProxyRequest request) {
+ String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ServicePrincipal getServicePrincipalProxy(GetServicePrincipalProxyRequest request) {
+ String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public User getUserProxy(GetUserProxyRequest request) {
+ String path = String.format("/api/2.0/identity/users/%s", request.getInternalId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public WorkspaceAccessDetail getWorkspaceAccessDetailLocal(
GetWorkspaceAccessDetailLocalRequest request) {
@@ -31,6 +181,60 @@ public WorkspaceAccessDetail getWorkspaceAccessDetailLocal(
}
}
+ @Override
+ public ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest request) {
+ String path = "/api/2.0/identity/groups";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListGroupsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListServicePrincipalsResponse listServicePrincipalsProxy(
+ ListServicePrincipalsProxyRequest request) {
+ String path = "/api/2.0/identity/servicePrincipals";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListServicePrincipalsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListUsersResponse listUsersProxy(ListUsersProxyRequest request) {
+ String path = "/api/2.0/identity/users";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListUsersResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal(
+ ListWorkspaceAccessDetailsLocalRequest request) {
+ String path = "/api/2.0/identity/workspaceAccessDetails";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListWorkspaceAccessDetailsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ResolveGroupResponse resolveGroupProxy(ResolveGroupProxyRequest request) {
String path = "/api/2.0/identity/groups/resolveByExternalId";
@@ -73,4 +277,63 @@ public ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public Group updateGroupProxy(UpdateGroupProxyRequest request) {
+ String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getGroup()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Group.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ServicePrincipal updateServicePrincipalProxy(UpdateServicePrincipalProxyRequest request) {
+ String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getServicePrincipal()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServicePrincipal.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public User updateUserProxy(UpdateUserProxyRequest request) {
+ String path = String.format("/api/2.0/identity/users/%s", request.getInternalId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getUser()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, User.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public WorkspaceAccessDetail updateWorkspaceAccessDetailLocal(
+ UpdateWorkspaceAccessDetailLocalRequest request) {
+ String path =
+ String.format("/api/2.0/identity/workspaceAccessDetails/%s", request.getPrincipalId());
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getWorkspaceAccessDetail()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, WorkspaceAccessDetail.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java
index f20471423..8614cc8d4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java
@@ -13,6 +13,44 @@
*/
@Generated
public interface WorkspaceIamV2Service {
+ /** TODO: Write description later when this method is implemented */
+ Group createGroupProxy(CreateGroupProxyRequest createGroupProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ServicePrincipal createServicePrincipalProxy(
+ CreateServicePrincipalProxyRequest createServicePrincipalProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ User createUserProxy(CreateUserProxyRequest createUserProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ WorkspaceAccessDetail createWorkspaceAccessDetailLocal(
+ CreateWorkspaceAccessDetailLocalRequest createWorkspaceAccessDetailLocalRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ void deleteGroupProxy(DeleteGroupProxyRequest deleteGroupProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ void deleteServicePrincipalProxy(
+ DeleteServicePrincipalProxyRequest deleteServicePrincipalProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ void deleteUserProxy(DeleteUserProxyRequest deleteUserProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ void deleteWorkspaceAccessDetailLocal(
+ DeleteWorkspaceAccessDetailLocalRequest deleteWorkspaceAccessDetailLocalRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ Group getGroupProxy(GetGroupProxyRequest getGroupProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ServicePrincipal getServicePrincipalProxy(
+ GetServicePrincipalProxyRequest getServicePrincipalProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ User getUserProxy(GetUserProxyRequest getUserProxyRequest);
+
/**
* Returns the access details for a principal in the current workspace. Allows for checking access
* details for any provisioned principal (user, service principal, or group) in the current
@@ -23,6 +61,20 @@ public interface WorkspaceIamV2Service {
WorkspaceAccessDetail getWorkspaceAccessDetailLocal(
GetWorkspaceAccessDetailLocalRequest getWorkspaceAccessDetailLocalRequest);
+ /** TODO: Write description later when this method is implemented */
+ ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest listGroupsProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ListServicePrincipalsResponse listServicePrincipalsProxy(
+ ListServicePrincipalsProxyRequest listServicePrincipalsProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ListUsersResponse listUsersProxy(ListUsersProxyRequest listUsersProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal(
+ ListWorkspaceAccessDetailsLocalRequest listWorkspaceAccessDetailsLocalRequest);
+
/**
* Resolves a group with the given external ID from the customer's IdP. If the group does not
* exist, it will be created in the account. If the customer is not onboarded onto Automatic
@@ -44,4 +96,18 @@ ResolveServicePrincipalResponse resolveServicePrincipalProxy(
* this will return an error.
*/
ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest resolveUserProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ Group updateGroupProxy(UpdateGroupProxyRequest updateGroupProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ ServicePrincipal updateServicePrincipalProxy(
+ UpdateServicePrincipalProxyRequest updateServicePrincipalProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ User updateUserProxy(UpdateUserProxyRequest updateUserProxyRequest);
+
+ /** TODO: Write description later when this method is implemented */
+ WorkspaceAccessDetail updateWorkspaceAccessDetailLocal(
+ UpdateWorkspaceAccessDetailLocalRequest updateWorkspaceAccessDetailLocalRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
index a78f27377..08b5cf207 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
@@ -47,6 +47,14 @@ public class BaseJob {
@JsonProperty("job_id")
private Long jobId;
+ /**
+ * Path of the job object in workspace file tree, including file extension. If absent, the job
+ * doesn't have a workspace object. Example:
+ * /Workspace/user@example.com/my_project/my_job.job.json
+ */
+ @JsonProperty("path")
+ private String path;
+
/**
* Settings for this job and all of its runs. These settings can be updated using the `resetJob`
* method.
@@ -112,6 +120,15 @@ public Long getJobId() {
return jobId;
}
+ public BaseJob setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
public BaseJob setSettings(JobSettings settings) {
this.settings = settings;
return this;
@@ -141,6 +158,7 @@ public boolean equals(Object o) {
&& Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId)
&& Objects.equals(hasMore, that.hasMore)
&& Objects.equals(jobId, that.jobId)
+ && Objects.equals(path, that.path)
&& Objects.equals(settings, that.settings)
&& Objects.equals(triggerState, that.triggerState);
}
@@ -154,6 +172,7 @@ public int hashCode() {
effectiveUsagePolicyId,
hasMore,
jobId,
+ path,
settings,
triggerState);
}
@@ -167,6 +186,7 @@ public String toString() {
.add("effectiveUsagePolicyId", effectiveUsagePolicyId)
.add("hasMore", hasMore)
.add("jobId", jobId)
+ .add("path", path)
.add("settings", settings)
.add("triggerState", triggerState)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index c38a239bd..adf3d7ab6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -127,6 +127,13 @@ public class CreateJob {
@JsonProperty("parameters")
private Collection parameters;
+ /**
+ * Path of the job parent folder in workspace file tree. If absent, the job doesn't have a
+ * workspace object.
+ */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
/**
* The performance mode on a serverless job. This field determines the level of compute
* performance or cost-efficiency for the run.
@@ -343,6 +350,15 @@ public Collection getParameters() {
return parameters;
}
+ public CreateJob setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
public CreateJob setPerformanceTarget(PerformanceTarget performanceTarget) {
this.performanceTarget = performanceTarget;
return this;
@@ -454,6 +470,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notificationSettings, that.notificationSettings)
&& Objects.equals(parameters, that.parameters)
+ && Objects.equals(parentPath, that.parentPath)
&& Objects.equals(performanceTarget, that.performanceTarget)
&& Objects.equals(queue, that.queue)
&& Objects.equals(runAs, that.runAs)
@@ -485,6 +502,7 @@ public int hashCode() {
name,
notificationSettings,
parameters,
+ parentPath,
performanceTarget,
queue,
runAs,
@@ -516,6 +534,7 @@ public String toString() {
.add("name", name)
.add("notificationSettings", notificationSettings)
.add("parameters", parameters)
+ .add("parentPath", parentPath)
.add("performanceTarget", performanceTarget)
.add("queue", queue)
.add("runAs", runAs)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java
index 3b2a3d50b..f270de7e0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java
@@ -52,6 +52,14 @@ public class Job {
@JsonProperty("next_page_token")
private String nextPageToken;
+ /**
+ * Path of the job object in workspace file tree, including file extension. If absent, the job
+ * doesn't have a workspace object. Example:
+ * /Workspace/user@example.com/my_project/my_job.job.json
+ */
+ @JsonProperty("path")
+ private String path;
+
/**
* The email of an active workspace user or the application ID of a service principal that the job
* runs as. This value can be changed by setting the `run_as` field when creating or updating a
@@ -138,6 +146,15 @@ public String getNextPageToken() {
return nextPageToken;
}
+ public Job setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
public Job setRunAsUserName(String runAsUserName) {
this.runAsUserName = runAsUserName;
return this;
@@ -177,6 +194,7 @@ public boolean equals(Object o) {
&& Objects.equals(hasMore, that.hasMore)
&& Objects.equals(jobId, that.jobId)
&& Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(path, that.path)
&& Objects.equals(runAsUserName, that.runAsUserName)
&& Objects.equals(settings, that.settings)
&& Objects.equals(triggerState, that.triggerState);
@@ -192,6 +210,7 @@ public int hashCode() {
hasMore,
jobId,
nextPageToken,
+ path,
runAsUserName,
settings,
triggerState);
@@ -207,6 +226,7 @@ public String toString() {
.add("hasMore", hasMore)
.add("jobId", jobId)
.add("nextPageToken", nextPageToken)
+ .add("path", path)
.add("runAsUserName", runAsUserName)
.add("settings", settings)
.add("triggerState", triggerState)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index d0d2ad7ee..91d8b5de6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -123,6 +123,13 @@ public class JobSettings {
@JsonProperty("parameters")
private Collection parameters;
+ /**
+ * Path of the job parent folder in workspace file tree. If absent, the job doesn't have a
+ * workspace object.
+ */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
/**
* The performance mode on a serverless job. This field determines the level of compute
* performance or cost-efficiency for the run.
@@ -330,6 +337,15 @@ public Collection getParameters() {
return parameters;
}
+ public JobSettings setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
public JobSettings setPerformanceTarget(PerformanceTarget performanceTarget) {
this.performanceTarget = performanceTarget;
return this;
@@ -440,6 +456,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notificationSettings, that.notificationSettings)
&& Objects.equals(parameters, that.parameters)
+ && Objects.equals(parentPath, that.parentPath)
&& Objects.equals(performanceTarget, that.performanceTarget)
&& Objects.equals(queue, that.queue)
&& Objects.equals(runAs, that.runAs)
@@ -470,6 +487,7 @@ public int hashCode() {
name,
notificationSettings,
parameters,
+ parentPath,
performanceTarget,
queue,
runAs,
@@ -500,6 +518,7 @@ public String toString() {
.add("name", name)
.add("notificationSettings", notificationSettings)
.add("parameters", parameters)
+ .add("parentPath", parentPath)
.add("performanceTarget", performanceTarget)
.add("queue", queue)
.add("runAs", runAs)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
new file mode 100755
index 000000000..36452e42e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
@@ -0,0 +1,125 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ModelTriggerConfiguration {
+ /**
+ * Aliases of the model versions to monitor. Can only be used in conjunction with condition
+ * MODEL_ALIAS_SET.
+ */
+ @JsonProperty("aliases")
+ private Collection aliases;
+
+ /** The condition based on which to trigger a job run. */
+ @JsonProperty("condition")
+ private ModelTriggerConfigurationCondition condition;
+
+ /**
+ * If set, the trigger starts a run only after the specified amount of time has passed since the
+ * last time the trigger fired. The minimum allowed value is 60 seconds.
+ */
+ @JsonProperty("min_time_between_triggers_seconds")
+ private Long minTimeBetweenTriggersSeconds;
+
+ /**
+ * Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level
+ * triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of
+ * metastore-level triggers.
+ */
+ @JsonProperty("securable_name")
+ private String securableName;
+
+ /**
+ * If set, the trigger starts a run only after no model updates have occurred for the specified
+ * time and can be used to wait for a series of model updates before triggering a run. The minimum
+ * allowed value is 60 seconds.
+ */
+ @JsonProperty("wait_after_last_change_seconds")
+ private Long waitAfterLastChangeSeconds;
+
+ public ModelTriggerConfiguration setAliases(Collection aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection getAliases() {
+ return aliases;
+ }
+
+ public ModelTriggerConfiguration setCondition(ModelTriggerConfigurationCondition condition) {
+ this.condition = condition;
+ return this;
+ }
+
+ public ModelTriggerConfigurationCondition getCondition() {
+ return condition;
+ }
+
+ public ModelTriggerConfiguration setMinTimeBetweenTriggersSeconds(
+ Long minTimeBetweenTriggersSeconds) {
+ this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds;
+ return this;
+ }
+
+ public Long getMinTimeBetweenTriggersSeconds() {
+ return minTimeBetweenTriggersSeconds;
+ }
+
+ public ModelTriggerConfiguration setSecurableName(String securableName) {
+ this.securableName = securableName;
+ return this;
+ }
+
+ public String getSecurableName() {
+ return securableName;
+ }
+
+ public ModelTriggerConfiguration setWaitAfterLastChangeSeconds(Long waitAfterLastChangeSeconds) {
+ this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds;
+ return this;
+ }
+
+ public Long getWaitAfterLastChangeSeconds() {
+ return waitAfterLastChangeSeconds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ModelTriggerConfiguration that = (ModelTriggerConfiguration) o;
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(condition, that.condition)
+ && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds)
+ && Objects.equals(securableName, that.securableName)
+ && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ aliases,
+ condition,
+ minTimeBetweenTriggersSeconds,
+ securableName,
+ waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ModelTriggerConfiguration.class)
+ .add("aliases", aliases)
+ .add("condition", condition)
+ .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds)
+ .add("securableName", securableName)
+ .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
new file mode 100755
index 000000000..263b649fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ModelTriggerConfigurationCondition {
+ MODEL_ALIAS_SET,
+ MODEL_CREATED,
+ MODEL_VERSION_READY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
index c68229720..66e16b5c3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
@@ -84,6 +84,13 @@ public class RunTask {
@JsonProperty("description")
private String description;
+ /**
+ * An optional flag to disable the task. If set to true, the task will not run even if it is part
+ * of a job.
+ */
+ @JsonProperty("disabled")
+ private Boolean disabled;
+
/**
* The actual performance target used by the serverless run during execution. This can differ from
* the client-set performance target on the request depending on whether the performance mode is
@@ -403,6 +410,15 @@ public String getDescription() {
return description;
}
+ public RunTask setDisabled(Boolean disabled) {
+ this.disabled = disabled;
+ return this;
+ }
+
+ public Boolean getDisabled() {
+ return disabled;
+ }
+
public RunTask setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) {
this.effectivePerformanceTarget = effectivePerformanceTarget;
return this;
@@ -734,6 +750,7 @@ public boolean equals(Object o) {
&& Objects.equals(dbtTask, that.dbtTask)
&& Objects.equals(dependsOn, that.dependsOn)
&& Objects.equals(description, that.description)
+ && Objects.equals(disabled, that.disabled)
&& Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(endTime, that.endTime)
@@ -785,6 +802,7 @@ public int hashCode() {
dbtTask,
dependsOn,
description,
+ disabled,
effectivePerformanceTarget,
emailNotifications,
endTime,
@@ -836,6 +854,7 @@ public String toString() {
.add("dbtTask", dbtTask)
.add("dependsOn", dependsOn)
.add("description", description)
+ .add("disabled", disabled)
.add("effectivePerformanceTarget", effectivePerformanceTarget)
.add("emailNotifications", emailNotifications)
.add("endTime", endTime)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
index 2ca6b1107..d5510335a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
@@ -57,6 +57,13 @@ public class SubmitTask {
@JsonProperty("description")
private String description;
+ /**
+ * An optional flag to disable the task. If set to true, the task will not run even if it is part
+ * of a job.
+ */
+ @JsonProperty("disabled")
+ private Boolean disabled;
+
/**
* An optional set of email addresses notified when the task run begins or completes. The default
* behavior is to not send any emails.
@@ -260,6 +267,15 @@ public String getDescription() {
return description;
}
+ public SubmitTask setDisabled(Boolean disabled) {
+ this.disabled = disabled;
+ return this;
+ }
+
+ public Boolean getDisabled() {
+ return disabled;
+ }
+
public SubmitTask setEmailNotifications(JobEmailNotifications emailNotifications) {
this.emailNotifications = emailNotifications;
return this;
@@ -471,6 +487,7 @@ public boolean equals(Object o) {
&& Objects.equals(dbtTask, that.dbtTask)
&& Objects.equals(dependsOn, that.dependsOn)
&& Objects.equals(description, that.description)
+ && Objects.equals(disabled, that.disabled)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(environmentKey, that.environmentKey)
&& Objects.equals(existingClusterId, that.existingClusterId)
@@ -506,6 +523,7 @@ public int hashCode() {
dbtTask,
dependsOn,
description,
+ disabled,
emailNotifications,
environmentKey,
existingClusterId,
@@ -541,6 +559,7 @@ public String toString() {
.add("dbtTask", dbtTask)
.add("dependsOn", dependsOn)
.add("description", description)
+ .add("disabled", disabled)
.add("emailNotifications", emailNotifications)
.add("environmentKey", environmentKey)
.add("existingClusterId", existingClusterId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java
index db8989c2e..999c1128d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java
@@ -22,7 +22,7 @@ public class TableUpdateTriggerConfiguration {
private Long minTimeBetweenTriggersSeconds;
/**
- * A list of Delta tables to monitor for changes. The table name must be in the format
+ * A list of tables to monitor for changes. The table name must be in the format
* `catalog_name.schema_name.table_name`.
*/
@JsonProperty("table_names")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
index 7ee1fe4b1..0c5217593 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
@@ -13,6 +13,10 @@ public class TriggerSettings {
@JsonProperty("file_arrival")
private FileArrivalTriggerConfiguration fileArrival;
+ /** */
+ @JsonProperty("model")
+ private ModelTriggerConfiguration model;
+
/** Whether this trigger is paused or not. */
@JsonProperty("pause_status")
private PauseStatus pauseStatus;
@@ -38,6 +42,15 @@ public FileArrivalTriggerConfiguration getFileArrival() {
return fileArrival;
}
+ public TriggerSettings setModel(ModelTriggerConfiguration model) {
+ this.model = model;
+ return this;
+ }
+
+ public ModelTriggerConfiguration getModel() {
+ return model;
+ }
+
public TriggerSettings setPauseStatus(PauseStatus pauseStatus) {
this.pauseStatus = pauseStatus;
return this;
@@ -80,6 +93,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
TriggerSettings that = (TriggerSettings) o;
return Objects.equals(fileArrival, that.fileArrival)
+ && Objects.equals(model, that.model)
&& Objects.equals(pauseStatus, that.pauseStatus)
&& Objects.equals(periodic, that.periodic)
&& Objects.equals(table, that.table)
@@ -88,13 +102,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(fileArrival, pauseStatus, periodic, table, tableUpdate);
+ return Objects.hash(fileArrival, model, pauseStatus, periodic, table, tableUpdate);
}
@Override
public String toString() {
return new ToStringer(TriggerSettings.class)
.add("fileArrival", fileArrival)
+ .add("model", model)
.add("pauseStatus", pauseStatus)
.add("periodic", periodic)
.add("table", table)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
index cc7583362..6e734b77e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
@@ -9,6 +9,7 @@ public enum AssetType {
ASSET_TYPE_APP,
ASSET_TYPE_DATA_TABLE,
ASSET_TYPE_GIT_REPO,
+ ASSET_TYPE_MCP,
ASSET_TYPE_MEDIA,
ASSET_TYPE_MODEL,
ASSET_TYPE_NOTEBOOK,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
index a3080894f..09bbcbe2c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
@@ -164,6 +164,11 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) {
return impl.getLoggedModel(request);
}
+ /** Batch endpoint for getting logged models from a list of model IDs */
+ public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) {
+ return impl.getLoggedModels(request);
+ }
+
public GetExperimentPermissionLevelsResponse getPermissionLevels(String experimentId) {
return getPermissionLevels(
new GetExperimentPermissionLevelsRequest().setExperimentId(experimentId));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
index 0d39a660b..d3b844e34 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
@@ -208,6 +208,19 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) {
}
}
+ @Override
+ public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) {
+ String path = "/api/2.0/mlflow/logged-models:batchGet";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GetLoggedModelsRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GetExperimentPermissionLevelsResponse getPermissionLevels(
GetExperimentPermissionLevelsRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
index 486c97d62..a2eba9962 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
@@ -89,6 +89,9 @@ FinalizeLoggedModelResponse finalizeLoggedModel(
/** Get a logged model. */
GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest getLoggedModelRequest);
+ /** Batch endpoint for getting logged models from a list of model IDs */
+ GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest getLoggedModelsRequest);
+
/** Gets the permission levels that a user can have on an object. */
GetExperimentPermissionLevelsResponse getPermissionLevels(
GetExperimentPermissionLevelsRequest getExperimentPermissionLevelsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java
new file mode 100755
index 000000000..c71186bd0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetLoggedModelsRequest {
+ /** The IDs of the logged models to retrieve. Max threshold is 100. */
+ @JsonIgnore
+ @QueryParam("model_ids")
+ private Collection modelIds;
+
+ public GetLoggedModelsRequest setModelIds(Collection modelIds) {
+ this.modelIds = modelIds;
+ return this;
+ }
+
+ public Collection getModelIds() {
+ return modelIds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLoggedModelsRequest that = (GetLoggedModelsRequest) o;
+ return Objects.equals(modelIds, that.modelIds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(modelIds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLoggedModelsRequest.class).add("modelIds", modelIds).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java
new file mode 100755
index 000000000..a5469473e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetLoggedModelsRequestResponse {
+ /** The retrieved logged models. */
+ @JsonProperty("models")
+ private Collection<LoggedModel> models;
+
+ public GetLoggedModelsRequestResponse setModels(Collection<LoggedModel> models) {
+ this.models = models;
+ return this;
+ }
+
+ public Collection<LoggedModel> getModels() {
+ return models;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLoggedModelsRequestResponse that = (GetLoggedModelsRequestResponse) o;
+ return Objects.equals(models, that.models);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(models);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLoggedModelsRequestResponse.class).add("models", models).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
index ac3b6fd36..74495054d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
@@ -9,14 +9,32 @@
@Generated
public class TokenAccessPolicy {
+ /** absolute OAuth session TTL in minutes when single-use refresh tokens are enabled */
+ @JsonProperty("absolute_session_lifetime_in_minutes")
+ private Long absoluteSessionLifetimeInMinutes;
+
/** access token time to live in minutes */
@JsonProperty("access_token_ttl_in_minutes")
private Long accessTokenTtlInMinutes;
+ /** whether to enable single-use refresh tokens */
+ @JsonProperty("enable_single_use_refresh_tokens")
+ private Boolean enableSingleUseRefreshTokens;
+
/** refresh token time to live in minutes */
@JsonProperty("refresh_token_ttl_in_minutes")
private Long refreshTokenTtlInMinutes;
+ public TokenAccessPolicy setAbsoluteSessionLifetimeInMinutes(
+ Long absoluteSessionLifetimeInMinutes) {
+ this.absoluteSessionLifetimeInMinutes = absoluteSessionLifetimeInMinutes;
+ return this;
+ }
+
+ public Long getAbsoluteSessionLifetimeInMinutes() {
+ return absoluteSessionLifetimeInMinutes;
+ }
+
public TokenAccessPolicy setAccessTokenTtlInMinutes(Long accessTokenTtlInMinutes) {
this.accessTokenTtlInMinutes = accessTokenTtlInMinutes;
return this;
@@ -26,6 +44,15 @@ public Long getAccessTokenTtlInMinutes() {
return accessTokenTtlInMinutes;
}
+ public TokenAccessPolicy setEnableSingleUseRefreshTokens(Boolean enableSingleUseRefreshTokens) {
+ this.enableSingleUseRefreshTokens = enableSingleUseRefreshTokens;
+ return this;
+ }
+
+ public Boolean getEnableSingleUseRefreshTokens() {
+ return enableSingleUseRefreshTokens;
+ }
+
public TokenAccessPolicy setRefreshTokenTtlInMinutes(Long refreshTokenTtlInMinutes) {
this.refreshTokenTtlInMinutes = refreshTokenTtlInMinutes;
return this;
@@ -40,19 +67,27 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TokenAccessPolicy that = (TokenAccessPolicy) o;
- return Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes)
+ return Objects.equals(absoluteSessionLifetimeInMinutes, that.absoluteSessionLifetimeInMinutes)
+ && Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes)
+ && Objects.equals(enableSingleUseRefreshTokens, that.enableSingleUseRefreshTokens)
&& Objects.equals(refreshTokenTtlInMinutes, that.refreshTokenTtlInMinutes);
}
@Override
public int hashCode() {
- return Objects.hash(accessTokenTtlInMinutes, refreshTokenTtlInMinutes);
+ return Objects.hash(
+ absoluteSessionLifetimeInMinutes,
+ accessTokenTtlInMinutes,
+ enableSingleUseRefreshTokens,
+ refreshTokenTtlInMinutes);
}
@Override
public String toString() {
return new ToStringer(TokenAccessPolicy.class)
+ .add("absoluteSessionLifetimeInMinutes", absoluteSessionLifetimeInMinutes)
.add("accessTokenTtlInMinutes", accessTokenTtlInMinutes)
+ .add("enableSingleUseRefreshTokens", enableSingleUseRefreshTokens)
.add("refreshTokenTtlInMinutes", refreshTokenTtlInMinutes)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java
new file mode 100755
index 000000000..00ce73ecb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ApplyEnvironmentRequest {
+ /** */
+ @JsonIgnore private String pipelineId;
+
+ public ApplyEnvironmentRequest setPipelineId(String pipelineId) {
+ this.pipelineId = pipelineId;
+ return this;
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ApplyEnvironmentRequest that = (ApplyEnvironmentRequest) o;
+ return Objects.equals(pipelineId, that.pipelineId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pipelineId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ApplyEnvironmentRequest.class).add("pipelineId", pipelineId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java
new file mode 100755
index 000000000..9dd9d8c06
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class ApplyEnvironmentRequestResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ApplyEnvironmentRequestResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java
new file mode 100755
index 000000000..729e23fae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ConnectionParameters {
+ /**
+ * Source catalog for initial connection. This is necessary for schema exploration in some
+ * database systems like Oracle, and optional but nice-to-have in some other database systems like
+ * Postgres. For Oracle databases, this maps to a service name.
+ */
+ @JsonProperty("source_catalog")
+ private String sourceCatalog;
+
+ public ConnectionParameters setSourceCatalog(String sourceCatalog) {
+ this.sourceCatalog = sourceCatalog;
+ return this;
+ }
+
+ public String getSourceCatalog() {
+ return sourceCatalog;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConnectionParameters that = (ConnectionParameters) o;
+ return Objects.equals(sourceCatalog, that.sourceCatalog);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sourceCatalog);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConnectionParameters.class)
+ .add("sourceCatalog", sourceCatalog)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
index 167282b32..9d6fdfd31 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
@@ -151,6 +151,10 @@ public class CreatePipeline {
@JsonProperty("trigger")
private PipelineTrigger trigger;
+ /** Usage policy of this pipeline. */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
public CreatePipeline setAllowDuplicateNames(Boolean allowDuplicateNames) {
this.allowDuplicateNames = allowDuplicateNames;
return this;
@@ -421,6 +425,15 @@ public PipelineTrigger getTrigger() {
return trigger;
}
+ public CreatePipeline setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -455,7 +468,8 @@ public boolean equals(Object o) {
&& Objects.equals(storage, that.storage)
&& Objects.equals(tags, that.tags)
&& Objects.equals(target, that.target)
- && Objects.equals(trigger, that.trigger);
+ && Objects.equals(trigger, that.trigger)
+ && Objects.equals(usagePolicyId, that.usagePolicyId);
}
@Override
@@ -490,7 +504,8 @@ public int hashCode() {
storage,
tags,
target,
- trigger);
+ trigger,
+ usagePolicyId);
}
@Override
@@ -526,6 +541,7 @@ public String toString() {
.add("tags", tags)
.add("target", target)
.add("trigger", trigger)
+ .add("usagePolicyId", usagePolicyId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
index 5b8e3c4f2..6cc967ec2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
@@ -160,6 +160,10 @@ public class EditPipeline {
@JsonProperty("trigger")
private PipelineTrigger trigger;
+ /** Usage policy of this pipeline. */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
public EditPipeline setAllowDuplicateNames(Boolean allowDuplicateNames) {
this.allowDuplicateNames = allowDuplicateNames;
return this;
@@ -439,6 +443,15 @@ public PipelineTrigger getTrigger() {
return trigger;
}
+ public EditPipeline setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -474,7 +487,8 @@ public boolean equals(Object o) {
&& Objects.equals(storage, that.storage)
&& Objects.equals(tags, that.tags)
&& Objects.equals(target, that.target)
- && Objects.equals(trigger, that.trigger);
+ && Objects.equals(trigger, that.trigger)
+ && Objects.equals(usagePolicyId, that.usagePolicyId);
}
@Override
@@ -510,7 +524,8 @@ public int hashCode() {
storage,
tags,
target,
- trigger);
+ trigger,
+ usagePolicyId);
}
@Override
@@ -547,6 +562,7 @@ public String toString() {
.add("tags", tags)
.add("target", target)
.add("trigger", trigger)
+ .add("usagePolicyId", usagePolicyId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
index ff158fa65..1c77c1b2c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
@@ -26,6 +26,10 @@ public class GetPipelineResponse {
@JsonProperty("effective_budget_policy_id")
private String effectiveBudgetPolicyId;
+ /** Serverless usage policy ID of the pipeline. */
+ @JsonProperty("effective_usage_policy_id")
+ private String effectiveUsagePolicyId;
+
/** The health of a pipeline. */
@JsonProperty("health")
private GetPipelineResponseHealth health;
@@ -102,6 +106,15 @@ public String getEffectiveBudgetPolicyId() {
return effectiveBudgetPolicyId;
}
+ public GetPipelineResponse setEffectiveUsagePolicyId(String effectiveUsagePolicyId) {
+ this.effectiveUsagePolicyId = effectiveUsagePolicyId;
+ return this;
+ }
+
+ public String getEffectiveUsagePolicyId() {
+ return effectiveUsagePolicyId;
+ }
+
public GetPipelineResponse setHealth(GetPipelineResponseHealth health) {
this.health = health;
return this;
@@ -192,6 +205,7 @@ public boolean equals(Object o) {
&& Objects.equals(clusterId, that.clusterId)
&& Objects.equals(creatorUserName, that.creatorUserName)
&& Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
+ && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId)
&& Objects.equals(health, that.health)
&& Objects.equals(lastModified, that.lastModified)
&& Objects.equals(latestUpdates, that.latestUpdates)
@@ -210,6 +224,7 @@ public int hashCode() {
clusterId,
creatorUserName,
effectiveBudgetPolicyId,
+ effectiveUsagePolicyId,
health,
lastModified,
latestUpdates,
@@ -228,6 +243,7 @@ public String toString() {
.add("clusterId", clusterId)
.add("creatorUserName", creatorUserName)
.add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
+ .add("effectiveUsagePolicyId", effectiveUsagePolicyId)
.add("health", health)
.add("lastModified", lastModified)
.add("latestUpdates", latestUpdates)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
index 58142fafd..4361c4480 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
@@ -23,6 +23,10 @@ public class IngestionGatewayPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;
+ /** Optional, Internal. Parameters required to establish an initial connection with the source. */
+ @JsonProperty("connection_parameters")
+ private ConnectionParameters connectionParameters;
+
/** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. */
@JsonProperty("gateway_storage_catalog")
private String gatewayStorageCatalog;
@@ -57,6 +61,16 @@ public String getConnectionName() {
return connectionName;
}
+ public IngestionGatewayPipelineDefinition setConnectionParameters(
+ ConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+ return this;
+ }
+
+ public ConnectionParameters getConnectionParameters() {
+ return connectionParameters;
+ }
+
public IngestionGatewayPipelineDefinition setGatewayStorageCatalog(String gatewayStorageCatalog) {
this.gatewayStorageCatalog = gatewayStorageCatalog;
return this;
@@ -91,6 +105,7 @@ public boolean equals(Object o) {
IngestionGatewayPipelineDefinition that = (IngestionGatewayPipelineDefinition) o;
return Objects.equals(connectionId, that.connectionId)
&& Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(connectionParameters, that.connectionParameters)
&& Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog)
&& Objects.equals(gatewayStorageName, that.gatewayStorageName)
&& Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema);
@@ -101,6 +116,7 @@ public int hashCode() {
return Objects.hash(
connectionId,
connectionName,
+ connectionParameters,
gatewayStorageCatalog,
gatewayStorageName,
gatewayStorageSchema);
@@ -111,6 +127,7 @@ public String toString() {
return new ToStringer(IngestionGatewayPipelineDefinition.class)
.add("connectionId", connectionId)
.add("connectionName", connectionName)
+ .add("connectionParameters", connectionParameters)
.add("gatewayStorageCatalog", gatewayStorageCatalog)
.add("gatewayStorageName", gatewayStorageName)
.add("gatewayStorageSchema", gatewayStorageSchema)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
index 6c311c809..1e96af349 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
@@ -17,6 +17,14 @@ public class IngestionPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;
+ /**
+ * Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
+ * directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
+ * fields in objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.
+ */
+ @JsonProperty("ingest_from_uc_foreign_catalog")
+ private Boolean ingestFromUcForeignCatalog;
+
/**
* Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
* with the source database. This is used with connectors to databases like SQL Server.
@@ -65,6 +73,16 @@ public String getConnectionName() {
return connectionName;
}
+ public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
+ Boolean ingestFromUcForeignCatalog) {
+ this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
+ return this;
+ }
+
+ public Boolean getIngestFromUcForeignCatalog() {
+ return ingestFromUcForeignCatalog;
+ }
+
public IngestionPipelineDefinition setIngestionGatewayId(String ingestionGatewayId) {
this.ingestionGatewayId = ingestionGatewayId;
return this;
@@ -126,6 +144,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
return Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
&& Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
&& Objects.equals(netsuiteJarPath, that.netsuiteJarPath)
&& Objects.equals(objects, that.objects)
@@ -138,6 +157,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
connectionName,
+ ingestFromUcForeignCatalog,
ingestionGatewayId,
netsuiteJarPath,
objects,
@@ -150,6 +170,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(IngestionPipelineDefinition.class)
.add("connectionName", connectionName)
+ .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
.add("ingestionGatewayId", ingestionGatewayId)
.add("netsuiteJarPath", netsuiteJarPath)
.add("objects", objects)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java
index 9e36878d7..88923522d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java
@@ -36,6 +36,10 @@ public class Origin {
@JsonProperty("flow_name")
private String flowName;
+ /** The UUID of the graph associated with this event, corresponding to a GRAPH_UPDATED event. */
+ @JsonProperty("graph_id")
+ private String graphId;
+
/** The optional host name where the event was triggered */
@JsonProperty("host")
private String host;
@@ -134,6 +138,15 @@ public String getFlowName() {
return flowName;
}
+ public Origin setGraphId(String graphId) {
+ this.graphId = graphId;
+ return this;
+ }
+
+ public String getGraphId() {
+ return graphId;
+ }
+
public Origin setHost(String host) {
this.host = host;
return this;
@@ -244,6 +257,7 @@ public boolean equals(Object o) {
&& Objects.equals(datasetName, that.datasetName)
&& Objects.equals(flowId, that.flowId)
&& Objects.equals(flowName, that.flowName)
+ && Objects.equals(graphId, that.graphId)
&& Objects.equals(host, that.host)
&& Objects.equals(maintenanceId, that.maintenanceId)
&& Objects.equals(materializationName, that.materializationName)
@@ -266,6 +280,7 @@ public int hashCode() {
datasetName,
flowId,
flowName,
+ graphId,
host,
maintenanceId,
materializationName,
@@ -288,6 +303,7 @@ public String toString() {
.add("datasetName", datasetName)
.add("flowId", flowId)
.add("flowName", flowName)
+ .add("graphId", graphId)
.add("host", host)
.add("maintenanceId", maintenanceId)
.add("materializationName", materializationName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
index ff14ee59b..78977dc16 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
@@ -139,6 +139,10 @@ public class PipelineSpec {
@JsonProperty("trigger")
private PipelineTrigger trigger;
+ /** Usage policy of this pipeline. */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
public PipelineSpec setBudgetPolicyId(String budgetPolicyId) {
this.budgetPolicyId = budgetPolicyId;
return this;
@@ -382,6 +386,15 @@ public PipelineTrigger getTrigger() {
return trigger;
}
+ public PipelineSpec setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -413,7 +426,8 @@ public boolean equals(Object o) {
&& Objects.equals(storage, that.storage)
&& Objects.equals(tags, that.tags)
&& Objects.equals(target, that.target)
- && Objects.equals(trigger, that.trigger);
+ && Objects.equals(trigger, that.trigger)
+ && Objects.equals(usagePolicyId, that.usagePolicyId);
}
@Override
@@ -445,7 +459,8 @@ public int hashCode() {
storage,
tags,
target,
- trigger);
+ trigger,
+ usagePolicyId);
}
@Override
@@ -478,6 +493,7 @@ public String toString() {
.add("tags", tags)
.add("target", target)
.add("trigger", trigger)
+ .add("usagePolicyId", usagePolicyId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
index 0a270aad3..4504eb9d9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
@@ -89,6 +89,14 @@ public GetPipelineResponse waitGetPipelineIdle(
throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
}
+ /**
+ * * Applies the current pipeline environment onto the pipeline compute. The environment applied
+ * can be used by subsequent dev-mode updates.
+ */
+ public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) {
+ return impl.applyEnvironment(request);
+ }
+
/**
* Creates a new data processing pipeline based on the requested configuration. If successful,
* this method returns the ID of the new pipeline.
@@ -191,6 +199,14 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) {
return impl.listUpdates(request);
}
+ /**
+ * * Restores a pipeline that was previously deleted, if within the restoration window. All tables
+ * deleted at pipeline deletion will be undropped as well.
+ */
+ public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) {
+ return impl.restorePipeline(request);
+ }
+
/**
* Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
* permissions if none are specified. Objects can inherit permissions from their root object.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
index 0fdad690a..9d044602a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
@@ -16,6 +16,19 @@ public PipelinesImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) {
+ String path = String.format("/api/2.0/pipelines/%s/environment/apply", request.getPipelineId());
+ try {
+ Request req = new Request("POST", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ApplyEnvironmentRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public CreatePipelineResponse create(CreatePipeline request) {
String path = "/api/2.0/pipelines";
@@ -139,6 +152,19 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) {
}
}
+ @Override
+ public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) {
+ String path = String.format("/api/2.0/pipelines/%s/restore", request.getPipelineId());
+ try {
+ Request req = new Request("POST", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, RestorePipelineRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public PipelinePermissions setPermissions(PipelinePermissionsRequest request) {
String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
index d2d0a81c4..d0fa81a7a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
@@ -24,6 +24,12 @@
*/
@Generated
public interface PipelinesService {
+ /**
+ * * Applies the current pipeline environment onto the pipeline compute. The environment applied
+ * can be used by subsequent dev-mode updates.
+ */
+ ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest applyEnvironmentRequest);
+
/**
* Creates a new data processing pipeline based on the requested configuration. If successful,
* this method returns the ID of the new pipeline.
@@ -61,6 +67,12 @@ ListPipelineEventsResponse listPipelineEvents(
/** List updates for an active pipeline. */
ListUpdatesResponse listUpdates(ListUpdatesRequest listUpdatesRequest);
+ /**
+ * * Restores a pipeline that was previously deleted, if within the restoration window. All tables
+ * deleted at pipeline deletion will be undropped as well.
+ */
+ RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest restorePipelineRequest);
+
/**
* Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
* permissions if none are specified. Objects can inherit permissions from their root object.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java
new file mode 100755
index 000000000..c9b91f8ae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class RestorePipelineRequest {
+ /** The ID of the pipeline to restore */
+ @JsonIgnore private String pipelineId;
+
+ public RestorePipelineRequest setPipelineId(String pipelineId) {
+ this.pipelineId = pipelineId;
+ return this;
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RestorePipelineRequest that = (RestorePipelineRequest) o;
+ return Objects.equals(pipelineId, that.pipelineId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pipelineId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestorePipelineRequest.class).add("pipelineId", pipelineId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java
new file mode 100755
index 000000000..293d32256
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class RestorePipelineRequestResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestorePipelineRequestResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
index 7626f06c3..75a00df90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
@@ -37,6 +37,13 @@ public class TableSpecificConfig {
private IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig
queryBasedConnectorConfig;
+ /**
+ * (Optional, Immutable) The row filter condition to be applied to the table. It must not contain
+ * the WHERE keyword, only the actual filter condition. It must be in DBSQL format.
+ */
+ @JsonProperty("row_filter")
+ private String rowFilter;
+
/**
* If true, formula fields defined in the table are included in the ingestion. This setting is
* only valid for the Salesforce connector
@@ -98,6 +105,15 @@ public TableSpecificConfig setQueryBasedConnectorConfig(
return queryBasedConnectorConfig;
}
+ public TableSpecificConfig setRowFilter(String rowFilter) {
+ this.rowFilter = rowFilter;
+ return this;
+ }
+
+ public String getRowFilter() {
+ return rowFilter;
+ }
+
public TableSpecificConfig setSalesforceIncludeFormulaFields(
Boolean salesforceIncludeFormulaFields) {
this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields;
@@ -145,6 +161,7 @@ public boolean equals(Object o) {
&& Objects.equals(includeColumns, that.includeColumns)
&& Objects.equals(primaryKeys, that.primaryKeys)
&& Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig)
+ && Objects.equals(rowFilter, that.rowFilter)
&& Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields)
&& Objects.equals(scdType, that.scdType)
&& Objects.equals(sequenceBy, that.sequenceBy)
@@ -158,6 +175,7 @@ public int hashCode() {
includeColumns,
primaryKeys,
queryBasedConnectorConfig,
+ rowFilter,
salesforceIncludeFormulaFields,
scdType,
sequenceBy,
@@ -171,6 +189,7 @@ public String toString() {
.add("includeColumns", includeColumns)
.add("primaryKeys", primaryKeys)
.add("queryBasedConnectorConfig", queryBasedConnectorConfig)
+ .add("rowFilter", rowFilter)
.add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields)
.add("scdType", scdType)
.add("sequenceBy", sequenceBy)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java
index 59982f4d8..823232d6f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java
@@ -41,6 +41,13 @@ public class UpdateInfo {
@JsonProperty("full_refresh_selection")
private Collection fullRefreshSelection;
+ /**
+ * Indicates whether the update is either part of a continuous job run, or running in legacy
+ * continuous pipeline mode.
+ */
+ @JsonProperty("mode")
+ private UpdateMode mode;
+
/** The ID of the pipeline. */
@JsonProperty("pipeline_id")
private String pipelineId;
@@ -122,6 +129,15 @@ public Collection getFullRefreshSelection() {
return fullRefreshSelection;
}
+ public UpdateInfo setMode(UpdateMode mode) {
+ this.mode = mode;
+ return this;
+ }
+
+ public UpdateMode getMode() {
+ return mode;
+ }
+
public UpdateInfo setPipelineId(String pipelineId) {
this.pipelineId = pipelineId;
return this;
@@ -178,6 +194,7 @@ public boolean equals(Object o) {
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(fullRefresh, that.fullRefresh)
&& Objects.equals(fullRefreshSelection, that.fullRefreshSelection)
+ && Objects.equals(mode, that.mode)
&& Objects.equals(pipelineId, that.pipelineId)
&& Objects.equals(refreshSelection, that.refreshSelection)
&& Objects.equals(state, that.state)
@@ -194,6 +211,7 @@ public int hashCode() {
creationTime,
fullRefresh,
fullRefreshSelection,
+ mode,
pipelineId,
refreshSelection,
state,
@@ -210,6 +228,7 @@ public String toString() {
.add("creationTime", creationTime)
.add("fullRefresh", fullRefresh)
.add("fullRefreshSelection", fullRefreshSelection)
+ .add("mode", mode)
.add("pipelineId", pipelineId)
.add("refreshSelection", refreshSelection)
.add("state", state)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java
new file mode 100755
index 000000000..752421818
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum UpdateMode {
+ CONTINUOUS,
+ DEFAULT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java
index aeaa56801..9cd72cb1f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java
@@ -22,9 +22,9 @@ public class AwsKeyInfo {
private String keyRegion;
/**
- * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to
- * `true` or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to
- * use this key for encrypting EBS volumes, set to `false`.
+ * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true
+ * or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this
+ * key for encrypting EBS volumes, set to false.
*/
@JsonProperty("reuse_key_for_cluster_volumes")
private Boolean reuseKeyForClusterVolumes;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java
new file mode 100755
index 000000000..69ababff2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java
@@ -0,0 +1,126 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.provisioning;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AzureKeyInfo {
+ /**
+ * The Disk Encryption Set id that is used to represent the key info used for Managed Disk BYOK
+ * use case
+ */
+ @JsonProperty("disk_encryption_set_id")
+ private String diskEncryptionSetId;
+
+ /**
+ * The structure to store key access credential This is set if the Managed Identity is being used
+ * to access the Azure Key Vault key.
+ */
+ @JsonProperty("key_access_configuration")
+ private KeyAccessConfiguration keyAccessConfiguration;
+
+ /** The name of the key in KeyVault. */
+ @JsonProperty("key_name")
+ private String keyName;
+
+ /** The base URI of the KeyVault. */
+ @JsonProperty("key_vault_uri")
+ private String keyVaultUri;
+
+ /** The tenant id where the KeyVault lives. */
+ @JsonProperty("tenant_id")
+ private String tenantId;
+
+ /** The current key version. */
+ @JsonProperty("version")
+ private String version;
+
+ public AzureKeyInfo setDiskEncryptionSetId(String diskEncryptionSetId) {
+ this.diskEncryptionSetId = diskEncryptionSetId;
+ return this;
+ }
+
+ public String getDiskEncryptionSetId() {
+ return diskEncryptionSetId;
+ }
+
+ public AzureKeyInfo setKeyAccessConfiguration(KeyAccessConfiguration keyAccessConfiguration) {
+ this.keyAccessConfiguration = keyAccessConfiguration;
+ return this;
+ }
+
+ public KeyAccessConfiguration getKeyAccessConfiguration() {
+ return keyAccessConfiguration;
+ }
+
+ public AzureKeyInfo setKeyName(String keyName) {
+ this.keyName = keyName;
+ return this;
+ }
+
+ public String getKeyName() {
+ return keyName;
+ }
+
+ public AzureKeyInfo setKeyVaultUri(String keyVaultUri) {
+ this.keyVaultUri = keyVaultUri;
+ return this;
+ }
+
+ public String getKeyVaultUri() {
+ return keyVaultUri;
+ }
+
+ public AzureKeyInfo setTenantId(String tenantId) {
+ this.tenantId = tenantId;
+ return this;
+ }
+
+ public String getTenantId() {
+ return tenantId;
+ }
+
+ public AzureKeyInfo setVersion(String version) {
+ this.version = version;
+ return this;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AzureKeyInfo that = (AzureKeyInfo) o;
+ return Objects.equals(diskEncryptionSetId, that.diskEncryptionSetId)
+ && Objects.equals(keyAccessConfiguration, that.keyAccessConfiguration)
+ && Objects.equals(keyName, that.keyName)
+ && Objects.equals(keyVaultUri, that.keyVaultUri)
+ && Objects.equals(tenantId, that.tenantId)
+ && Objects.equals(version, that.version);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ diskEncryptionSetId, keyAccessConfiguration, keyName, keyVaultUri, tenantId, version);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AzureKeyInfo.class)
+ .add("diskEncryptionSetId", diskEncryptionSetId)
+ .add("keyAccessConfiguration", keyAccessConfiguration)
+ .add("keyName", keyName)
+ .add("keyVaultUri", keyVaultUri)
+ .add("tenantId", tenantId)
+ .add("version", version)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java
index ca8290c0b..721620a58 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** The general workspace configurations that are specific to cloud providers. */
@Generated
public class CloudResourceContainer {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java
index ae5ac50f5..6478ac476 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java
@@ -13,17 +13,18 @@ public class CreateAwsKeyInfo {
@JsonProperty("key_alias")
private String keyAlias;
- /**
- * The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from
- * the ARN.
- */
+ /** The AWS KMS key's Amazon Resource Name (ARN). */
@JsonProperty("key_arn")
private String keyArn;
+ /** The AWS KMS key region. */
+ @JsonProperty("key_region")
+ private String keyRegion;
+
/**
- * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to
- * `true` or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key
- * also for encrypting EBS volumes, set this to `false`.
+ * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true
+ * or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this
+ * key for encrypting EBS volumes, set to false.
*/
@JsonProperty("reuse_key_for_cluster_volumes")
private Boolean reuseKeyForClusterVolumes;
@@ -46,6 +47,15 @@ public String getKeyArn() {
return keyArn;
}
+ public CreateAwsKeyInfo setKeyRegion(String keyRegion) {
+ this.keyRegion = keyRegion;
+ return this;
+ }
+
+ public String getKeyRegion() {
+ return keyRegion;
+ }
+
public CreateAwsKeyInfo setReuseKeyForClusterVolumes(Boolean reuseKeyForClusterVolumes) {
this.reuseKeyForClusterVolumes = reuseKeyForClusterVolumes;
return this;
@@ -62,12 +72,13 @@ public boolean equals(Object o) {
CreateAwsKeyInfo that = (CreateAwsKeyInfo) o;
return Objects.equals(keyAlias, that.keyAlias)
&& Objects.equals(keyArn, that.keyArn)
+ && Objects.equals(keyRegion, that.keyRegion)
&& Objects.equals(reuseKeyForClusterVolumes, that.reuseKeyForClusterVolumes);
}
@Override
public int hashCode() {
- return Objects.hash(keyAlias, keyArn, reuseKeyForClusterVolumes);
+ return Objects.hash(keyAlias, keyArn, keyRegion, reuseKeyForClusterVolumes);
}
@Override
@@ -75,6 +86,7 @@ public String toString() {
return new ToStringer(CreateAwsKeyInfo.class)
.add("keyAlias", keyAlias)
.add("keyArn", keyArn)
+ .add("keyRegion", keyRegion)
.add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java
index 461005798..c31784e77 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java
@@ -9,7 +9,7 @@
@Generated
public class CreateCredentialStsRole {
- /** The Amazon Resource Name (ARN) of the cross account role. */
+ /** The Amazon Resource Name (ARN) of the cross account IAM role. */
@JsonProperty("role_arn")
private String roleArn;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
index 2d62f0470..f070c7a31 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
@@ -9,7 +9,10 @@
@Generated
public class CreateGcpKeyInfo {
- /** The GCP KMS key's resource name */
+ /**
+ * Globally unique kms key resource id of the form
+ * projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4
+ */
@JsonProperty("kms_key_id")
private String kmsKeyId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java
index 9dff47e10..65b9799c3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java
@@ -37,8 +37,8 @@ public class CreateNetworkRequest {
private NetworkVpcEndpoints vpcEndpoints;
/**
- * The ID of the VPC associated with this network. VPC IDs can be used in multiple network
- * configurations.
+ * The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple
+ * networks.
*/
@JsonProperty("vpc_id")
private String vpcId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java
index aed606c1f..bf8d4bd70 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java
@@ -11,23 +11,24 @@
@Generated
public class CreatePrivateAccessSettingsRequest {
/**
- * An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when
- * registering the VPC endpoint configuration in your Databricks account. This is not the ID of
- * the VPC endpoint in AWS.
- *
- * Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC
- * endpoints that in your account that can connect to your workspace over AWS PrivateLink.
- *
- *
If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`,
- * this control only works for PrivateLink connections. To control how your workspace is accessed
- * via public internet, see [IP access lists].
- *
- *
[IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
+ * An array of Databricks VPC endpoint IDs. This is the Databricks ID returned when registering
+ * the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC
+ * endpoint in AWS. Only used when private_access_level is set to ENDPOINT. This is an allow list
+ * of VPC endpoints registered in your Databricks account that can connect to your workspace over
+ * AWS PrivateLink. Note: If hybrid access to your workspace is enabled by setting
+ * public_access_enabled to true, this control only works for PrivateLink connections. To control
+ * how your workspace is accessed via public internet, see IP access lists.
*/
@JsonProperty("allowed_vpc_endpoint_ids")
private Collection allowedVpcEndpointIds;
- /** */
+ /**
+ * The private access level controls which VPC endpoints can connect to the UI or API of any
+ * workspace that attaches this private access settings object. `ACCOUNT` level access (the
+ * default) allows only VPC endpoints that are registered in your Databricks account to connect to
+ * your workspace. `ENDPOINT` level access allows only specified VPC endpoints to connect to your
+ * workspace. For details, see allowed_vpc_endpoint_ids.
+ */
@JsonProperty("private_access_level")
private PrivateAccessLevel privateAccessLevel;
@@ -37,14 +38,13 @@ public class CreatePrivateAccessSettingsRequest {
/**
* Determines if the workspace can be accessed over public internet. For fully private workspaces,
- * you can optionally specify `false`, but only if you implement both the front-end and the
- * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is
- * enabled.
+ * you can optionally specify false, but only if you implement both the front-end and the back-end
+ * PrivateLink connections. Otherwise, specify true, which means that public access is enabled.
*/
@JsonProperty("public_access_enabled")
private Boolean publicAccessEnabled;
- /** The cloud region for workspaces associated with this private access settings object. */
+ /** The AWS region for workspaces attached to this private access settings object. */
@JsonProperty("region")
private String region;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
index 17bbcebc8..5c1e21eeb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
@@ -9,7 +9,17 @@
@Generated
public class CreateStorageConfigurationRequest {
- /** */
+ /**
+ * Optional IAM role that is used to access the workspace catalog which is created during
+ * workspace creation for UC by Default. If a storage configuration with this field populated is
+ * used to create a workspace, then a workspace catalog is created together with the workspace.
+ * The workspace catalog shares the root bucket with internal workspace storage (including DBFS
+ * root) but uses a dedicated bucket path prefix.
+ */
+ @JsonProperty("role_arn")
+ private String roleArn;
+
+ /** Root S3 bucket information. */
@JsonProperty("root_bucket_info")
private RootBucketInfo rootBucketInfo;
@@ -17,6 +27,15 @@ public class CreateStorageConfigurationRequest {
@JsonProperty("storage_configuration_name")
private String storageConfigurationName;
+ public CreateStorageConfigurationRequest setRoleArn(String roleArn) {
+ this.roleArn = roleArn;
+ return this;
+ }
+
+ public String getRoleArn() {
+ return roleArn;
+ }
+
public CreateStorageConfigurationRequest setRootBucketInfo(RootBucketInfo rootBucketInfo) {
this.rootBucketInfo = rootBucketInfo;
return this;
@@ -41,18 +60,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateStorageConfigurationRequest that = (CreateStorageConfigurationRequest) o;
- return Objects.equals(rootBucketInfo, that.rootBucketInfo)
+ return Objects.equals(roleArn, that.roleArn)
+ && Objects.equals(rootBucketInfo, that.rootBucketInfo)
&& Objects.equals(storageConfigurationName, that.storageConfigurationName);
}
@Override
public int hashCode() {
- return Objects.hash(rootBucketInfo, storageConfigurationName);
+ return Objects.hash(roleArn, rootBucketInfo, storageConfigurationName);
}
@Override
public String toString() {
return new ToStringer(CreateStorageConfigurationRequest.class)
+ .add("roleArn", roleArn)
.add("rootBucketInfo", rootBucketInfo)
.add("storageConfigurationName", storageConfigurationName)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
index 00ed46856..4c13c8d3e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
@@ -13,11 +13,11 @@ public class CreateVpcEndpointRequest {
@JsonProperty("aws_vpc_endpoint_id")
private String awsVpcEndpointId;
- /** */
+ /** The cloud info of this vpc endpoint. */
@JsonProperty("gcp_vpc_endpoint_info")
private GcpVpcEndpointInfo gcpVpcEndpointInfo;
- /** The AWS region in which this VPC endpoint object exists. */
+ /** The region in which this VPC endpoint object exists. */
@JsonProperty("region")
private String region;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
index 31d107a91..b478d7c6f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
@@ -10,14 +10,11 @@
@Generated
public class CreateWorkspaceRequest {
- /** The AWS region of the workspace's data plane. */
+ /** */
@JsonProperty("aws_region")
private String awsRegion;
- /**
- * The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field
- * to `gcp`.
- */
+ /** The cloud name. This field always has the value `gcp`. */
@JsonProperty("cloud")
private String cloud;
@@ -25,6 +22,16 @@ public class CreateWorkspaceRequest {
@JsonProperty("cloud_resource_container")
private CloudResourceContainer cloudResourceContainer;
+ /**
+ * If the compute mode is `SERVERLESS`, a serverless workspace is created that comes
+ * pre-configured with serverless compute and default storage, providing a fully-managed,
+ * enterprise-ready SaaS experience. This means you don't need to provide any resources managed by
+ * you, such as credentials, storage, or network. If the compute mode is `HYBRID` (which is the
+ * default option), a classic workspace is created that uses customer-managed resources.
+ */
+ @JsonProperty("compute_mode")
+ private CustomerFacingComputeMode computeMode;
+
/** ID of the workspace's credential configuration object. */
@JsonProperty("credentials_id")
private String credentialsId;
@@ -39,28 +46,21 @@ public class CreateWorkspaceRequest {
/**
* The deployment name defines part of the subdomain for the workspace. The workspace URL for the
- * web application and REST APIs is `.cloud.databricks.com`. For
- * example, if the deployment name is `abcsales`, your workspace URL will be
- * `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the
- * set of characters that are allowed in a subdomain.
- *
- * To set this value, you must have a deployment name prefix. Contact your Databricks account
- * team to add an account deployment name prefix to your account.
- *
- *
Workspace deployment names follow the account prefix and a hyphen. For example, if your
- * account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the
- * JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL
- * would be `acme-workspace-1.cloud.databricks.com`.
- *
- *
You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the
- * deployment name to only include the deployment prefix. For example, if your account's
- * deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name`
- * becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`.
- *
- *
This value must be unique across all non-deleted deployments across all AWS regions.
- *
- *
If a new workspace omits this property, the server generates a unique deployment name for
- * you with the pattern `dbc-xxxxxxxx-xxxx`.
+ * web application and REST APIs is .cloud.databricks.com. For example,
+ * if the deployment name is abcsales, your workspace URL will be
+ * https://abcsales.cloud.databricks.com. Hyphens are allowed. This property supports only the set
+ * of characters that are allowed in a subdomain. To set this value, you must have a deployment
+ * name prefix. Contact your Databricks account team to add an account deployment name prefix to
+ * your account. Workspace deployment names follow the account prefix and a hyphen. For example,
+ * if your account's deployment prefix is acme and the workspace deployment name is workspace-1,
+ * the JSON response for the deployment_name field becomes acme-workspace-1. The workspace URL
+ * would be acme-workspace-1.cloud.databricks.com. You can also set the deployment_name to the
+ * reserved keyword EMPTY if you want the deployment name to only include the deployment prefix.
+ * For example, if your account's deployment prefix is acme and the workspace deployment name is
+ * EMPTY, the deployment_name becomes acme only and the workspace URL is
+ * acme.cloud.databricks.com. This value must be unique across all non-deleted deployments across
+ * all AWS regions. If a new workspace omits this property, the server generates a unique
+ * deployment name for you with the pattern dbc-xxxxxxxx-xxxx.
*/
@JsonProperty("deployment_name")
private String deploymentName;
@@ -73,13 +73,9 @@ public class CreateWorkspaceRequest {
@JsonProperty("gke_config")
private GkeConfig gkeConfig;
- /** Whether no public IP is enabled for the workspace. */
- @JsonProperty("is_no_public_ip_enabled")
- private Boolean isNoPublicIpEnabled;
-
/**
- * The Google Cloud region of the workspace data plane in your Google account. For example,
- * `us-east4`.
+ * The Google Cloud region of the workspace data plane in your Google account (for example,
+ * `us-east4`).
*/
@JsonProperty("location")
private String location;
@@ -87,13 +83,16 @@ public class CreateWorkspaceRequest {
/**
* The ID of the workspace's managed services encryption key configuration object. This is used to
* help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries,
- * and query history. The provided key configuration object property `use_cases` must contain
- * `MANAGED_SERVICES`.
+ * and query history. The provided key configuration object property use_cases must contain
+ * MANAGED_SERVICES.
*/
@JsonProperty("managed_services_customer_managed_key_id")
private String managedServicesCustomerManagedKeyId;
- /** */
+ /**
+ * The ID of the workspace's network configuration object. To use AWS PrivateLink, this field is
+ * required.
+ */
@JsonProperty("network_id")
private String networkId;
@@ -102,11 +101,10 @@ public class CreateWorkspaceRequest {
private PricingTier pricingTier;
/**
- * ID of the workspace's private access settings object. Only used for PrivateLink. This ID must
- * be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace
+ * ID of the workspace's private access settings object. Only used for PrivateLink. You must
+ * specify this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace
* connection), back-end (data plane to control plane connection), or both connection types.
- *
- *