diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index f09edb728..82cc21f8f 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c3a3e3055fe11cb9683f398a665c225a03563ff1
\ No newline at end of file
+universe:/home/parth.bansal/vn0/universe
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 38a03117f..08fb422c6 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -65,6 +65,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabase.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabaseDatabasePermission.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java linguist-generated=true
@@ -76,6 +78,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSql
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java linguist-generated=true
@@ -84,6 +89,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.ja
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsSettingsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java linguist-generated=true
@@ -97,6 +104,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissi
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetCustomTemplateRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java linguist-generated=true
@@ -139,8 +147,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudge
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetConfigurationResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java linguist-generated=true
@@ -152,12 +162,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetCo
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java linguist-generated=true
@@ -173,10 +186,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudge
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java linguist-generated=true
@@ -192,13 +210,23 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStor
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java linguist-generated=true
@@ -239,8 +267,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true
@@ -310,6 +342,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java linguist-generated=true
@@ -322,6 +356,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAs
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java linguist-generated=true
@@ -581,6 +616,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTa
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true
@@ -700,6 +737,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttribut
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAvailability.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java linguist-generated=true
@@ -750,6 +788,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStat
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java linguist-generated=true
@@ -761,7 +800,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEv
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java linguist-generated=true
@@ -795,6 +838,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterP
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyPermissionsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetClusterRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEvents.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java linguist-generated=true
@@ -860,6 +904,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListCluster
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java linguist-generated=true
@@ -872,9 +918,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrd
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java linguist-generated=true
@@ -888,6 +936,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamil
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java linguist-generated=true
@@ -907,11 +956,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLi
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true
@@ -920,18 +974,25 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboar
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationMessageRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedbackRating.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true
@@ -954,7 +1015,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSta
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSuggestedQuestionsAttachment.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true
@@ -977,8 +1040,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true
@@ -986,19 +1058,30 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRef.java linguist-generated=true
@@ -1007,27 +1090,47 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseIn
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true
@@ -1036,6 +1139,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/Provisioni
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true
@@ -1047,9 +1151,49 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true
@@ -1231,9 +1375,43 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermis
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetWorkspaceAccessDetailLocalRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetWorkspaceAccessDetailRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/Group.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/PrincipalType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveGroupProxyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveGroupRequest.java linguist-generated=true
@@ -1246,6 +1424,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveUserRe
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ResolveUserResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ServicePrincipal.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/State.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/User.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UserName.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceAccessDetail.java linguist-generated=true
@@ -1348,6 +1534,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsReques
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java linguist-generated=true
@@ -1706,6 +1894,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersion
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java linguist-generated=true @@ -1903,6 +2093,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccoun databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true @@ -1969,6 +2162,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresC databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresSlotConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true @@ -1986,10 +2181,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoState.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureWorkspaceInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java linguist-generated=true @@ -2007,7 +2204,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Creden databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingComputeMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingStorageMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java linguist-generated=true @@ -2021,7 +2220,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Encryp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpCommonNetworkConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java linguist-generated=true @@ -2035,6 +2234,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetVpc databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfigConnectivityType.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyAccessConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java linguist-generated=true @@ -2064,11 +2264,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEnd databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceNetwork.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true @@ -2188,6 +2390,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndp databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotifications.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotificationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java linguist-generated=true @@ -2359,6 +2563,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpointSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java linguist-generated=true @@ -2434,6 +2639,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressT databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java linguist-generated=true @@ -2897,15 +3103,25 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesServi databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagPoliciesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagPoliciesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/Value.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java linguist-generated=true @@ -2952,6 +3168,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java linguist-generated=true @@ -2982,6 +3200,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRep databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportOutputs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java linguist-generated=true @@ -3050,3 +3269,34 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Workspace databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java linguist-generated=true 
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index 5461ba07e..7ddbd1230 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -16,6 +16,8 @@ import com.databricks.sdk.service.billing.LogDeliveryService; import com.databricks.sdk.service.billing.UsageDashboardsAPI; import com.databricks.sdk.service.billing.UsageDashboardsService; +import com.databricks.sdk.service.billing.UsagePolicyAPI; +import com.databricks.sdk.service.billing.UsagePolicyService; import com.databricks.sdk.service.catalog.AccountMetastoreAssignmentsAPI; import com.databricks.sdk.service.catalog.AccountMetastoreAssignmentsService; import com.databricks.sdk.service.catalog.AccountMetastoresAPI; @@ -115,6 +117,7 @@ public class AccountClient { private StorageAPI storageAPI; private AccountStorageCredentialsAPI storageCredentialsAPI; private UsageDashboardsAPI usageDashboardsAPI; + private UsagePolicyAPI usagePolicyAPI; private AccountUsersV2API usersV2API; private VpcEndpointsAPI vpcEndpointsAPI; private WorkspaceAssignmentAPI workspaceAssignmentAPI; @@ -160,6 +163,7 @@ public AccountClient(DatabricksConfig config) { storageAPI = new StorageAPI(apiClient); storageCredentialsAPI = new AccountStorageCredentialsAPI(apiClient); usageDashboardsAPI = new UsageDashboardsAPI(apiClient); + usagePolicyAPI = new UsagePolicyAPI(apiClient); usersV2API = new AccountUsersV2API(apiClient); vpcEndpointsAPI = new VpcEndpointsAPI(apiClient); workspaceAssignmentAPI = new WorkspaceAssignmentAPI(apiClient); @@ -591,6 +595,11 @@ public UsageDashboardsAPI usageDashboards() { return usageDashboardsAPI; } + /** A service that serves the REST API for usage policies. */ + public UsagePolicyAPI usagePolicy() { + return usagePolicyAPI; + } + /** * User identities recognized by Databricks and represented by email addresses. * @@ -1009,6 +1018,17 @@ public AccountClient withUsageDashboardsAPI(UsageDashboardsAPI usageDashboards) { return this; } + /** Replace the default UsagePolicyService with a custom implementation. */ + public AccountClient withUsagePolicyImpl(UsagePolicyService usagePolicy) { + return this.withUsagePolicyAPI(new UsagePolicyAPI(usagePolicy)); + } + + /** Replace the default UsagePolicyAPI with a custom implementation. */ + public AccountClient withUsagePolicyAPI(UsagePolicyAPI usagePolicy) { + this.usagePolicyAPI = usagePolicy; + return this; + } + /** Replace the default AccountUsersV2Service with a custom implementation. 
*/ public AccountClient withUsersV2Impl(AccountUsersV2Service accountUsersV2) { return this.withUsersV2API(new AccountUsersV2API(accountUsersV2)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index bac85f5a9..e8c91ce19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -101,8 +101,14 @@ import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI; import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService; import com.databricks.sdk.service.dashboards.LakeviewService; +import com.databricks.sdk.service.dashboards.QueryExecutionAPI; +import com.databricks.sdk.service.dashboards.QueryExecutionService; import com.databricks.sdk.service.database.DatabaseAPI; +import com.databricks.sdk.service.database.DatabaseProjectAPI; +import com.databricks.sdk.service.database.DatabaseProjectService; import com.databricks.sdk.service.database.DatabaseService; +import com.databricks.sdk.service.dataquality.DataQualityAPI; +import com.databricks.sdk.service.dataquality.DataQualityService; import com.databricks.sdk.service.files.DbfsService; import com.databricks.sdk.service.files.FilesAPI; import com.databricks.sdk.service.files.FilesService; @@ -236,6 +242,8 @@ import com.databricks.sdk.service.sql.StatementExecutionService; import com.databricks.sdk.service.sql.WarehousesAPI; import com.databricks.sdk.service.sql.WarehousesService; +import com.databricks.sdk.service.tags.TagAssignmentsAPI; +import com.databricks.sdk.service.tags.TagAssignmentsService; import com.databricks.sdk.service.tags.TagPoliciesAPI; import com.databricks.sdk.service.tags.TagPoliciesService; import com.databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI; @@ -286,8 +294,10 @@ public class WorkspaceClient { private CurrentUserAPI currentUserAPI; private DashboardWidgetsAPI dashboardWidgetsAPI; private DashboardsAPI dashboardsAPI; + private DataQualityAPI dataQualityAPI; private DataSourcesAPI dataSourcesAPI; private DatabaseAPI databaseAPI; + private DatabaseProjectAPI databaseProjectAPI; private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private EntityTagAssignmentsAPI entityTagAssignmentsAPI; @@ -337,6 +347,7 @@ public class WorkspaceClient { private QualityMonitorsAPI qualityMonitorsAPI; private QueriesAPI queriesAPI; private QueriesLegacyAPI queriesLegacyAPI; + private QueryExecutionAPI queryExecutionAPI; private QueryHistoryAPI queryHistoryAPI; private QueryVisualizationsAPI queryVisualizationsAPI; private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI; @@ -361,6 +372,7 @@ public class WorkspaceClient { private SystemSchemasAPI systemSchemasAPI; private TableConstraintsAPI tableConstraintsAPI; private TablesAPI tablesAPI; + private TagAssignmentsAPI tagAssignmentsAPI; private TagPoliciesAPI tagPoliciesAPI; private TemporaryPathCredentialsAPI temporaryPathCredentialsAPI; private TemporaryTableCredentialsAPI temporaryTableCredentialsAPI; @@ -416,8 +428,10 @@ public WorkspaceClient(DatabricksConfig config) { currentUserAPI = new CurrentUserAPI(apiClient); dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); dashboardsAPI = new DashboardsAPI(apiClient); + dataQualityAPI = new DataQualityAPI(apiClient); dataSourcesAPI = new DataSourcesAPI(apiClient); databaseAPI = new DatabaseAPI(apiClient); + databaseProjectAPI = new 
DatabaseProjectAPI(apiClient); dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); entityTagAssignmentsAPI = new EntityTagAssignmentsAPI(apiClient); @@ -467,6 +481,7 @@ public WorkspaceClient(DatabricksConfig config) { qualityMonitorsAPI = new QualityMonitorsAPI(apiClient); queriesAPI = new QueriesAPI(apiClient); queriesLegacyAPI = new QueriesLegacyAPI(apiClient); + queryExecutionAPI = new QueryExecutionAPI(apiClient); queryHistoryAPI = new QueryHistoryAPI(apiClient); queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); @@ -492,6 +507,7 @@ public WorkspaceClient(DatabricksConfig config) { systemSchemasAPI = new SystemSchemasAPI(apiClient); tableConstraintsAPI = new TableConstraintsAPI(apiClient); tablesAPI = new TablesAPI(apiClient); + tagAssignmentsAPI = new TagAssignmentsAPI(apiClient); tagPoliciesAPI = new TagPoliciesAPI(apiClient); temporaryPathCredentialsAPI = new TemporaryPathCredentialsAPI(apiClient); temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient); @@ -801,6 +817,11 @@ public DashboardsAPI dashboards() { return dashboardsAPI; } + /** Manage the data quality of Unity Catalog objects (currently supports `schema` and `table`). */ + public DataQualityAPI dataQuality() { + return dataQualityAPI; + } + /** * This API is provided to assist you in making new query objects. When creating a query object, * you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. @@ -824,6 +845,11 @@ public DatabaseAPI database() { return databaseAPI; } + /** Database Projects provide access to a database via REST API or direct SQL. */ + public DatabaseProjectAPI databaseProject() { + return databaseProjectAPI; + } + /** * DBFS API makes it simple to interact with various data sources without having to include a * user's credentials every time to read a file. @@ -1436,6 +1462,11 @@ public QueriesLegacyAPI queriesLegacy() { return queriesLegacyAPI; } + /** Query execution APIs for AI / BI Dashboards */ + public QueryExecutionAPI queryExecution() { + return queryExecutionAPI; + } + /** * A service responsible for storing and retrieving the list of queries run against SQL endpoints * and serverless compute. @@ -1555,8 +1586,8 @@ public RedashConfigAPI redashConfig() { * version metadata (comments, aliases) create a new model version, or update permissions on the * registered model, users must be owners of the registered model. * - *

Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, - * grants) that specify a securable type, use "FUNCTION" as the securable type. + *

Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants) + * that specify a securable type, use FUNCTION as the securable type. */ public RegisteredModelsAPI registeredModels() { return registeredModelsAPI; @@ -1727,16 +1758,16 @@ public SharesAPI shares() { * has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, * or it can be set to `CANCEL`, which cancels the statement. * - *

In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call - * waits up to 30 seconds; if the statement execution finishes within this time, the result data - * is returned directly in the response. If the execution takes longer than 30 seconds, the - * execution is canceled and the call returns with a `CANCELED` state. - Asynchronous mode - - * `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call doesn't wait for the statement to - * finish but returns directly with a statement ID. The status of the statement execution can be - * polled by issuing :method:statementexecution/getStatement with the statement ID. Once the + *

In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The + * call waits up to 30 seconds; if the statement execution finishes within this time, the result + * data is returned directly in the response. If the execution takes longer than 30 seconds, the + * execution is canceled and the call returns with a `CANCELED` state. - **Asynchronous mode** + * (`wait_timeout=0s` and `on_wait_timeout` is ignored): The call doesn't wait for the statement + * to finish but returns directly with a statement ID. The status of the statement execution can + * be polled by issuing :method:statementexecution/getStatement with the statement ID. Once the * execution has succeeded, this call also returns the result and metadata in the response. - - * Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for - * up to 10 seconds; if the statement execution finishes within this time, the result data is + * **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits + * for up to 10 seconds; if the statement execution finishes within this time, the result data is * returned directly in the response. If the execution takes longer than 10 seconds, a statement * ID is returned. The statement ID can be used to fetch status and results in the same way as in * the asynchronous mode. @@ -1853,6 +1884,11 @@ public TablesAPI tables() { return tablesAPI; } + /** Manage tag assignments on workspace-scoped objects. */ + public TagAssignmentsAPI tagAssignments() { + return tagAssignmentsAPI; + } + /** * The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions * for tag policies can be managed using the [Account Access Control Proxy API]. @@ -2409,6 +2445,17 @@ public WorkspaceClient withDashboardsAPI(DashboardsAPI dashboards) { return this; } + /** Replace the default DataQualityService with a custom implementation. */ + public WorkspaceClient withDataQualityImpl(DataQualityService dataQuality) { + return this.withDataQualityAPI(new DataQualityAPI(dataQuality)); + } + + /** Replace the default DataQualityAPI with a custom implementation. */ + public WorkspaceClient withDataQualityAPI(DataQualityAPI dataQuality) { + this.dataQualityAPI = dataQuality; + return this; + } + /** Replace the default DataSourcesService with a custom implementation. */ public WorkspaceClient withDataSourcesImpl(DataSourcesService dataSources) { return this.withDataSourcesAPI(new DataSourcesAPI(dataSources)); @@ -2431,6 +2478,17 @@ public WorkspaceClient withDatabaseAPI(DatabaseAPI database) { return this; } + /** Replace the default DatabaseProjectService with a custom implementation. */ + public WorkspaceClient withDatabaseProjectImpl(DatabaseProjectService databaseProject) { + return this.withDatabaseProjectAPI(new DatabaseProjectAPI(databaseProject)); + } + + /** Replace the default DatabaseProjectAPI with a custom implementation. */ + public WorkspaceClient withDatabaseProjectAPI(DatabaseProjectAPI databaseProject) { + this.databaseProjectAPI = databaseProject; + return this; + } + /** Replace the default DbfsService with a custom implementation. */ public WorkspaceClient withDbfsImpl(DbfsService dbfs) { return this.withDbfsAPI(new DbfsExt(dbfs)); @@ -2993,6 +3051,17 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) { return this; } + /** Replace the default QueryExecutionService with a custom implementation. 
*/ + public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) { + return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution)); + } + + /** Replace the default QueryExecutionAPI with a custom implementation. */ + public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) { + this.queryExecutionAPI = queryExecution; + return this; + } + /** Replace the default QueryHistoryService with a custom implementation. */ public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) { return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory)); @@ -3272,6 +3341,17 @@ public WorkspaceClient withTablesAPI(TablesAPI tables) { return this; } + /** Replace the default TagAssignmentsService with a custom implementation. */ + public WorkspaceClient withTagAssignmentsImpl(TagAssignmentsService tagAssignments) { + return this.withTagAssignmentsAPI(new TagAssignmentsAPI(tagAssignments)); + } + + /** Replace the default TagAssignmentsAPI with a custom implementation. */ + public WorkspaceClient withTagAssignmentsAPI(TagAssignmentsAPI tagAssignments) { + this.tagAssignmentsAPI = tagAssignments; + return this; + } + /** Replace the default TagPoliciesService with a custom implementation. */ public WorkspaceClient withTagPoliciesImpl(TagPoliciesService tagPolicies) { return this.withTagPoliciesAPI(new TagPoliciesAPI(tagPolicies)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 97b6f3b19..d7ef3d38a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -25,6 +25,10 @@ public class App { @JsonProperty("budget_policy_id") private String budgetPolicyId; + /** */ + @JsonProperty("compute_size") + private ComputeSize computeSize; + /** */ @JsonProperty("compute_status") private ComputeStatus computeStatus; @@ -52,6 +56,10 @@ public class App { @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** */ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** The effective api scopes granted to the user access token. 
*/ @JsonProperty("effective_user_api_scopes") private Collection<String> effectiveUserApiScopes; @@ -110,6 +118,10 @@ public class App { @JsonProperty("url") private String url; + /** */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + /** */ @JsonProperty("user_api_scopes") private Collection<String> userApiScopes; @@ -141,6 +153,15 @@ public String getBudgetPolicyId() { return budgetPolicyId; } + public App setComputeSize(ComputeSize computeSize) { + this.computeSize = computeSize; + return this; + } + + public ComputeSize getComputeSize() { + return computeSize; + } + public App setComputeStatus(ComputeStatus computeStatus) { this.computeStatus = computeStatus; return this; @@ -195,6 +216,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public App setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public App setEffectiveUserApiScopes(Collection<String> effectiveUserApiScopes) { this.effectiveUserApiScopes = effectiveUserApiScopes; return this; @@ -312,6 +342,15 @@ public String getUrl() { return url; } + public App setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + public App setUserApiScopes(Collection<String> userApiScopes) { this.userApiScopes = userApiScopes; return this; @@ -329,12 +368,14 @@ public boolean equals(Object o) { return Objects.equals(activeDeployment, that.activeDeployment) && Objects.equals(appStatus, that.appStatus) && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(computeSize, that.computeSize) && Objects.equals(computeStatus, that.computeStatus) && Objects.equals(createTime, that.createTime) && Objects.equals(creator, that.creator) && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) && Objects.equals(description, that.description) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) && Objects.equals(id, that.id) && Objects.equals(name, that.name) @@ -348,6 +389,7 @@ public boolean equals(Object o) { && Objects.equals(updateTime, that.updateTime) && Objects.equals(updater, that.updater) && Objects.equals(url, that.url) + && Objects.equals(usagePolicyId, that.usagePolicyId) && Objects.equals(userApiScopes, that.userApiScopes); } @@ -357,12 +399,14 @@ public int hashCode() { return Objects.hash( activeDeployment, appStatus, budgetPolicyId, + computeSize, computeStatus, createTime, creator, defaultSourceCodePath, description, effectiveBudgetPolicyId, + effectiveUsagePolicyId, effectiveUserApiScopes, id, name, @@ -376,6 +420,7 @@ public int hashCode() { updateTime, updater, url, + usagePolicyId, userApiScopes); } @@ -385,12 +430,14 @@ public String toString() { return new ToStringer(App.class) .add("activeDeployment", activeDeployment) .add("appStatus", appStatus) .add("budgetPolicyId", budgetPolicyId) + .add("computeSize", computeSize) .add("computeStatus", computeStatus) .add("createTime", createTime) .add("creator", creator) .add("defaultSourceCodePath", defaultSourceCodePath) .add("description", description) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("effectiveUserApiScopes", effectiveUserApiScopes) .add("id", id) .add("name", name) 
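Illustrative sketch (not part of the generated changeset): how a caller might populate the new `compute_size` and `usage_policy_id` fields added to `App` above. The app name and policy ID are placeholder values, and `effective_usage_policy_id` is assumed to be server-populated rather than set by the caller.

import com.databricks.sdk.service.apps.App;
import com.databricks.sdk.service.apps.ComputeSize;

public class AppFieldsSketch {
  public static void main(String[] args) {
    // Build an App payload using the new fluent setters from this changeset.
    App app =
        new App()
            .setName("my-app") // placeholder name
            .setComputeSize(ComputeSize.MEDIUM)
            .setUsagePolicyId("placeholder-usage-policy-id");
    // Assumed server-assigned: stays null until the service returns the app.
    System.out.println(app.getEffectiveUsagePolicyId());
    // toString() now includes computeSize and usagePolicyId via ToStringer.
    System.out.println(app);
  }
}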
@@ -404,6 +451,7 @@ public String toString() { .add("updateTime", updateTime) .add("updater", updater) .add("url", url) + .add("usagePolicyId", usagePolicyId) .add("userApiScopes", userApiScopes) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java index 1e8acf263..2761c1651 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -17,6 +17,10 @@ public class AppResource { @JsonProperty("description") private String description; + /** */ + @JsonProperty("genie_space") + private AppResourceGenieSpace genieSpace; + /** */ @JsonProperty("job") private AppResourceJob job; @@ -59,6 +63,15 @@ public String getDescription() { return description; } + public AppResource setGenieSpace(AppResourceGenieSpace genieSpace) { + this.genieSpace = genieSpace; + return this; + } + + public AppResourceGenieSpace getGenieSpace() { + return genieSpace; + } + public AppResource setJob(AppResourceJob job) { this.job = job; return this; @@ -120,6 +133,7 @@ public boolean equals(Object o) { AppResource that = (AppResource) o; return Objects.equals(database, that.database) && Objects.equals(description, that.description) + && Objects.equals(genieSpace, that.genieSpace) && Objects.equals(job, that.job) && Objects.equals(name, that.name) && Objects.equals(secret, that.secret) @@ -131,7 +145,15 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - database, description, job, name, secret, servingEndpoint, sqlWarehouse, ucSecurable); + database, + description, + genieSpace, + job, + name, + secret, + servingEndpoint, + sqlWarehouse, + ucSecurable); } @Override @@ -139,6 +161,7 @@ public String toString() { return new ToStringer(AppResource.class) .add("database", database) .add("description", description) + .add("genieSpace", genieSpace) .add("job", job) .add("name", name) .add("secret", secret) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java new file mode 100755 index 000000000..00045cdbd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceGenieSpace { + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("permission") + private AppResourceGenieSpaceGenieSpacePermission permission; + + /** */ + @JsonProperty("space_id") + private String spaceId; + + public AppResourceGenieSpace setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResourceGenieSpace setPermission(AppResourceGenieSpaceGenieSpacePermission permission) { + this.permission = permission; + return this; + } + + public AppResourceGenieSpaceGenieSpacePermission getPermission() { + return permission; + } + + public AppResourceGenieSpace setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceGenieSpace that = (AppResourceGenieSpace) o; + return Objects.equals(name, that.name) + && Objects.equals(permission, that.permission) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(name, permission, spaceId); + } + + @Override + public String toString() { + return new ToStringer(AppResourceGenieSpace.class) + .add("name", name) + .add("permission", permission) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java new file mode 100755 index 000000000..c93785cc3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceGenieSpaceGenieSpacePermission { + CAN_EDIT, + CAN_MANAGE, + CAN_RUN, + CAN_VIEW, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java new file mode 100755 index 000000000..b34c390e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java @@ -0,0 +1,136 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class AppUpdate { + /** */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + /** */ + @JsonProperty("compute_size") + private ComputeSize computeSize; + + /** */ + @JsonProperty("description") + private String description; + + /** */ + @JsonProperty("resources") + private Collection<AppResource> resources; + + /** */ + @JsonProperty("status") + private AppUpdateUpdateStatus status; + + /** */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + + /** */ + @JsonProperty("user_api_scopes") + private Collection<String> userApiScopes; + + public AppUpdate setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public AppUpdate setComputeSize(ComputeSize computeSize) { + this.computeSize = computeSize; + return this; + } + + public ComputeSize getComputeSize() { + return computeSize; + } + + public AppUpdate setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AppUpdate setResources(Collection<AppResource> resources) { + this.resources = resources; + return this; + } + + public Collection<AppResource> getResources() { + return resources; + } + + public AppUpdate setStatus(AppUpdateUpdateStatus status) { + this.status = status; + return this; + } + + public AppUpdateUpdateStatus getStatus() { + return status; + } + + public AppUpdate setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + + public AppUpdate setUserApiScopes(Collection<String> userApiScopes) { + this.userApiScopes = userApiScopes; + return this; + } + + public Collection<String> getUserApiScopes() { + return userApiScopes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppUpdate that = (AppUpdate) o; + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(computeSize, that.computeSize) + && Objects.equals(description, that.description) + && Objects.equals(resources, that.resources) + && Objects.equals(status, that.status) + && Objects.equals(usagePolicyId, that.usagePolicyId) + && Objects.equals(userApiScopes, that.userApiScopes); + } + + @Override + public int hashCode() { + return Objects.hash( + budgetPolicyId, computeSize, description, resources, status, usagePolicyId, userApiScopes); + } + + @Override + public String toString() { + return new ToStringer(AppUpdate.class) + .add("budgetPolicyId", budgetPolicyId) + .add("computeSize", computeSize) + .add("description", description) + .add("resources", resources) + .add("status", status) + .add("usagePolicyId", usagePolicyId) + .add("userApiScopes", userApiScopes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java new file mode 100755 index 000000000..d666a314b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java @@ -0,0 +1,58 @@ +// Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppUpdateUpdateStatus { + /** */ + @JsonProperty("message") + private String message; + + /** */ + @JsonProperty("state") + private AppUpdateUpdateStatusUpdateState state; + + public AppUpdateUpdateStatus setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public AppUpdateUpdateStatus setState(AppUpdateUpdateStatusUpdateState state) { + this.state = state; + return this; + } + + public AppUpdateUpdateStatusUpdateState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppUpdateUpdateStatus that = (AppUpdateUpdateStatus) o; + return Objects.equals(message, that.message) && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(message, state); + } + + @Override + public String toString() { + return new ToStringer(AppUpdateUpdateStatus.class) + .add("message", message) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java new file mode 100755 index 000000000..d86091f69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppUpdateUpdateStatusUpdateState { + FAILED, + IN_PROGRESS, + NOT_UPDATED, + SUCCEEDED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java index 1394408c5..dee0d4745 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java @@ -81,6 +81,55 @@ public App waitGetAppActive(String name, Duration timeout, Consumer<App> callbac throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } + public AppUpdate waitGetUpdateAppSucceeded(String appName) throws TimeoutException { + return waitGetUpdateAppSucceeded(appName, Duration.ofMinutes(20), null); + } + + public AppUpdate waitGetUpdateAppSucceeded( + String appName, Duration timeout, Consumer<AppUpdate> callback) throws TimeoutException { + long deadline = System.currentTimeMillis() + timeout.toMillis(); + java.util.List<AppUpdateUpdateStatusUpdateState> targetStates = + Arrays.asList(AppUpdateUpdateStatusUpdateState.SUCCEEDED); + java.util.List<AppUpdateUpdateStatusUpdateState> failureStates = + Arrays.asList(AppUpdateUpdateStatusUpdateState.FAILED); + String statusMessage = "polling..."; + int attempt = 1; + while (System.currentTimeMillis() < deadline) { + AppUpdate poll = getUpdate(new GetAppUpdateRequest().setAppName(appName)); + // Guard against a missing status before dereferencing it. + AppUpdateUpdateStatusUpdateState status = + poll.getStatus() != null ? poll.getStatus().getState() : null; + statusMessage = String.format("current status: %s", status); + if (poll.getStatus() != null) { + statusMessage = poll.getStatus().getMessage(); + } + if (targetStates.contains(status)) { + return poll; + } + if (callback != null) { + callback.accept(poll); + } + if (failureStates.contains(status)) { + String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); + throw new IllegalStateException(msg); + } + + String prefix = String.format("appName=%s", appName); + int sleep = attempt; + if (sleep > 10) { + // sleep 10s max per attempt + sleep = 10; + } + LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); + } + public AppDeployment waitGetDeploymentAppSucceeded(String appName, String deploymentId) throws TimeoutException { return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null); @@ -186,6 +235,17 @@ public Wait<App, App> create(CreateAppRequest request) { (timeout, callback) -> waitGetAppActive(response.getName(), timeout, callback), response); }
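Illustrative sketch (not part of the generated changeset): end-to-end use of the asynchronous update flow added below — `createUpdate` POSTs to `/api/2.0/apps/{app_name}/update` and returns a `Wait` whose `get()` blocks in `waitGetUpdateAppSucceeded` until the update reaches `SUCCEEDED` (or throws on `FAILED` or timeout). The app name, Genie space ID, and field values are placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.apps.*;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;

public class AsyncUpdateAppSketch {
  public static void main(String[] args) throws TimeoutException {
    WorkspaceClient w = new WorkspaceClient(); // credentials resolved from the environment
    // New resource type from this changeset: a Genie space attachment.
    AppResource genie =
        new AppResource()
            .setName("genie") // placeholder resource name
            .setGenieSpace(
                new AppResourceGenieSpace()
                    .setSpaceId("placeholder-space-id")
                    .setPermission(AppResourceGenieSpaceGenieSpacePermission.CAN_RUN));
    AsyncUpdateAppRequest request =
        new AsyncUpdateAppRequest()
            .setAppName("my-app") // placeholder app name
            .setApp(
                new App()
                    .setDescription("updated asynchronously")
                    .setResources(Arrays.asList(genie)))
            // Comma-separated mask, no spaces; prefer explicit fields over `*`.
            .setUpdateMask("description,resources");
    // Starts the update, then polls getUpdate until SUCCEEDED, FAILED, or timeout.
    AppUpdate update = w.apps().createUpdate(request).get();
    System.out.println(update.getStatus().getState());
  }
}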
+ /** + * Creates an app update and starts the update process. The update process is asynchronous and the + * status of the update can be checked with the GetAppUpdate method. + */ + public Wait<AppUpdate, AppUpdate> createUpdate(AsyncUpdateAppRequest request) { + AppUpdate response = impl.createUpdate(request); + return new Wait<>( + (timeout, callback) -> waitGetUpdateAppSucceeded(request.getAppName(), timeout, callback), + response); + } + public App delete(String name) { return delete(new DeleteAppRequest().setName(name)); } @@ -242,6 +302,15 @@ public AppPermissions getPermissions(GetAppPermissionsRequest request) { return impl.getPermissions(request); } + public AppUpdate getUpdate(String appName) { + return getUpdate(new GetAppUpdateRequest().setAppName(appName)); + } + + /** Gets the status of an app update. */ + public AppUpdate getUpdate(GetAppUpdateRequest request) { + return impl.getUpdate(request); + } + /** Lists all apps in the workspace. */ public Iterable<App> list(ListAppsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index 0e6ec9ff6..047d71e54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -30,6 +30,20 @@ public App create(CreateAppRequest request) { } } + @Override + public AppUpdate createUpdate(AsyncUpdateAppRequest request) { + String path = String.format("/api/2.0/apps/%s/update", request.getAppName()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AppUpdate.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public App delete(DeleteAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); @@ -112,6 +126,19 @@ public AppPermissions getPermissions(GetAppPermissionsRequest request) { } } + @Override + public AppUpdate getUpdate(GetAppUpdateRequest request) { + String path = String.format("/api/2.0/apps/%s/update", request.getAppName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AppUpdate.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ListAppsResponse list(ListAppsRequest request) { String path = "/api/2.0/apps"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java index 9e5b895bf..31742387d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java @@ -16,6 +16,12 @@ public interface AppsService { /** Creates a new app. */ App create(CreateAppRequest createAppRequest); + /** + * Creates an app update and starts the update process. The update process is asynchronous and the + * status of the update can be checked with the GetAppUpdate method. + */ + AppUpdate createUpdate(AsyncUpdateAppRequest asyncUpdateAppRequest); + /** Deletes an app. */ App delete(DeleteAppRequest deleteAppRequest); @@ -35,6 +41,9 @@ GetAppPermissionLevelsResponse getPermissionLevels( /** Gets the permissions of an app. 
Apps can inherit permissions from their root object. */ AppPermissions getPermissions(GetAppPermissionsRequest getAppPermissionsRequest); + /** Gets the status of an app update. */ + AppUpdate getUpdate(GetAppUpdateRequest getAppUpdateRequest); + /** Lists all apps in the workspace. */ ListAppsResponse list(ListAppsRequest listAppsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java new file mode 100755 index 000000000..136f919eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AsyncUpdateAppRequest.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AsyncUpdateAppRequest { + /** */ + @JsonProperty("app") + private App app; + + /** */ + @JsonIgnore private String appName; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("update_mask") + private String updateMask; + + public AsyncUpdateAppRequest setApp(App app) { + this.app = app; + return this; + } + + public App getApp() { + return app; + } + + public AsyncUpdateAppRequest setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + public AsyncUpdateAppRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncUpdateAppRequest that = (AsyncUpdateAppRequest) o; + return Objects.equals(app, that.app) + && Objects.equals(appName, that.appName) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(app, appName, updateMask); + } + + @Override + public String toString() { + return new ToStringer(AsyncUpdateAppRequest.class) + .add("app", app) + .add("appName", appName) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java new file mode 100755 index 000000000..ff5b63350 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ComputeSize { + LARGE, + LIQUID, + MEDIUM, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java new file mode 100755 index 000000000..152df04b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetAppUpdateRequest { + /** The name of the app. 
*/ + @JsonIgnore private String appName; + + public GetAppUpdateRequest setAppName(String appName) { + this.appName = appName; + return this; + } + + public String getAppName() { + return appName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAppUpdateRequest that = (GetAppUpdateRequest) o; + return Objects.equals(appName, that.appName); + } + + @Override + public int hashCode() { + return Objects.hash(appName); + } + + @Override + public String toString() { + return new ToStringer(GetAppUpdateRequest.class).add("appName", appName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java new file mode 100755 index 000000000..be656f0d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateUsagePolicyRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A request to create a UsagePolicy. */ +@Generated +public class CreateUsagePolicyRequest { + /** The policy to create. `policy_id` needs to be empty as it will be generated */ + @JsonProperty("policy") + private UsagePolicy policy; + + /** A unique identifier for this request. Restricted to 36 ASCII characters. */ + @JsonProperty("request_id") + private String requestId; + + public CreateUsagePolicyRequest setPolicy(UsagePolicy policy) { + this.policy = policy; + return this; + } + + public UsagePolicy getPolicy() { + return policy; + } + + public CreateUsagePolicyRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateUsagePolicyRequest that = (CreateUsagePolicyRequest) o; + return Objects.equals(policy, that.policy) && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(policy, requestId); + } + + @Override + public String toString() { + return new ToStringer(CreateUsagePolicyRequest.class) + .add("policy", policy) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java new file mode 100755 index 000000000..0d52acd6f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteUsagePolicyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteUsagePolicyRequest { + /** The Id of the policy. 
*/ + @JsonIgnore private String policyId; + + public DeleteUsagePolicyRequest setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteUsagePolicyRequest that = (DeleteUsagePolicyRequest) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteUsagePolicyRequest.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java new file mode 100755 index 000000000..db044183d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetUsagePolicyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetUsagePolicyRequest { + /** The Id of the policy. */ + @JsonIgnore private String policyId; + + public GetUsagePolicyRequest setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetUsagePolicyRequest that = (GetUsagePolicyRequest) o; + return Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(policyId); + } + + @Override + public String toString() { + return new ToStringer(GetUsagePolicyRequest.class).add("policyId", policyId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java new file mode 100755 index 000000000..81936b444 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesRequest.java @@ -0,0 +1,94 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListUsagePoliciesRequest { + /** A filter to apply to the list of policies. */ + @JsonIgnore + @QueryParam("filter_by") + private Filter filterBy; + + /** The maximum number of usage policies to return. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** A page token, received from a previous `ListUsagePolicies` call. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The sort specification. 
*/ + @JsonIgnore + @QueryParam("sort_spec") + private SortSpec sortSpec; + + public ListUsagePoliciesRequest setFilterBy(Filter filterBy) { + this.filterBy = filterBy; + return this; + } + + public Filter getFilterBy() { + return filterBy; + } + + public ListUsagePoliciesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListUsagePoliciesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListUsagePoliciesRequest setSortSpec(SortSpec sortSpec) { + this.sortSpec = sortSpec; + return this; + } + + public SortSpec getSortSpec() { + return sortSpec; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsagePoliciesRequest that = (ListUsagePoliciesRequest) o; + return Objects.equals(filterBy, that.filterBy) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(sortSpec, that.sortSpec); + } + + @Override + public int hashCode() { + return Objects.hash(filterBy, pageSize, pageToken, sortSpec); + } + + @Override + public String toString() { + return new ToStringer(ListUsagePoliciesRequest.class) + .add("filterBy", filterBy) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("sortSpec", sortSpec) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java new file mode 100755 index 000000000..6fb7a443c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListUsagePoliciesResponse.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A list of usage policies. */ +@Generated +public class ListUsagePoliciesResponse { + /** A token that can be sent as `page_token` to retrieve the next page. */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("policies") + private Collection<UsagePolicy> policies; + + /** A token that can be sent as `page_token` to retrieve the previous page.
*/ + @JsonProperty("previous_page_token") + private String previousPageToken; + + public ListUsagePoliciesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListUsagePoliciesResponse setPolicies(Collection<UsagePolicy> policies) { + this.policies = policies; + return this; + } + + public Collection<UsagePolicy> getPolicies() { + return policies; + } + + public ListUsagePoliciesResponse setPreviousPageToken(String previousPageToken) { + this.previousPageToken = previousPageToken; + return this; + } + + public String getPreviousPageToken() { + return previousPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsagePoliciesResponse that = (ListUsagePoliciesResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(policies, that.policies) + && Objects.equals(previousPageToken, that.previousPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, policies, previousPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListUsagePoliciesResponse.class) + .add("nextPageToken", nextPageToken) + .add("policies", policies) + .add("previousPageToken", previousPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java index baafb73c5..d213220e6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java @@ -27,6 +27,11 @@ public class UpdateBudgetPolicyRequest { /** The Id of the policy. This field is generated by Databricks and globally unique. */ @JsonIgnore private String policyId; + /** Field mask specifying which fields to update. When not provided, all fields are updated.
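+ * A hypothetical illustration (field paths assumed, not taken from the spec): a mask of
+ * {@code "policy_name,custom_tags"} would update only those two fields and leave the rest
+ * unchanged.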
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + public UpdateBudgetPolicyRequest setLimitConfig(LimitConfig limitConfig) { this.limitConfig = limitConfig; return this; @@ -54,6 +59,15 @@ public String getPolicyId() { return policyId; } + public UpdateBudgetPolicyRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -61,12 +75,13 @@ public boolean equals(Object o) { UpdateBudgetPolicyRequest that = (UpdateBudgetPolicyRequest) o; return Objects.equals(limitConfig, that.limitConfig) && Objects.equals(policy, that.policy) - && Objects.equals(policyId, that.policyId); + && Objects.equals(policyId, that.policyId) + && Objects.equals(updateMask, that.updateMask); } @Override public int hashCode() { - return Objects.hash(limitConfig, policy, policyId); + return Objects.hash(limitConfig, policy, policyId, updateMask); } @Override @@ -75,6 +90,7 @@ public String toString() { .add("limitConfig", limitConfig) .add("policy", policy) .add("policyId", policyId) + .add("updateMask", updateMask) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java new file mode 100755 index 000000000..b8a7f1824 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateUsagePolicyRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateUsagePolicyRequest { + /** DEPRECATED. This is a redundant field, as LimitConfig is part of the UsagePolicy. */ + @JsonIgnore + @QueryParam("limit_config") + private LimitConfig limitConfig; + + /** The policy to update. `creator_user_id` cannot be specified in the request. */ + @JsonProperty("policy") + private UsagePolicy policy; + + /** The Id of the policy. This field is generated by Databricks and globally unique.
*/ + @JsonIgnore private String policyId; + + public UpdateUsagePolicyRequest setLimitConfig(LimitConfig limitConfig) { + this.limitConfig = limitConfig; + return this; + } + + public LimitConfig getLimitConfig() { + return limitConfig; + } + + public UpdateUsagePolicyRequest setPolicy(UsagePolicy policy) { + this.policy = policy; + return this; + } + + public UsagePolicy getPolicy() { + return policy; + } + + public UpdateUsagePolicyRequest setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateUsagePolicyRequest that = (UpdateUsagePolicyRequest) o; + return Objects.equals(limitConfig, that.limitConfig) + && Objects.equals(policy, that.policy) + && Objects.equals(policyId, that.policyId); + } + + @Override + public int hashCode() { + return Objects.hash(limitConfig, policy, policyId); + } + + @Override + public String toString() { + return new ToStringer(UpdateUsagePolicyRequest.class) + .add("limitConfig", limitConfig) + .add("policy", policy) + .add("policyId", policyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java new file mode 100755 index 000000000..8481284e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicy.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Contains the UsagePolicy details (same structure as BudgetPolicy) */ +@Generated +public class UsagePolicy { + /** List of workspaces that this usage policy will be exclusively bound to. */ + @JsonProperty("binding_workspace_ids") + private Collection<Long> bindingWorkspaceIds; + + /** A list of tags defined by the customer. At most 20 entries are allowed per policy. */ + @JsonProperty("custom_tags") + private Collection<com.databricks.sdk.service.compute.CustomPolicyTag> customTags; + + /** The Id of the policy. This field is generated by Databricks and globally unique. */ + @JsonProperty("policy_id") + private String policyId; + + /** The name of the policy.
*/ + @JsonProperty("policy_name") + private String policyName; + + public UsagePolicy setBindingWorkspaceIds(Collection<Long> bindingWorkspaceIds) { + this.bindingWorkspaceIds = bindingWorkspaceIds; + return this; + } + + public Collection<Long> getBindingWorkspaceIds() { + return bindingWorkspaceIds; + } + + public UsagePolicy setCustomTags( + Collection<com.databricks.sdk.service.compute.CustomPolicyTag> customTags) { + this.customTags = customTags; + return this; + } + + public Collection<com.databricks.sdk.service.compute.CustomPolicyTag> getCustomTags() { + return customTags; + } + + public UsagePolicy setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public UsagePolicy setPolicyName(String policyName) { + this.policyName = policyName; + return this; + } + + public String getPolicyName() { + return policyName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UsagePolicy that = (UsagePolicy) o; + return Objects.equals(bindingWorkspaceIds, that.bindingWorkspaceIds) + && Objects.equals(customTags, that.customTags) + && Objects.equals(policyId, that.policyId) + && Objects.equals(policyName, that.policyName); + } + + @Override + public int hashCode() { + return Objects.hash(bindingWorkspaceIds, customTags, policyId, policyName); + } + + @Override + public String toString() { + return new ToStringer(UsagePolicy.class) + .add("bindingWorkspaceIds", bindingWorkspaceIds) + .add("customTags", customTags) + .add("policyId", policyId) + .add("policyName", policyName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java new file mode 100755 index 000000000..b0ec7d9f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyAPI.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** A service serves REST API about Usage policies */ +@Generated +public class UsagePolicyAPI { + private static final Logger LOG = LoggerFactory.getLogger(UsagePolicyAPI.class); + + private final UsagePolicyService impl; + + /** Regular-use constructor */ + public UsagePolicyAPI(ApiClient apiClient) { + impl = new UsagePolicyImpl(apiClient); + } + + /** Constructor for mocks */ + public UsagePolicyAPI(UsagePolicyService mock) { + impl = mock; + } + + /** Creates a new usage policy. */ + public UsagePolicy create(CreateUsagePolicyRequest request) { + return impl.create(request); + } + + public void delete(String policyId) { + delete(new DeleteUsagePolicyRequest().setPolicyId(policyId)); + } + + /** Deletes a usage policy */ + public void delete(DeleteUsagePolicyRequest request) { + impl.delete(request); + } + + public UsagePolicy get(String policyId) { + return get(new GetUsagePolicyRequest().setPolicyId(policyId)); + } + + /** Retrieves a usage policy by its ID. */ + public UsagePolicy get(GetUsagePolicyRequest request) { + return impl.get(request); + } + + /** + * Lists all usage policies. Policies are returned in the alphabetically ascending order of their + * names.
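+ *
+ * Usage sketch (hypothetical, not part of this change): {@code a} is an assumed AccountClient
+ * exposing this API as {@code a.usagePolicy()}; the page size is illustrative only.
+ *
+ * <pre>{@code
+ * for (UsagePolicy p : a.usagePolicy().list(new ListUsagePoliciesRequest().setPageSize(50L))) {
+ *   System.out.println(p.getPolicyName());
+ * }
+ * }</pre>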
+ */ + public Iterable<UsagePolicy> list(ListUsagePoliciesRequest request) { + return new Paginator<>( + request, + impl::list, + ListUsagePoliciesResponse::getPolicies, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** Updates a usage policy */ + public UsagePolicy update(UpdateUsagePolicyRequest request) { + return impl.update(request); + } + + public UsagePolicyService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java new file mode 100755 index 000000000..75072e20c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyImpl.java @@ -0,0 +1,96 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of UsagePolicy */ +@Generated +class UsagePolicyImpl implements UsagePolicyService { + private final ApiClient apiClient; + + public UsagePolicyImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public UsagePolicy create(CreateUsagePolicyRequest request) { + String path = + String.format("/api/2.1/accounts/%s/usage-policies", apiClient.configuredAccountID()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UsagePolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void delete(DeleteUsagePolicyRequest request) { + String path = + String.format( + "/api/2.1/accounts/%s/usage-policies/%s", + apiClient.configuredAccountID(), request.getPolicyId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public UsagePolicy get(GetUsagePolicyRequest request) { + String path = + String.format( + "/api/2.1/accounts/%s/usage-policies/%s", + apiClient.configuredAccountID(), request.getPolicyId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, UsagePolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListUsagePoliciesResponse list(ListUsagePoliciesRequest request) { + String path = + String.format("/api/2.1/accounts/%s/usage-policies", apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListUsagePoliciesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public UsagePolicy
update(UpdateUsagePolicyRequest request) { + String path = + String.format( + "/api/2.1/accounts/%s/usage-policies/%s", + apiClient.configuredAccountID(), request.getPolicyId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UsagePolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java new file mode 100755 index 000000000..d9edca42f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsagePolicyService.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; + +/** + * A service serves REST API about Usage policies + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface UsagePolicyService { + /** Creates a new usage policy. */ + UsagePolicy create(CreateUsagePolicyRequest createUsagePolicyRequest); + + /** Deletes a usage policy */ + void delete(DeleteUsagePolicyRequest deleteUsagePolicyRequest); + + /** Retrieves a usage policy by its ID. */ + UsagePolicy get(GetUsagePolicyRequest getUsagePolicyRequest); + + /** + * Lists all usage policies. Policies are returned in the alphabetically ascending order of their + * names. + */ + ListUsagePoliciesResponse list(ListUsagePoliciesRequest listUsagePoliciesRequest); + + /** Updates a usage policy */ + UsagePolicy update(UpdateUsagePolicyRequest updateUsagePolicyRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java index 61feaf2f3..6e1cd7c9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java @@ -25,20 +25,22 @@ public AccountMetastoreAssignmentsAPI(AccountMetastoreAssignmentsService mock) { } /** Creates an assignment to a metastore for a workspace */ - public void create(AccountsCreateMetastoreAssignment request) { - impl.create(request); + public AccountsCreateMetastoreAssignmentResponse create( + AccountsCreateMetastoreAssignment request) { + return impl.create(request); } - public void delete(long workspaceId, String metastoreId) { - delete( + public AccountsDeleteMetastoreAssignmentResponse delete(long workspaceId, String metastoreId) { + return delete( new DeleteAccountMetastoreAssignmentRequest() .setWorkspaceId(workspaceId) .setMetastoreId(metastoreId)); } /** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */ - public void delete(DeleteAccountMetastoreAssignmentRequest request) { - impl.delete(request); + public AccountsDeleteMetastoreAssignmentResponse delete( + DeleteAccountMetastoreAssignmentRequest request) { + return impl.delete(request); } public AccountsMetastoreAssignment get(long workspaceId) { @@ -47,7 +49,7 @@ public AccountsMetastoreAssignment get(long workspaceId) { /** * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is - * assigned a metastore, the mappig will be returned. If no metastore is assigned to the + * assigned a metastore, the mapping will be returned. If no metastore is assigned to the * workspace, the assignment will not be found and a 404 returned. */ public AccountsMetastoreAssignment get(GetAccountMetastoreAssignmentRequest request) { @@ -71,8 +73,9 @@ public Iterable<Long> list(ListAccountMetastoreAssignmentsRequest request) { * Updates an assignment to a metastore for a workspace. Currently, only the default catalog may * be updated.
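 *
 * Usage sketch (hypothetical, not part of this change): {@code a} is an assumed AccountClient;
 * the workspace ID, metastore ID, and catalog name are illustrative only.
 *
 * <pre>{@code
 * a.metastoreAssignments()
 *     .update(
 *         new AccountsUpdateMetastoreAssignment()
 *             .setWorkspaceId(1234L)
 *             .setMetastoreId("11a11111-1111-1111-1111-111111111111")
 *             .setMetastoreAssignment(
 *                 new UpdateMetastoreAssignment().setDefaultCatalogName("main")));
 * }</pre>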
*/ - public void update(AccountsUpdateMetastoreAssignment request) { - impl.update(request); + public AccountsUpdateMetastoreAssignmentResponse update( + AccountsUpdateMetastoreAssignment request) { + return impl.update(request); } public AccountMetastoreAssignmentsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java index f0418c91e..d9696ba4d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java @@ -17,7 +17,8 @@ public AccountMetastoreAssignmentsImpl(ApiClient apiClient) { } @Override - public void create(AccountsCreateMetastoreAssignment request) { + public AccountsCreateMetastoreAssignmentResponse create( + AccountsCreateMetastoreAssignment request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", @@ -27,14 +28,15 @@ public void create(AccountsCreateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, AccountsCreateMetastoreAssignmentResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public void delete(DeleteAccountMetastoreAssignmentRequest request) { + public AccountsDeleteMetastoreAssignmentResponse delete( + DeleteAccountMetastoreAssignmentRequest request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", @@ -43,7 +45,7 @@ public void delete(DeleteAccountMetastoreAssignmentRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, AccountsDeleteMetastoreAssignmentResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -83,7 +85,8 @@ public ListAccountMetastoreAssignmentsResponse list( } @Override - public void update(AccountsUpdateMetastoreAssignment request) { + public AccountsUpdateMetastoreAssignmentResponse update( + AccountsUpdateMetastoreAssignment request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", @@ -93,7 +96,7 @@ public void update(AccountsUpdateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, AccountsUpdateMetastoreAssignmentResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java index e1b89bd09..6ad225224 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java @@ -13,14 +13,16 @@ @Generated public interface 
AccountMetastoreAssignmentsService { /** Creates an assignment to a metastore for a workspace */ - void create(AccountsCreateMetastoreAssignment accountsCreateMetastoreAssignment); + AccountsCreateMetastoreAssignmentResponse create( + AccountsCreateMetastoreAssignment accountsCreateMetastoreAssignment); /** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */ - void delete(DeleteAccountMetastoreAssignmentRequest deleteAccountMetastoreAssignmentRequest); + AccountsDeleteMetastoreAssignmentResponse delete( + DeleteAccountMetastoreAssignmentRequest deleteAccountMetastoreAssignmentRequest); /** * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is - * assigned a metastore, the mappig will be returned. If no metastore is assigned to the + * assigned a metastore, the mapping will be returned. If no metastore is assigned to the * workspace, the assignment will not be found and a 404 returned. */ AccountsMetastoreAssignment get( @@ -34,5 +36,6 @@ ListAccountMetastoreAssignmentsResponse list( * Updates an assignment to a metastore for a workspace. Currently, only the default catalog may * be updated. */ - void update(AccountsUpdateMetastoreAssignment accountsUpdateMetastoreAssignment); + AccountsUpdateMetastoreAssignmentResponse update( + AccountsUpdateMetastoreAssignment accountsUpdateMetastoreAssignment); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java index 601b17d68..52fd325f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java @@ -28,36 +28,39 @@ public AccountMetastoresAPI(AccountMetastoresService mock) { } /** Creates a Unity Catalog metastore. */ - public AccountsMetastoreInfo create(AccountsCreateMetastore request) { + public AccountsCreateMetastoreResponse create(AccountsCreateMetastore request) { return impl.create(request); } - public void delete(String metastoreId) { - delete(new DeleteAccountMetastoreRequest().setMetastoreId(metastoreId)); + public AccountsDeleteMetastoreResponse delete(String metastoreId) { + return delete(new DeleteAccountMetastoreRequest().setMetastoreId(metastoreId)); } /** Deletes a Unity Catalog metastore for an account, both specified by ID. */ - public void delete(DeleteAccountMetastoreRequest request) { - impl.delete(request); + public AccountsDeleteMetastoreResponse delete(DeleteAccountMetastoreRequest request) { + return impl.delete(request); } - public AccountsMetastoreInfo get(String metastoreId) { + public AccountsGetMetastoreResponse get(String metastoreId) { return get(new GetAccountMetastoreRequest().setMetastoreId(metastoreId)); } /** Gets a Unity Catalog metastore from an account, both specified by ID. */ - public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { + public AccountsGetMetastoreResponse get(GetAccountMetastoreRequest request) { return impl.get(request); } /** Gets all Unity Catalog metastores associated with an account specified by ID. */ public Iterable<MetastoreInfo> list() { return new Paginator<>( - null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null); + null, + (Void v) -> impl.list(), + AccountsListMetastoresResponse::getMetastores, + response -> null); } /** Updates an existing Unity Catalog metastore.
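 *
 * Usage sketch (hypothetical, not part of this change): {@code a} is an assumed AccountClient;
 * the payload type {@code UpdateMetastore} follows the pre-existing shape and the values are
 * illustrative only.
 *
 * <pre>{@code
 * AccountsUpdateMetastoreResponse r =
 *     a.metastores()
 *         .update(
 *             new AccountsUpdateMetastore()
 *                 .setMetastoreId("11a11111-1111-1111-1111-111111111111")
 *                 .setMetastoreInfo(new UpdateMetastore().setNewName("renamed-metastore")));
 * }</pre>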
*/ - public AccountsMetastoreInfo update(AccountsUpdateMetastore request) { + public AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore request) { return impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java index 56328c4f0..e983809d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java @@ -17,21 +17,21 @@ public AccountMetastoresImpl(ApiClient apiClient) { } @Override - public AccountsMetastoreInfo create(AccountsCreateMetastore request) { + public AccountsCreateMetastoreResponse create(AccountsCreateMetastore request) { String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID()); try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, AccountsMetastoreInfo.class); + return apiClient.execute(req, AccountsCreateMetastoreResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public void delete(DeleteAccountMetastoreRequest request) { + public AccountsDeleteMetastoreResponse delete(DeleteAccountMetastoreRequest request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s", @@ -40,14 +40,14 @@ public void delete(DeleteAccountMetastoreRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, AccountsDeleteMetastoreResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { + public AccountsGetMetastoreResponse get(GetAccountMetastoreRequest request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s", @@ -56,26 +56,26 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, AccountsMetastoreInfo.class); + return apiClient.execute(req, AccountsGetMetastoreResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public ListMetastoresResponse list() { + public AccountsListMetastoresResponse list() { String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID()); try { Request req = new Request("GET", path); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, ListMetastoresResponse.class); + return apiClient.execute(req, AccountsListMetastoresResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public AccountsMetastoreInfo update(AccountsUpdateMetastore request) { + public AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s", @@ -85,7 +85,7 @@ public AccountsMetastoreInfo update(AccountsUpdateMetastore request) { 
ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, AccountsMetastoreInfo.class); + return apiClient.execute(req, AccountsUpdateMetastoreResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java index 07e71499f..1ec720114 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java @@ -14,17 +14,18 @@ @Generated public interface AccountMetastoresService { /** Creates a Unity Catalog metastore. */ - AccountsMetastoreInfo create(AccountsCreateMetastore accountsCreateMetastore); + AccountsCreateMetastoreResponse create(AccountsCreateMetastore accountsCreateMetastore); /** Deletes a Unity Catalog metastore for an account, both specified by ID. */ - void delete(DeleteAccountMetastoreRequest deleteAccountMetastoreRequest); + AccountsDeleteMetastoreResponse delete( + DeleteAccountMetastoreRequest deleteAccountMetastoreRequest); /** Gets a Unity Catalog metastore from an account, both specified by ID. */ - AccountsMetastoreInfo get(GetAccountMetastoreRequest getAccountMetastoreRequest); + AccountsGetMetastoreResponse get(GetAccountMetastoreRequest getAccountMetastoreRequest); /** Gets all Unity Catalog metastores associated with an account specified by ID. */ - ListMetastoresResponse list(); + AccountsListMetastoresResponse list(); /** Updates an existing Unity Catalog metastore. */ - AccountsMetastoreInfo update(AccountsUpdateMetastore accountsUpdateMetastore); + AccountsUpdateMetastoreResponse update(AccountsUpdateMetastore accountsUpdateMetastore); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java index b6ff09529..254dc846d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java @@ -25,20 +25,20 @@ public AccountStorageCredentialsAPI(AccountStorageCredentialsService mock) { } /** - * Creates a new storage credential. The request object is specific to the cloud: + * Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole** + * for AWS credentials - **AzureServicePrincipal** for Azure credentials - + * **GcpServiceAccountKey** for GCP credentials * - *

* **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * - * **GcpServiceAcountKey** for GCP credentials. - * - *

The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on + *

The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on * the metastore. */ - public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential request) { + public AccountsCreateStorageCredentialInfo create(AccountsCreateStorageCredential request) { return impl.create(request); } - public void delete(String metastoreId, String storageCredentialName) { - delete( + public AccountsDeleteStorageCredentialResponse delete( + String metastoreId, String storageCredentialName) { + return delete( new DeleteAccountStorageCredentialRequest() .setMetastoreId(metastoreId) .setStorageCredentialName(storageCredentialName)); @@ -48,8 +48,9 @@ public void delete(String metastoreId, String storageCredentialName) { * Deletes a storage credential from the metastore. The caller must be an owner of the storage * credential. */ - public void delete(DeleteAccountStorageCredentialRequest request) { - impl.delete(request); + public AccountsDeleteStorageCredentialResponse delete( + DeleteAccountStorageCredentialRequest request) { + return impl.delete(request); } public AccountsStorageCredentialInfo get(String metastoreId, String storageCredentialName) { @@ -82,9 +83,9 @@ public Iterable list(ListAccountStorageCredentialsRequest /** * Updates a storage credential on the metastore. The caller must be the owner of the storage - * credential. If the caller is a metastore admin, only the __owner__ credential can be changed. + * credential. If the caller is a metastore admin, only the **owner** credential can be changed. */ - public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) { + public AccountsUpdateStorageCredentialResponse update(AccountsUpdateStorageCredential request) { return impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java index 5eb10df59..26ed48604 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java @@ -17,7 +17,7 @@ public AccountStorageCredentialsImpl(ApiClient apiClient) { } @Override - public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential request) { + public AccountsCreateStorageCredentialInfo create(AccountsCreateStorageCredential request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s/storage-credentials", @@ -27,14 +27,15 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, AccountsStorageCredentialInfo.class); + return apiClient.execute(req, AccountsCreateStorageCredentialInfo.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public void delete(DeleteAccountStorageCredentialRequest request) { + public AccountsDeleteStorageCredentialResponse delete( + DeleteAccountStorageCredentialRequest request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s", @@ -45,7 +46,7 @@ public void delete(DeleteAccountStorageCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); 
req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, AccountsDeleteStorageCredentialResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -86,7 +87,7 @@ public ListAccountStorageCredentialsResponse list(ListAccountStorageCredentialsR } @Override - public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) { + public AccountsUpdateStorageCredentialResponse update(AccountsUpdateStorageCredential request) { String path = String.format( "/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s", @@ -98,7 +99,7 @@ public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential requ ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, AccountsStorageCredentialInfo.class); + return apiClient.execute(req, AccountsUpdateStorageCredentialResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java index 2cceee11c..5c537dacb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java @@ -13,22 +13,22 @@ @Generated public interface AccountStorageCredentialsService { /** - * Creates a new storage credential. The request object is specific to the cloud: + * Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole** + * for AWS credentials - **AzureServicePrincipal** for Azure credentials - + * **GcpServiceAccountKey** for GCP credentials * - *

* **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * - * **GcpServiceAcountKey** for GCP credentials. - * - *

The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on + *

The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on * the metastore. */ - AccountsStorageCredentialInfo create( + AccountsCreateStorageCredentialInfo create( AccountsCreateStorageCredential accountsCreateStorageCredential); /** * Deletes a storage credential from the metastore. The caller must be an owner of the storage * credential. */ - void delete(DeleteAccountStorageCredentialRequest deleteAccountStorageCredentialRequest); + AccountsDeleteStorageCredentialResponse delete( + DeleteAccountStorageCredentialRequest deleteAccountStorageCredentialRequest); /** * Gets a storage credential from the metastore. The caller must be a metastore admin, the owner @@ -43,8 +43,8 @@ ListAccountStorageCredentialsResponse list( /** * Updates a storage credential on the metastore. The caller must be the owner of the storage - * credential. If the caller is a metastore admin, only the __owner__ credential can be changed. + * credential. If the caller is a metastore admin, only the **owner** credential can be changed. */ - AccountsStorageCredentialInfo update( + AccountsUpdateStorageCredentialResponse update( AccountsUpdateStorageCredential accountsUpdateStorageCredential); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java index b297cbf98..e86dbfa1e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastore.java @@ -7,18 +7,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Properties of the new metastore. */ @Generated public class AccountsCreateMetastore { /** */ @JsonProperty("metastore_info") - private CreateMetastore metastoreInfo; + private CreateAccountsMetastore metastoreInfo; - public AccountsCreateMetastore setMetastoreInfo(CreateMetastore metastoreInfo) { + public AccountsCreateMetastore setMetastoreInfo(CreateAccountsMetastore metastoreInfo) { this.metastoreInfo = metastoreInfo; return this; } - public CreateMetastore getMetastoreInfo() { + public CreateAccountsMetastore getMetastoreInfo() { return metastoreInfo; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java index fa3e7a1e7..5ce5863fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignment.java @@ -8,6 +8,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The mapping from workspace to metastore. 
*/ @Generated public class AccountsCreateMetastoreAssignment { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java new file mode 100755 index 000000000..d8b003eaf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreAssignmentResponse.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** The metastore assignment was successfully created. */ +@Generated +public class AccountsCreateMetastoreAssignmentResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AccountsCreateMetastoreAssignmentResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java similarity index 76% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java index 249aeb544..b6848ebe1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateMetastoreResponse.java @@ -8,12 +8,12 @@ import java.util.Objects; @Generated -public class AccountsMetastoreInfo { +public class AccountsCreateMetastoreResponse { /** */ @JsonProperty("metastore_info") private MetastoreInfo metastoreInfo; - public AccountsMetastoreInfo setMetastoreInfo(MetastoreInfo metastoreInfo) { + public AccountsCreateMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) { this.metastoreInfo = metastoreInfo; return this; } @@ -26,7 +26,7 @@ public MetastoreInfo getMetastoreInfo() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - AccountsMetastoreInfo that = (AccountsMetastoreInfo) o; + AccountsCreateMetastoreResponse that = (AccountsCreateMetastoreResponse) o; return Objects.equals(metastoreInfo, that.metastoreInfo); } @@ -37,7 +37,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(AccountsMetastoreInfo.class) + return new ToStringer(AccountsCreateMetastoreResponse.class) .add("metastoreInfo", metastoreInfo) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java index c1c33ea9f..a19caa490 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredential.java @@ -12,17 +12,25 @@ public class 
AccountsCreateStorageCredential { /** */ @JsonProperty("credential_info") - private CreateStorageCredential credentialInfo; + private CreateAccountsStorageCredential credentialInfo; /** Unity Catalog metastore ID */ @JsonIgnore private String metastoreId; - public AccountsCreateStorageCredential setCredentialInfo(CreateStorageCredential credentialInfo) { + /** + * Optional, default false. Supplying true to this argument skips validation of the created set of + * credentials. + */ + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public AccountsCreateStorageCredential setCredentialInfo( + CreateAccountsStorageCredential credentialInfo) { this.credentialInfo = credentialInfo; return this; } - public CreateStorageCredential getCredentialInfo() { + public CreateAccountsStorageCredential getCredentialInfo() { return credentialInfo; } @@ -35,18 +43,28 @@ public String getMetastoreId() { return metastoreId; } + public AccountsCreateStorageCredential setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AccountsCreateStorageCredential that = (AccountsCreateStorageCredential) o; return Objects.equals(credentialInfo, that.credentialInfo) - && Objects.equals(metastoreId, that.metastoreId); + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(skipValidation, that.skipValidation); } @Override public int hashCode() { - return Objects.hash(credentialInfo, metastoreId); + return Objects.hash(credentialInfo, metastoreId, skipValidation); } @Override @@ -54,6 +72,7 @@ public String toString() { return new ToStringer(AccountsCreateStorageCredential.class) .add("credentialInfo", credentialInfo) .add("metastoreId", metastoreId) + .add("skipValidation", skipValidation) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java new file mode 100755 index 000000000..420d976e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsCreateStorageCredentialInfo.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
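Taken together, the account storage-credential operations above now return dedicated response types and accept a `skip_validation` flag. A minimal sketch of the create path, assuming the `AccountClient#storageCredentials()` accessor and placeholder identifiers:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsCreateStorageCredential;
import com.databricks.sdk.service.catalog.AccountsCreateStorageCredentialInfo;
import com.databricks.sdk.service.catalog.AwsIamRoleRequest;
import com.databricks.sdk.service.catalog.CreateAccountsStorageCredential;

public class CreateAccountCredentialSketch {
  public static void main(String[] args) {
    // Resolves account-level auth from the environment (DATABRICKS_ACCOUNT_ID, etc.).
    AccountClient account = new AccountClient();

    AccountsCreateStorageCredentialInfo created =
        account
            .storageCredentials() // assumed accessor for this service
            .create(
                new AccountsCreateStorageCredential()
                    .setMetastoreId("<metastore-id>") // placeholder
                    .setSkipValidation(true) // new flag: skip validation of the created credentials
                    .setCredentialInfo(
                        new CreateAccountsStorageCredential()
                            .setName("finance-data-role")
                            .setComment("created via SDK sketch")
                            .setAwsIamRole(
                                new AwsIamRoleRequest()
                                    .setRoleArn("arn:aws:iam::123456789012:role/finance"))));

    System.out.println(created.getCredentialInfo().getName());
  }
}
```

The update path further below accepts the same `setSkipValidation(true)` toggle.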
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AccountsCreateStorageCredentialInfo { + /** */ + @JsonProperty("credential_info") + private StorageCredentialInfo credentialInfo; + + public AccountsCreateStorageCredentialInfo setCredentialInfo( + StorageCredentialInfo credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public StorageCredentialInfo getCredentialInfo() { + return credentialInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsCreateStorageCredentialInfo that = (AccountsCreateStorageCredentialInfo) o; + return Objects.equals(credentialInfo, that.credentialInfo); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsCreateStorageCredentialInfo.class) + .add("credentialInfo", credentialInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java new file mode 100755 index 000000000..bb80b9155 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreAssignmentResponse.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** The metastore assignment was successfully deleted. */ +@Generated +public class AccountsDeleteMetastoreAssignmentResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AccountsDeleteMetastoreAssignmentResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java new file mode 100755 index 000000000..c764feedf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteMetastoreResponse.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** The metastore was successfully deleted. 
*/ +@Generated +public class AccountsDeleteMetastoreResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AccountsDeleteMetastoreResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java new file mode 100755 index 000000000..125aa3bdf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsDeleteStorageCredentialResponse.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** The storage credential was successfully deleted. */ +@Generated +public class AccountsDeleteStorageCredentialResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AccountsDeleteStorageCredentialResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java new file mode 100755 index 000000000..2da0eb3f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsGetMetastoreResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The metastore was successfully returned. 
*/ +@Generated +public class AccountsGetMetastoreResponse { + /** */ + @JsonProperty("metastore_info") + private MetastoreInfo metastoreInfo; + + public AccountsGetMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) { + this.metastoreInfo = metastoreInfo; + return this; + } + + public MetastoreInfo getMetastoreInfo() { + return metastoreInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsGetMetastoreResponse that = (AccountsGetMetastoreResponse) o; + return Objects.equals(metastoreInfo, that.metastoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsGetMetastoreResponse.class) + .add("metastoreInfo", metastoreInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java new file mode 100755 index 000000000..95620fe4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsListMetastoresResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Metastores were returned successfully. */ +@Generated +public class AccountsListMetastoresResponse { + /** An array of metastore information objects. */ + @JsonProperty("metastores") + private Collection<MetastoreInfo> metastores; + + public AccountsListMetastoresResponse setMetastores(Collection<MetastoreInfo> metastores) { + this.metastores = metastores; + return this; + } + + public Collection<MetastoreInfo> getMetastores() { + return metastores; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsListMetastoresResponse that = (AccountsListMetastoresResponse) o; + return Objects.equals(metastores, that.metastores); + } + + @Override + public int hashCode() { + return Objects.hash(metastores); + } + + @Override + public String toString() { + return new ToStringer(AccountsListMetastoresResponse.class) + .add("metastores", metastores) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java index bf989d674..fbe83d2bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsMetastoreAssignment.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The workspace metastore assignment was successfully returned.
*/ @Generated public class AccountsMetastoreAssignment { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java index 696342a98..f3e5074e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsStorageCredentialInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The storage credential was successfully retrieved. */ @Generated public class AccountsStorageCredentialInfo { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java index 044d8c6f2..74a421445 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastore.java @@ -8,14 +8,15 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Properties of the metastore to change. */ @Generated public class AccountsUpdateMetastore { /** Unity Catalog metastore ID */ @JsonIgnore private String metastoreId; - /** */ + /** Properties of the metastore to change. */ @JsonProperty("metastore_info") - private UpdateMetastore metastoreInfo; + private UpdateAccountsMetastore metastoreInfo; public AccountsUpdateMetastore setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -26,12 +27,12 @@ public String getMetastoreId() { return metastoreId; } - public AccountsUpdateMetastore setMetastoreInfo(UpdateMetastore metastoreInfo) { + public AccountsUpdateMetastore setMetastoreInfo(UpdateAccountsMetastore metastoreInfo) { this.metastoreInfo = metastoreInfo; return this; } - public UpdateMetastore getMetastoreInfo() { + public UpdateAccountsMetastore getMetastoreInfo() { return metastoreInfo; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java index 3ce7c6f48..a50a5eab0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignment.java @@ -8,6 +8,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The metastore assignment to update. */ @Generated public class AccountsUpdateMetastoreAssignment { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java new file mode 100755 index 000000000..648dbfa47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreAssignmentResponse.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
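One consequence of the new empty response classes: as the interface change earlier shows, `delete` now returns a typed object instead of `void`. That is source-compatible for callers that discard the result, though not binary-compatible. A sketch under the same assumptions as above:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse;
import com.databricks.sdk.service.catalog.DeleteAccountStorageCredentialRequest;

public class DeleteAccountCredentialSketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();

    // The response type carries no fields today; capturing it is optional but now possible.
    AccountsDeleteStorageCredentialResponse response =
        account
            .storageCredentials() // assumed accessor
            .delete(
                new DeleteAccountStorageCredentialRequest()
                    .setMetastoreId("<metastore-id>") // placeholder
                    .setStorageCredentialName("finance-data-role"));
    System.out.println(response);
  }
}
```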
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** The metastore assignment was successfully updated. */ +@Generated +public class AccountsUpdateMetastoreAssignmentResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateMetastoreAssignmentResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java new file mode 100755 index 000000000..4a4d5d560 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateMetastoreResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The metastore update request succeeded. */ +@Generated +public class AccountsUpdateMetastoreResponse { + /** */ + @JsonProperty("metastore_info") + private MetastoreInfo metastoreInfo; + + public AccountsUpdateMetastoreResponse setMetastoreInfo(MetastoreInfo metastoreInfo) { + this.metastoreInfo = metastoreInfo; + return this; + } + + public MetastoreInfo getMetastoreInfo() { + return metastoreInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsUpdateMetastoreResponse that = (AccountsUpdateMetastoreResponse) o; + return Objects.equals(metastoreInfo, that.metastoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(metastoreInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateMetastoreResponse.class) + .add("metastoreInfo", metastoreInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java index bca8ab349..f654e4d3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java @@ -8,24 +8,32 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The storage credential to update. */ @Generated public class AccountsUpdateStorageCredential { /** */ @JsonProperty("credential_info") - private UpdateStorageCredential credentialInfo; + private UpdateAccountsStorageCredential credentialInfo; /** Unity Catalog metastore ID */ @JsonIgnore private String metastoreId; + /** + * Optional. Supplying true to this argument skips validation of the updated set of credentials. + */ + @JsonProperty("skip_validation") + private Boolean skipValidation; + /** Name of the storage credential. 
*/ @JsonIgnore private String storageCredentialName; - public AccountsUpdateStorageCredential setCredentialInfo(UpdateStorageCredential credentialInfo) { + public AccountsUpdateStorageCredential setCredentialInfo( + UpdateAccountsStorageCredential credentialInfo) { this.credentialInfo = credentialInfo; return this; } - public UpdateStorageCredential getCredentialInfo() { + public UpdateAccountsStorageCredential getCredentialInfo() { return credentialInfo; } @@ -38,6 +46,15 @@ public String getMetastoreId() { return metastoreId; } + public AccountsUpdateStorageCredential setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + public AccountsUpdateStorageCredential setStorageCredentialName(String storageCredentialName) { this.storageCredentialName = storageCredentialName; return this; @@ -54,12 +71,13 @@ public boolean equals(Object o) { AccountsUpdateStorageCredential that = (AccountsUpdateStorageCredential) o; return Objects.equals(credentialInfo, that.credentialInfo) && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(skipValidation, that.skipValidation) && Objects.equals(storageCredentialName, that.storageCredentialName); } @Override public int hashCode() { - return Objects.hash(credentialInfo, metastoreId, storageCredentialName); + return Objects.hash(credentialInfo, metastoreId, skipValidation, storageCredentialName); } @Override @@ -67,6 +85,7 @@ public String toString() { return new ToStringer(AccountsUpdateStorageCredential.class) .add("credentialInfo", credentialInfo) .add("metastoreId", metastoreId) + .add("skipValidation", skipValidation) .add("storageCredentialName", storageCredentialName) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java new file mode 100755 index 000000000..0f1796167 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredentialResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The storage credential was successfully updated. 
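For updates, the new `skip_validation` field and the `AccountsUpdateStorageCredentialResponse` return type combine as sketched below. `UpdateAccountsStorageCredential`'s own fields are not shown in this diff, so the `setComment` call is an assumed illustration:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsUpdateStorageCredential;
import com.databricks.sdk.service.catalog.AccountsUpdateStorageCredentialResponse;
import com.databricks.sdk.service.catalog.UpdateAccountsStorageCredential;

public class UpdateAccountCredentialSketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();

    AccountsUpdateStorageCredentialResponse updated =
        account
            .storageCredentials() // assumed accessor
            .update(
                new AccountsUpdateStorageCredential()
                    .setMetastoreId("<metastore-id>") // placeholder
                    .setStorageCredentialName("finance-data-role")
                    .setSkipValidation(true) // skip validation of the updated credentials
                    .setCredentialInfo(
                        new UpdateAccountsStorageCredential()
                            .setComment("rotated credentials"))); // assumed field

    System.out.println(updated.getCredentialInfo());
  }
}
```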
*/ +@Generated +public class AccountsUpdateStorageCredentialResponse { + /** */ + @JsonProperty("credential_info") + private StorageCredentialInfo credentialInfo; + + public AccountsUpdateStorageCredentialResponse setCredentialInfo( + StorageCredentialInfo credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public StorageCredentialInfo getCredentialInfo() { + return credentialInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountsUpdateStorageCredentialResponse that = (AccountsUpdateStorageCredentialResponse) o; + return Objects.equals(credentialInfo, that.credentialInfo); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo); + } + + @Override + public String toString() { + return new ToStringer(AccountsUpdateStorageCredentialResponse.class) + .add("credentialInfo", credentialInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index 170f10432..e400d5bdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -29,6 +29,10 @@ public class CatalogInfo { @JsonProperty("connection_name") private String connectionName; + /** Status of conversion of FOREIGN catalog to UC Native catalog. */ + @JsonProperty("conversion_info") + private ConversionInfo conversionInfo; + /** Time at which this catalog was created, in epoch milliseconds. */ @JsonProperty("created_at") private Long createdAt; @@ -37,6 +41,10 @@ public class CatalogInfo { @JsonProperty("created_by") private String createdBy; + /** Disaster Recovery replication state snapshot. 
*/ + @JsonProperty("dr_replication_info") + private DrReplicationInfo drReplicationInfo; + /** */ @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; @@ -149,6 +157,15 @@ public String getConnectionName() { return connectionName; } + public CatalogInfo setConversionInfo(ConversionInfo conversionInfo) { + this.conversionInfo = conversionInfo; + return this; + } + + public ConversionInfo getConversionInfo() { + return conversionInfo; + } + public CatalogInfo setCreatedAt(Long createdAt) { this.createdAt = createdAt; return this; @@ -167,6 +184,15 @@ public String getCreatedBy() { return createdBy; } + public CatalogInfo setDrReplicationInfo(DrReplicationInfo drReplicationInfo) { + this.drReplicationInfo = drReplicationInfo; + return this; + } + + public DrReplicationInfo getDrReplicationInfo() { + return drReplicationInfo; + } + public CatalogInfo setEffectivePredictiveOptimizationFlag( EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag) { this.effectivePredictiveOptimizationFlag = effectivePredictiveOptimizationFlag; @@ -331,8 +357,10 @@ public boolean equals(Object o) { && Objects.equals(catalogType, that.catalogType) && Objects.equals(comment, that.comment) && Objects.equals(connectionName, that.connectionName) + && Objects.equals(conversionInfo, that.conversionInfo) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) + && Objects.equals(drReplicationInfo, that.drReplicationInfo) && Objects.equals( effectivePredictiveOptimizationFlag, that.effectivePredictiveOptimizationFlag) && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) @@ -360,8 +388,10 @@ public int hashCode() { catalogType, comment, connectionName, + conversionInfo, createdAt, createdBy, + drReplicationInfo, effectivePredictiveOptimizationFlag, enablePredictiveOptimization, fullName, @@ -388,8 +418,10 @@ public String toString() { .add("catalogType", catalogType) .add("comment", comment) .add("connectionName", connectionName) + .add("conversionInfo", conversionInfo) .add("createdAt", createdAt) .add("createdBy", createdBy) + .add("drReplicationInfo", drReplicationInfo) .add("effectivePredictiveOptimizationFlag", effectivePredictiveOptimizationFlag) .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("fullName", fullName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java index c5852d711..a9bd905b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java @@ -69,6 +69,14 @@ public CatalogInfo get(GetCatalogRequest request) { * will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has * the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering * of the elements in the array. + * + *
<p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable<CatalogInfo> list(ListCatalogsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java index 870e321f2..26c5dc9fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java @@ -41,6 +41,14 @@ public interface CatalogsService { * will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has * the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering * of the elements in the array. + * + *
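The pagination contract documented above (and repeated for the connections APIs below) can be exercised without manual token handling: the `Paginator` returned by `CatalogsAPI.list` keeps requesting pages, including empty ones, until `next_page_token` is absent. A minimal consumption sketch:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CatalogInfo;
import com.databricks.sdk.service.catalog.ListCatalogsRequest;

public class ListCatalogsSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // reads auth from the environment

    // max_results=0 opts in to server-side pagination, as the NOTE above recommends.
    Iterable<CatalogInfo> catalogs =
        w.catalogs().list(new ListCatalogsRequest().setMaxResults(0L));

    // Iteration transparently follows next_page_token; zero-result pages are skipped.
    for (CatalogInfo c : catalogs) {
      System.out.println(c.getName());
    }
  }
}
```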
<p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListCatalogsResponse list(ListCatalogsRequest listCatalogsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 948ca6bb2..a60688b06 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -35,6 +35,10 @@ public class ConnectionInfo { @JsonProperty("credential_type") private CredentialType credentialType; + /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */ + @JsonProperty("environment_settings") + private EnvironmentSettings environmentSettings; + /** Full name of connection. */ @JsonProperty("full_name") private String fullName; @@ -137,6 +141,15 @@ public CredentialType getCredentialType() { return credentialType; } + public ConnectionInfo setEnvironmentSettings(EnvironmentSettings environmentSettings) { + this.environmentSettings = environmentSettings; + return this; + } + + public EnvironmentSettings getEnvironmentSettings() { + return environmentSettings; + } + public ConnectionInfo setFullName(String fullName) { this.fullName = fullName; return this; @@ -256,6 +269,7 @@ public boolean equals(Object o) { && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) && Objects.equals(credentialType, that.credentialType) + && Objects.equals(environmentSettings, that.environmentSettings) && Objects.equals(fullName, that.fullName) && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) @@ -279,6 +293,7 @@ public int hashCode() { createdAt, createdBy, credentialType, + environmentSettings, fullName, metastoreId, name, @@ -302,6 +317,7 @@ public String toString() { .add("createdAt", createdAt) .add("createdBy", createdBy) .add("credentialType", credentialType) + .add("environmentSettings", environmentSettings) .add("fullName", fullName) .add("metastoreId", metastoreId) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java index f2bdbb8e4..3386cd766 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java @@ -62,7 +62,17 @@ public ConnectionInfo get(GetConnectionRequest request) { return impl.get(request); } - /** List all connections. */ + /** + * List all connections. + * + *
<p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. + */ public Iterable<ConnectionInfo> list(ListConnectionsRequest request) { return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java index 45bea7e36..6800d3aef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java @@ -34,7 +34,17 @@ public interface ConnectionsService { /** Gets a connection from its name. */ ConnectionInfo get(GetConnectionRequest getConnectionRequest); - /** List all connections. */ + /** + * List all connections. + * + *
<p>NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. + */ ListConnectionsResponse list(ListConnectionsRequest listConnectionsRequest); /** Updates the connection that matches the supplied name. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java new file mode 100755 index 000000000..da4dac820 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfo.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status of conversion of FOREIGN entity into UC Native entity. */ +@Generated +public class ConversionInfo { + /** The conversion state of the resource. */ + @JsonProperty("state") + private ConversionInfoState state; + + public ConversionInfo setState(ConversionInfoState state) { + this.state = state; + return this; + } + + public ConversionInfoState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConversionInfo that = (ConversionInfo) o; + return Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(state); + } + + @Override + public String toString() { + return new ToStringer(ConversionInfo.class).add("state", state).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java new file mode 100755 index 000000000..0b3566efa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConversionInfoState.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ConversionInfoState { + COMPLETED, + IN_PROGRESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java new file mode 100755 index 000000000..2d82924c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateAccountsMetastore { + /** The user-specified name of the metastore. */ + @JsonProperty("name") + private String name; + + /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). 
*/ + @JsonProperty("region") + private String region; + + /** The storage root URL for metastore */ + @JsonProperty("storage_root") + private String storageRoot; + + public CreateAccountsMetastore setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateAccountsMetastore setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public CreateAccountsMetastore setStorageRoot(String storageRoot) { + this.storageRoot = storageRoot; + return this; + } + + public String getStorageRoot() { + return storageRoot; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAccountsMetastore that = (CreateAccountsMetastore) o; + return Objects.equals(name, that.name) + && Objects.equals(region, that.region) + && Objects.equals(storageRoot, that.storageRoot); + } + + @Override + public int hashCode() { + return Objects.hash(name, region, storageRoot); + } + + @Override + public String toString() { + return new ToStringer(CreateAccountsMetastore.class) + .add("name", name) + .add("region", region) + .add("storageRoot", storageRoot) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java new file mode 100755 index 000000000..8e636a900 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsStorageCredential.java @@ -0,0 +1,167 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateAccountsStorageCredential { + /** The AWS IAM role configuration. */ + @JsonProperty("aws_iam_role") + private AwsIamRoleRequest awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityRequest azureManagedIdentity; + + /** The Azure service principal configuration. */ + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + /** The Cloudflare API token configuration. */ + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** The Databricks managed GCP service account configuration. */ + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; + + /** + * The credential name. The name must be unique among storage and service credentials within the + * metastore. + */ + @JsonProperty("name") + private String name; + + /** + * Whether the credential is usable only for read operations. Only applicable when purpose is + * **STORAGE**. 
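Returning to `CreateAccountsMetastore` just above: it nests inside `AccountsCreateMetastore`, and per the earlier rename the create call now yields `AccountsCreateMetastoreResponse`. A sketch, assuming the `AccountClient#metastores()` accessor and a placeholder bucket:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsCreateMetastore;
import com.databricks.sdk.service.catalog.AccountsCreateMetastoreResponse;
import com.databricks.sdk.service.catalog.CreateAccountsMetastore;

public class CreateMetastoreSketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();

    AccountsCreateMetastoreResponse response =
        account
            .metastores() // assumed accessor for the account metastores service
            .create(
                new AccountsCreateMetastore()
                    .setMetastoreInfo(
                        new CreateAccountsMetastore()
                            .setName("primary-metastore")
                            .setRegion("us-west-2")
                            .setStorageRoot("s3://my-bucket/metastore"))); // placeholder

    System.out.println(response.getMetastoreInfo().getMetastoreId());
  }
}
```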
+ */ + @JsonProperty("read_only") + private Boolean readOnly; + + public CreateAccountsStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleRequest getAwsIamRole() { + return awsIamRole; + } + + public CreateAccountsStorageCredential setAzureManagedIdentity( + AzureManagedIdentityRequest azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityRequest getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CreateAccountsStorageCredential setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public CreateAccountsStorageCredential setCloudflareApiToken( + CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + public CloudflareApiToken getCloudflareApiToken() { + return cloudflareApiToken; + } + + public CreateAccountsStorageCredential setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateAccountsStorageCredential setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public CreateAccountsStorageCredential setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateAccountsStorageCredential setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateAccountsStorageCredential that = (CreateAccountsStorageCredential) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(name, that.name) + && Objects.equals(readOnly, that.readOnly); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + comment, + databricksGcpServiceAccount, + name, + readOnly); + } + + @Override + public String toString() { + return new ToStringer(CreateAccountsStorageCredential.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("name", name) + .add("readOnly", readOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java index 
2d8d187df..de43b40bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java @@ -18,6 +18,14 @@ public class CreateCatalog { @JsonProperty("connection_name") private String connectionName; + /** Status of conversion of FOREIGN catalog to UC Native catalog. */ + @JsonProperty("conversion_info") + private ConversionInfo conversionInfo; + + /** Disaster Recovery replication state snapshot. */ + @JsonProperty("dr_replication_info") + private DrReplicationInfo drReplicationInfo; + /** Name of catalog. */ @JsonProperty("name") private String name; @@ -65,6 +73,24 @@ public String getConnectionName() { return connectionName; } + public CreateCatalog setConversionInfo(ConversionInfo conversionInfo) { + this.conversionInfo = conversionInfo; + return this; + } + + public ConversionInfo getConversionInfo() { + return conversionInfo; + } + + public CreateCatalog setDrReplicationInfo(DrReplicationInfo drReplicationInfo) { + this.drReplicationInfo = drReplicationInfo; + return this; + } + + public DrReplicationInfo getDrReplicationInfo() { + return drReplicationInfo; + } + public CreateCatalog setName(String name) { this.name = name; return this; @@ -126,6 +152,8 @@ public boolean equals(Object o) { CreateCatalog that = (CreateCatalog) o; return Objects.equals(comment, that.comment) && Objects.equals(connectionName, that.connectionName) + && Objects.equals(conversionInfo, that.conversionInfo) + && Objects.equals(drReplicationInfo, that.drReplicationInfo) && Objects.equals(name, that.name) && Objects.equals(options, that.options) && Objects.equals(properties, that.properties) @@ -137,7 +165,16 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - comment, connectionName, name, options, properties, providerName, shareName, storageRoot); + comment, + connectionName, + conversionInfo, + drReplicationInfo, + name, + options, + properties, + providerName, + shareName, + storageRoot); } @Override @@ -145,6 +182,8 @@ public String toString() { return new ToStringer(CreateCatalog.class) .add("comment", comment) .add("connectionName", connectionName) + .add("conversionInfo", conversionInfo) + .add("drReplicationInfo", drReplicationInfo) .add("name", name) .add("options", options) .add("properties", properties) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java index 3eea7832c..f890b1b6e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java @@ -18,6 +18,10 @@ public class CreateConnection { @JsonProperty("connection_type") private ConnectionType connectionType; + /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */ + @JsonProperty("environment_settings") + private EnvironmentSettings environmentSettings; + /** Name of the connection. 
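With `conversion_info` and `dr_replication_info` now present on both `CatalogInfo` (earlier) and `CreateCatalog` (above), and the backing `ConversionInfo` and `DrReplicationInfo` classes added further down, a catalog's conversion and DR state can be inspected after a plain get. A sketch using an illustrative catalog name:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CatalogInfo;
import com.databricks.sdk.service.catalog.ConversionInfo;
import com.databricks.sdk.service.catalog.ConversionInfoState;
import com.databricks.sdk.service.catalog.DrReplicationInfo;
import com.databricks.sdk.service.catalog.GetCatalogRequest;

public class CatalogStatusSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    CatalogInfo catalog = w.catalogs().get(new GetCatalogRequest().setName("main"));

    // Both fields are optional snapshots; null means no conversion/DR metadata.
    ConversionInfo conversion = catalog.getConversionInfo();
    if (conversion != null && conversion.getState() == ConversionInfoState.IN_PROGRESS) {
      System.out.println("FOREIGN-to-UC-native conversion still in progress");
    }

    DrReplicationInfo dr = catalog.getDrReplicationInfo();
    if (dr != null) {
      System.out.println("DR replication status: " + dr.getStatus());
    }
  }
}
```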
*/ @JsonProperty("name") private String name; @@ -52,6 +56,15 @@ public ConnectionType getConnectionType() { return connectionType; } + public CreateConnection setEnvironmentSettings(EnvironmentSettings environmentSettings) { + this.environmentSettings = environmentSettings; + return this; + } + + public EnvironmentSettings getEnvironmentSettings() { + return environmentSettings; + } + public CreateConnection setName(String name) { this.name = name; return this; @@ -95,6 +108,7 @@ public boolean equals(Object o) { CreateConnection that = (CreateConnection) o; return Objects.equals(comment, that.comment) && Objects.equals(connectionType, that.connectionType) + && Objects.equals(environmentSettings, that.environmentSettings) && Objects.equals(name, that.name) && Objects.equals(options, that.options) && Objects.equals(properties, that.properties) @@ -103,7 +117,8 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(comment, connectionType, name, options, properties, readOnly); + return Objects.hash( + comment, connectionType, environmentSettings, name, options, properties, readOnly); } @Override @@ -111,6 +126,7 @@ public String toString() { return new ToStringer(CreateConnection.class) .add("comment", comment) .add("connectionType", connectionType) + .add("environmentSettings", environmentSettings) .add("name", name) .add("options", options) .add("properties", properties) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java index 83052df1f..56a9b59e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java @@ -9,7 +9,7 @@ @Generated public class CreateFunction { - /** Name of parent catalog. */ + /** Name of parent Catalog. */ @JsonProperty("catalog_name") private String catalogName; @@ -33,7 +33,7 @@ public class CreateFunction { @JsonProperty("full_data_type") private String fullDataType; - /** */ + /** Function input parameters. */ @JsonProperty("input_params") private FunctionParameterInfos inputParams; @@ -63,8 +63,8 @@ public class CreateFunction { /** * Function language. When **EXTERNAL** is used, the language of the routine function should be - * specified in the __external_language__ field, and the __return_params__ of the function cannot - * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + * specified in the **external_language** field, and the **return_params** of the function cannot + * be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be * **NO_SQL**. */ @JsonProperty("routine_body") @@ -74,11 +74,11 @@ public class CreateFunction { @JsonProperty("routine_definition") private String routineDefinition; - /** Function dependencies. */ + /** function dependencies. */ @JsonProperty("routine_dependencies") private DependencyList routineDependencies; - /** Name of parent schema relative to its parent catalog. */ + /** Name of parent Schema relative to its parent Catalog. 
*/ @JsonProperty("schema_name") private String schemaName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java index 691b8e514..721bb01f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Function parameter style. **S** is the value for SQL. */ @Generated public enum CreateFunctionParameterStyle { S, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java index 6132a4c2a..f5b1b42e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRoutineBody.java @@ -4,12 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Function language. When **EXTERNAL** is used, the language of the routine function should be - * specified in the __external_language__ field, and the __return_params__ of the function cannot be - * used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - * **NO_SQL**. - */ @Generated public enum CreateFunctionRoutineBody { EXTERNAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java index a0b13a4ee..480b1279a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The security type of the function. */ @Generated public enum CreateFunctionSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java index d8cb91987..28cb1b373 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Function SQL data access. 
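The `routine_body` documentation above encodes a concrete constraint: an **EXTERNAL** function must supply `external_language`, cannot use `return_params`, and must declare `sql_data_access` as **NO_SQL**. An abbreviated sketch of a conforming `CreateFunction` payload (required fields such as the return type and input parameters are omitted for brevity):

```java
import com.databricks.sdk.service.catalog.CreateFunction;
import com.databricks.sdk.service.catalog.CreateFunctionRoutineBody;
import com.databricks.sdk.service.catalog.CreateFunctionSqlDataAccess;

public class ExternalFunctionSketch {
  public static void main(String[] args) {
    // Abbreviated: data_type, full_data_type, input_params, etc. are also
    // required by the real API but omitted here.
    CreateFunction fn =
        new CreateFunction()
            .setCatalogName("main")
            .setSchemaName("default")
            .setName("upper_udf")
            .setRoutineBody(CreateFunctionRoutineBody.EXTERNAL)
            .setExternalLanguage("Python") // required when routine_body is EXTERNAL
            .setSqlDataAccess(CreateFunctionSqlDataAccess.NO_SQL) // mandated for EXTERNAL
            .setRoutineDefinition("return s.upper()");

    System.out.println(fn);
  }
}
```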
*/ @Generated public enum CreateFunctionSqlDataAccess { CONTAINS_SQL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java index 520b0f60a..71a3650f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java @@ -5,10 +5,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class CreateRegisteredModelRequest { + /** List of aliases associated with the registered model */ + @JsonProperty("aliases") + private Collection<RegisteredModelAlias> aliases; + + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** The name of the catalog where the schema and the registered model reside */ @JsonProperty("catalog_name") private String catalogName; @@ -17,10 +29,30 @@ public class CreateRegisteredModelRequest { @JsonProperty("comment") private String comment; + /** Creation timestamp of the registered model in milliseconds since the Unix epoch */ + @JsonProperty("created_at") + private Long createdAt; + + /** The identifier of the user who created the registered model */ + @JsonProperty("created_by") + private String createdBy; + + /** The three-level (fully qualified) name of the registered model */ + @JsonProperty("full_name") + private String fullName; + + /** The unique identifier of the metastore */ + @JsonProperty("metastore_id") + private String metastoreId; + /** The name of the registered model */ @JsonProperty("name") private String name; + /** The identifier of the user who owns the registered model */ + @JsonProperty("owner") + private String owner; + /** The name of the schema where the registered model resides */ @JsonProperty("schema_name") private String schemaName; @@ -29,6 +61,32 @@ public class CreateRegisteredModelRequest { @JsonProperty("storage_location") private String storageLocation; + /** Last-update timestamp of the registered model in milliseconds since the Unix epoch */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** The identifier of the user who updated the registered model last time */ + @JsonProperty("updated_by") + private String updatedBy; + + public CreateRegisteredModelRequest setAliases(Collection<RegisteredModelAlias> aliases) { + this.aliases = aliases; + return this; + } + + public Collection<RegisteredModelAlias> getAliases() { + return aliases; + } + + public CreateRegisteredModelRequest setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public CreateRegisteredModelRequest setCatalogName(String catalogName) { this.catalogName = catalogName; return this; } @@ -47,6 +105,42 @@ public String getComment() { return comment; } + public CreateRegisteredModelRequest setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CreateRegisteredModelRequest setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public
String getCreatedBy() { + return createdBy; + } + + public CreateRegisteredModelRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CreateRegisteredModelRequest setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + public CreateRegisteredModelRequest setName(String name) { this.name = name; return this; @@ -56,6 +150,15 @@ public String getName() { return name; } + public CreateRegisteredModelRequest setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + public CreateRegisteredModelRequest setSchemaName(String schemaName) { this.schemaName = schemaName; return this; @@ -74,31 +177,81 @@ public String getStorageLocation() { return storageLocation; } + public CreateRegisteredModelRequest setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public CreateRegisteredModelRequest setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateRegisteredModelRequest that = (CreateRegisteredModelRequest) o; - return Objects.equals(catalogName, that.catalogName) + return Objects.equals(aliases, that.aliases) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) && Objects.equals(schemaName, that.schemaName) - && Objects.equals(storageLocation, that.storageLocation); + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); } @Override public int hashCode() { - return Objects.hash(catalogName, comment, name, schemaName, storageLocation); + return Objects.hash( + aliases, + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + fullName, + metastoreId, + name, + owner, + schemaName, + storageLocation, + updatedAt, + updatedBy); } @Override public String toString() { return new ToStringer(CreateRegisteredModelRequest.class) + .add("aliases", aliases) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("fullName", fullName) + .add("metastoreId", metastoreId) .add("name", name) + .add("owner", owner) .add("schemaName", schemaName) .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java index 16f0ebbc6..bbe39faf4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java @@ -29,7 +29,13 @@ public class CreateVolumeRequestContent { @JsonProperty("storage_location") private String storageLocation; - /** */ + /** + * The type of the volume. An external volume is located in the specified external location. A + * managed volume is located in the default location which is specified by the parent schema, or + * the parent catalog, or the Metastore. [Learn more] + * + *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java index 3609ad11f..35a82e3ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java @@ -17,7 +17,7 @@ public class DeleteFunctionRequest { /** * The fully-qualified name of the function (of the form - * __catalog_name__.__schema_name__.__function__name__). + * __catalog_name__.__schema_name__.__function__name__) . */ @JsonIgnore private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java new file mode 100755 index 000000000..e9997e150 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationInfo.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metadata related to Disaster Recovery. */ +@Generated +public class DrReplicationInfo { + /** */ + @JsonProperty("status") + private DrReplicationStatus status; + + public DrReplicationInfo setStatus(DrReplicationStatus status) { + this.status = status; + return this; + } + + public DrReplicationStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DrReplicationInfo that = (DrReplicationInfo) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } + + @Override + public String toString() { + return new ToStringer(DrReplicationInfo.class).add("status", status).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java new file mode 100755 index 000000000..804977116 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DrReplicationStatus.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum DrReplicationStatus { + DR_REPLICATION_STATUS_PRIMARY, + DR_REPLICATION_STATUS_SECONDARY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java new file mode 100755 index 000000000..1e8e3a92e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnvironmentSettings.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class EnvironmentSettings { + /** */ + @JsonProperty("environment_version") + private String environmentVersion; + + /** */ + @JsonProperty("java_dependencies") + private Collection javaDependencies; + + public EnvironmentSettings setEnvironmentVersion(String environmentVersion) { + this.environmentVersion = environmentVersion; + return this; + } + + public String getEnvironmentVersion() { + return environmentVersion; + } + + public EnvironmentSettings setJavaDependencies(Collection javaDependencies) { + this.javaDependencies = javaDependencies; + return this; + } + + public Collection getJavaDependencies() { + return javaDependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnvironmentSettings that = (EnvironmentSettings) o; + return Objects.equals(environmentVersion, that.environmentVersion) + && Objects.equals(javaDependencies, that.javaDependencies); + } + + @Override + public int hashCode() { + return Objects.hash(environmentVersion, javaDependencies); + } + + @Override + public String toString() { + return new ToStringer(EnvironmentSettings.class) + .add("environmentVersion", environmentVersion) + .add("javaDependencies", javaDependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java index 49699b629..1f5fbae89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java @@ -73,6 +73,14 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) { * caller must be a metastore admin, the owner of the external location, or a user that has some * privilege on the external location. There is no guarantee of a specific ordering of the * elements in the array. + * + *
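To illustrate the CreateVolumeRequestContent change earlier in this diff: the new volume_type doc distinguishes managed volumes (storage location derived from the parent schema, catalog, or metastore) from external ones. A minimal sketch of creating a managed volume, assuming a WorkspaceClient with ambient authentication and hypothetical catalog/schema/volume names:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CreateVolumeRequestContent;
import com.databricks.sdk.service.catalog.VolumeType;

public class CreateManagedVolume {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // ambient auth assumed
    // MANAGED derives its storage location from the parent schema, catalog,
    // or metastore, so no storage_location is set. An EXTERNAL volume would
    // additionally call setStorageLocation(...) with an external location path.
    w.volumes()
        .create(
            new CreateVolumeRequestContent()
                .setCatalogName("main") // hypothetical
                .setSchemaName("default") // hypothetical
                .setName("raw_files") // hypothetical
                .setVolumeType(VolumeType.MANAGED));
  }
}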

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListExternalLocationsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java index f42879409..fcfe5cc8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java @@ -45,6 +45,14 @@ public interface ExternalLocationsService { * caller must be a metastore admin, the owner of the external location, or a user that has some * privilege on the external location. There is no guarantee of a specific ordering of the * elements in the array. + * + *
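The pagination contract documented above (keep reading pages until next_page_token is absent, tolerating empty pages) is handled on the client side by the Paginator visible in this hunk, so callers of the high-level API just iterate. A minimal sketch, assuming a WorkspaceClient with ambient authentication; the same pattern applies to the other list methods that gained this note in this diff:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ExternalLocationInfo;
import com.databricks.sdk.service.catalog.ListExternalLocationsRequest;

public class ListAllExternalLocations {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // ambient auth assumed
    // max_results=0 opts into server-side pagination, per the NOTE above.
    ListExternalLocationsRequest req = new ListExternalLocationsRequest().setMaxResults(0L);
    // The Iterable is backed by Paginator, which keeps requesting pages
    // (including empty ones) until next_page_token is absent.
    for (ExternalLocationInfo loc : w.externalLocations().list(req)) {
      System.out.println(loc.getName());
    }
  }
}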

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java index 02b5d835d..69ca56bd1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java @@ -16,7 +16,7 @@ public class FunctionInfo { @JsonProperty("browse_only") private Boolean browseOnly; - /** Name of parent catalog. */ + /** Name of parent Catalog. */ @JsonProperty("catalog_name") private String catalogName; @@ -48,7 +48,7 @@ public class FunctionInfo { @JsonProperty("full_data_type") private String fullDataType; - /** Full name of function, in form of __catalog_name__.__schema_name__.__function__name__ */ + /** Full name of Function, in form of **catalog_name**.**schema_name**.**function_name** */ @JsonProperty("full_name") private String fullName; @@ -56,7 +56,7 @@ public class FunctionInfo { @JsonProperty("function_id") private String functionId; - /** */ + /** Function input parameters. */ @JsonProperty("input_params") private FunctionParameterInfos inputParams; @@ -76,7 +76,7 @@ public class FunctionInfo { @JsonProperty("name") private String name; - /** Username of current owner of function. */ + /** Username of current owner of the function. */ @JsonProperty("owner") private String owner; @@ -94,8 +94,8 @@ public class FunctionInfo { /** * Function language. When **EXTERNAL** is used, the language of the routine function should be - * specified in the __external_language__ field, and the __return_params__ of the function cannot - * be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + * specified in the **external_language** field, and the **return_params** of the function cannot + * be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be * **NO_SQL**. */ @JsonProperty("routine_body") @@ -105,11 +105,11 @@ public class FunctionInfo { @JsonProperty("routine_definition") private String routineDefinition; - /** Function dependencies. */ + /** function dependencies. */ @JsonProperty("routine_dependencies") private DependencyList routineDependencies; - /** Name of parent schema relative to its parent catalog. */ + /** Name of parent Schema relative to its parent Catalog. */ @JsonProperty("schema_name") private String schemaName; @@ -129,11 +129,11 @@ public class FunctionInfo { @JsonProperty("sql_path") private String sqlPath; - /** Time at which this function was created, in epoch milliseconds. */ + /** Time at which this function was last modified, in epoch milliseconds. */ @JsonProperty("updated_at") private Long updatedAt; - /** Username of user who last modified function. */ + /** Username of user who last modified the function. 
*/ @JsonProperty("updated_by") private String updatedBy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java index fab71fe1e..608574f72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Function parameter style. **S** is the value for SQL. */ @Generated public enum FunctionInfoParameterStyle { S, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java index 24f8266e0..f69f1f670 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoRoutineBody.java @@ -4,12 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Function language. When **EXTERNAL** is used, the language of the routine function should be - * specified in the __external_language__ field, and the __return_params__ of the function cannot be - * used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - * **NO_SQL**. - */ @Generated public enum FunctionInfoRoutineBody { EXTERNAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java index 5b45675b4..ce6545a69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The security type of the function. */ @Generated public enum FunctionInfoSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java index 69b362394..fee8adcc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSqlDataAccess.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Function SQL data access. */ @Generated public enum FunctionInfoSqlDataAccess { CONTAINS_SQL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java index 7e41e1dc0..ce5724ef9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java @@ -13,7 +13,7 @@ public class FunctionParameterInfo { @JsonProperty("comment") private String comment; - /** Name of parameter. */ + /** Name of Parameter. 
*/ @JsonProperty("name") private String name; @@ -21,11 +21,11 @@ public class FunctionParameterInfo { @JsonProperty("parameter_default") private String parameterDefault; - /** */ + /** Function parameter mode. */ @JsonProperty("parameter_mode") private FunctionParameterMode parameterMode; - /** */ + /** Function parameter type. */ @JsonProperty("parameter_type") private FunctionParameterType parameterType; @@ -41,7 +41,7 @@ public class FunctionParameterInfo { @JsonProperty("type_json") private String typeJson; - /** */ + /** Name of type (INT, STRUCT, MAP, etc.) */ @JsonProperty("type_name") private ColumnTypeName typeName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java index 8242101b9..f2941005c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfos.java @@ -10,7 +10,7 @@ @Generated public class FunctionParameterInfos { - /** The array of __FunctionParameterInfo__ definitions of the function's parameters. */ + /** */ @JsonProperty("parameters") private Collection parameters; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java index 731e91b2c..48a9a1870 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterMode.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The mode of the function parameter. */ @Generated public enum FunctionParameterMode { IN, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java index 7e930bcd6..046f5037f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The type of function parameter. */ @Generated public enum FunctionParameterType { COLUMN, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java index ac2c1815a..f759c65bf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java @@ -86,6 +86,14 @@ public Iterable list(String catalogName, String schemaName) { * the output list contains only functions for which either the user has the **EXECUTE** privilege * or the user is the owner. There is no guarantee of a specific ordering of the elements in the * array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListFunctionsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java index 387db0b64..2f7c48378 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteFunctionRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java index a88771087..3fbcccaa8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java @@ -54,6 +54,14 @@ public interface FunctionsService { * the output list contains only functions for which either the user has the **EXECUTE** privilege * or the user is the owner. There is no guarantee of a specific ordering of the elements in the * array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java index 29c20c2fe..54943217b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java @@ -12,7 +12,7 @@ public class GetAccountStorageCredentialRequest { /** Unity Catalog metastore ID */ @JsonIgnore private String metastoreId; - /** Name of the storage credential. */ + /** Required. Name of the storage credential. */ @JsonIgnore private String storageCredentialName; public GetAccountStorageCredentialRequest setMetastoreId(String metastoreId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java index 59e2565c2..4e83c75d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java @@ -13,6 +13,11 @@ public class GetGrantRequest { /** Full name of securable. */ @JsonIgnore private String fullName; + /** Optional. If true, also return privilege assignments whose principals have been deleted. */ + @JsonIgnore + @QueryParam("include_deleted_principals") + private Boolean includeDeletedPrincipals; + /** * Specifies the maximum number of privileges to return (page length). 
Every PrivilegeAssignment * present in a single page response is guaranteed to contain all the privileges granted on the @@ -50,6 +55,15 @@ public String getFullName() { return fullName; } + public GetGrantRequest setIncludeDeletedPrincipals(Boolean includeDeletedPrincipals) { + this.includeDeletedPrincipals = includeDeletedPrincipals; + return this; + } + + public Boolean getIncludeDeletedPrincipals() { + return includeDeletedPrincipals; + } + public GetGrantRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -92,6 +106,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GetGrantRequest that = (GetGrantRequest) o; return Objects.equals(fullName, that.fullName) + && Objects.equals(includeDeletedPrincipals, that.includeDeletedPrincipals) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken) && Objects.equals(principal, that.principal) @@ -100,13 +115,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(fullName, maxResults, pageToken, principal, securableType); + return Objects.hash( + fullName, includeDeletedPrincipals, maxResults, pageToken, principal, securableType); } @Override public String toString() { return new ToStringer(GetGrantRequest.class) .add("fullName", fullName) + .add("includeDeletedPrincipals", includeDeletedPrincipals) .add("maxResults", maxResults) .add("pageToken", pageToken) .add("principal", principal) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java index 5ca1d4263..be5de6d2e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java @@ -8,7 +8,7 @@ import java.util.Collection; import java.util.Objects; -/** The list of workspaces to which the given metastore is assigned. */ +/** The metastore assignments were successfully returned. */ @Generated public class ListAccountMetastoreAssignmentsResponse { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java index a5da186e4..fde3a512a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsResponse.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** The metastore storage credentials were successfully returned. */ @Generated public class ListAccountStorageCredentialsResponse { /** An array of metastore storage credentials. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java index 05fe12886..758e8afff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java @@ -18,6 +18,14 @@ public class ListCatalogsRequest { @QueryParam("include_browse") private Boolean includeBrowse; + /** + * Whether to include catalogs not bound to the workspace. Effective only if the user has + * permission to update the catalog–workspace binding. + */ + @JsonIgnore + @QueryParam("include_unbound") + private Boolean includeUnbound; + /** * Maximum number of catalogs to return. - when set to 0, the page length is set to a server * configured value (recommended); - when set to a value greater than 0, the page length is the @@ -45,6 +53,15 @@ public Boolean getIncludeBrowse() { return includeBrowse; } + public ListCatalogsRequest setIncludeUnbound(Boolean includeUnbound) { + this.includeUnbound = includeUnbound; + return this; + } + + public Boolean getIncludeUnbound() { + return includeUnbound; + } + public ListCatalogsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -69,19 +86,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListCatalogsRequest that = (ListCatalogsRequest) o; return Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(includeUnbound, that.includeUnbound) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(includeBrowse, maxResults, pageToken); + return Objects.hash(includeBrowse, includeUnbound, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListCatalogsRequest.class) .add("includeBrowse", includeBrowse) + .add("includeUnbound", includeUnbound) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java index 32dfc1888..c9b2c2dc2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java @@ -10,6 +10,14 @@ @Generated public class ListCredentialsRequest { + /** + * Whether to include credentials not bound to the workspace. Effective only if the user has + * permission to update the credential–workspace binding. + */ + @JsonIgnore + @QueryParam("include_unbound") + private Boolean includeUnbound; + /** * Maximum number of credentials to return. - If not set, the default max page size is used. 
- * When set to a value greater than 0, the page length is the minimum of this value and a @@ -30,6 +38,15 @@ public class ListCredentialsRequest { @QueryParam("purpose") private CredentialPurpose purpose; + public ListCredentialsRequest setIncludeUnbound(Boolean includeUnbound) { + this.includeUnbound = includeUnbound; + return this; + } + + public Boolean getIncludeUnbound() { + return includeUnbound; + } + public ListCredentialsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -62,19 +79,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListCredentialsRequest that = (ListCredentialsRequest) o; - return Objects.equals(maxResults, that.maxResults) + return Objects.equals(includeUnbound, that.includeUnbound) + && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken) && Objects.equals(purpose, that.purpose); } @Override public int hashCode() { - return Objects.hash(maxResults, pageToken, purpose); + return Objects.hash(includeUnbound, maxResults, pageToken, purpose); } @Override public String toString() { return new ToStringer(ListCredentialsRequest.class) + .add("includeUnbound", includeUnbound) .add("maxResults", maxResults) .add("pageToken", pageToken) .add("purpose", purpose) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java index ce3805d49..71bfa3314 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java @@ -18,6 +18,14 @@ public class ListExternalLocationsRequest { @QueryParam("include_browse") private Boolean includeBrowse; + /** + * Whether to include external locations not bound to the workspace. Effective only if the user + * has permission to update the location–workspace binding. + */ + @JsonIgnore + @QueryParam("include_unbound") + private Boolean includeUnbound; + /** * Maximum number of external locations to return. If not set, all the external locations are * returned (not recommended). 
- when set to a value greater than 0, the page length is the @@ -43,6 +51,15 @@ public Boolean getIncludeBrowse() { return includeBrowse; } + public ListExternalLocationsRequest setIncludeUnbound(Boolean includeUnbound) { + this.includeUnbound = includeUnbound; + return this; + } + + public Boolean getIncludeUnbound() { + return includeUnbound; + } + public ListExternalLocationsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -67,19 +84,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListExternalLocationsRequest that = (ListExternalLocationsRequest) o; return Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(includeUnbound, that.includeUnbound) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(includeBrowse, maxResults, pageToken); + return Objects.hash(includeBrowse, includeUnbound, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListExternalLocationsRequest.class) .add("includeBrowse", includeBrowse) + .add("includeUnbound", includeUnbound) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java index c91be8012..9f1f82035 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java @@ -10,6 +10,14 @@ @Generated public class ListStorageCredentialsRequest { + /** + * Whether to include credentials not bound to the workspace. Effective only if the user has + * permission to update the credential–workspace binding. + */ + @JsonIgnore + @QueryParam("include_unbound") + private Boolean includeUnbound; + /** * Maximum number of storage credentials to return. If not set, all the storage credentials are * returned (not recommended). 
- when set to a value greater than 0, the page length is the @@ -26,6 +34,15 @@ public class ListStorageCredentialsRequest { @QueryParam("page_token") private String pageToken; + public ListStorageCredentialsRequest setIncludeUnbound(Boolean includeUnbound) { + this.includeUnbound = includeUnbound; + return this; + } + + public Boolean getIncludeUnbound() { + return includeUnbound; + } + public ListStorageCredentialsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -49,17 +66,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListStorageCredentialsRequest that = (ListStorageCredentialsRequest) o; - return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + return Objects.equals(includeUnbound, that.includeUnbound) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(maxResults, pageToken); + return Objects.hash(includeUnbound, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListStorageCredentialsRequest.class) + .add("includeUnbound", includeUnbound) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java index 0d9640d5f..3f78acc3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java @@ -9,7 +9,10 @@ @Generated public class MetastoreAssignment { - /** The name of the default catalog in the metastore. */ + /** + * The name of the default catalog in the metastore. This field is deprecated. Please use "Default + * Namespace API" to configure the default catalog for a Databricks workspace. + */ @JsonProperty("default_catalog_name") private String defaultCatalogName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index e9d5011e1..ef832d957 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -85,6 +85,14 @@ public MetastoreInfo get(GetMetastoreRequest request) { * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an * admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. + * + *
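The include_unbound flag added above to ListCatalogsRequest, ListCredentialsRequest, ListExternalLocationsRequest, and ListStorageCredentialsRequest behaves identically in each: it is honored only when the caller may update the corresponding workspace binding. A minimal sketch against the catalogs variant, assuming a WorkspaceClient with ambient authentication:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CatalogInfo;
import com.databricks.sdk.service.catalog.ListCatalogsRequest;

public class ListCatalogsIncludingUnbound {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // ambient auth assumed
    // include_unbound only takes effect if the caller has permission to
    // update the catalog-workspace binding; otherwise it is ignored.
    ListCatalogsRequest req = new ListCatalogsRequest().setIncludeUnbound(true);
    for (CatalogInfo c : w.catalogs().list(req)) {
      System.out.println(c.getName());
    }
  }
}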

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListMetastoresRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java index 2f6f582ca..ff228ee46 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java @@ -53,6 +53,14 @@ public interface MetastoresService { * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an * admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListMetastoresResponse list(ListMetastoresRequest listMetastoresRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java index 8dbd67ae1..dd2cc00b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java @@ -14,13 +14,6 @@ public class ModelVersionInfo { @JsonProperty("aliases") private Collection aliases; - /** - * Indicates whether the principal is limited to retrieving metadata for the associated object - * through the BROWSE privilege when include_browse is enabled in the request. - */ - @JsonProperty("browse_only") - private Boolean browseOnly; - /** The name of the catalog containing the model version */ @JsonProperty("catalog_name") private String catalogName; @@ -109,15 +102,6 @@ public Collection getAliases() { return aliases; } - public ModelVersionInfo setBrowseOnly(Boolean browseOnly) { - this.browseOnly = browseOnly; - return this; - } - - public Boolean getBrowseOnly() { - return browseOnly; - } - public ModelVersionInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -277,7 +261,6 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ModelVersionInfo that = (ModelVersionInfo) o; return Objects.equals(aliases, that.aliases) - && Objects.equals(browseOnly, that.browseOnly) && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) @@ -301,7 +284,6 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( aliases, - browseOnly, catalogName, comment, createdAt, @@ -325,7 +307,6 @@ public int hashCode() { public String toString() { return new ToStringer(ModelVersionInfo.class) .add("aliases", aliases) - .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) .add("createdAt", createdAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java index ffbbbc6a5..5ff0b7b8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfoStatus.java @@ -4,14 +4,10 @@ import com.databricks.sdk.support.Generated; -/** - * Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - * status, then move to READY status once the model version files are uploaded and the model version - * is finalized. Only model versions in READY status can be loaded for inference or served. 
- */ @Generated public enum ModelVersionInfoStatus { FAILED_REGISTRATION, + MODEL_VERSION_STATUS_UNKNOWN, PENDING_REGISTRATION, READY, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java index cbd425817..9860e1026 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java @@ -21,6 +21,18 @@ public class PermissionsChange { @JsonProperty("principal") private String principal; + /** + * An opaque internal ID that identifies the principal whose privileges should be removed. + * + *

This field is intended for removing privileges associated with a deleted user. When set, + * only the entries specified in the remove field are processed; any entries in the add field will + * be rejected. + * + *
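A minimal sketch of the deleted-principal cleanup flow described above, using only the setters shown in this hunk. The principal ID would come from a grants listing made with include_deleted_principals=true (see the GetGrantRequest change earlier in this diff) and is hypothetical here; per the constraint stated just below, principal is left unset when principal_id is used:

import com.databricks.sdk.service.catalog.PermissionsChange;
import com.databricks.sdk.service.catalog.Privilege;
import java.util.List;

public class RemoveDeletedPrincipalGrants {
  public static void main(String[] args) {
    // When principal_id is set, only 'remove' entries are processed;
    // any 'add' entries would be rejected by the server.
    PermissionsChange change =
        new PermissionsChange()
            .setPrincipalId(12345L) // hypothetical ID from a grants listing
            .setRemove(List.of(Privilege.SELECT));
    System.out.println(change);
    // The change would then be submitted through the Grants update endpoint;
    // the exact request wrapper is omitted here.
  }
}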

Only one of principal or principal_id should be specified, never both at the same time. + */ + @JsonProperty("principal_id") + private Long principalId; + /** The set of privileges to remove. */ @JsonProperty("remove") private Collection remove; @@ -43,6 +55,15 @@ public String getPrincipal() { return principal; } + public PermissionsChange setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + public PermissionsChange setRemove(Collection remove) { this.remove = remove; return this; @@ -59,12 +80,13 @@ public boolean equals(Object o) { PermissionsChange that = (PermissionsChange) o; return Objects.equals(add, that.add) && Objects.equals(principal, that.principal) + && Objects.equals(principalId, that.principalId) && Objects.equals(remove, that.remove); } @Override public int hashCode() { - return Objects.hash(add, principal, remove); + return Objects.hash(add, principal, principalId, remove); } @Override @@ -72,6 +94,7 @@ public String toString() { return new ToStringer(PermissionsChange.class) .add("add", add) .add("principal", principal) + .add("principalId", principalId) .add("remove", remove) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java index 9d4ea5c05..5b76c8967 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java @@ -34,6 +34,7 @@ public enum Privilege { CREATE_VOLUME, EXECUTE, EXECUTE_CLEAN_ROOM_TASK, + EXTERNAL_USE_SCHEMA, MANAGE, MANAGE_ALLOWLIST, MODIFY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java index 3781e98f5..4dd3f9910 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java @@ -17,6 +17,13 @@ public class PrivilegeAssignment { @JsonProperty("principal") private String principal; + /** + * Unique identifier of the principal. For active principals, both `principal` and `principal_id` + * are present. + */ + @JsonProperty("principal_id") + private Long principalId; + /** The privileges assigned to the principal. 
*/ @JsonProperty("privileges") private Collection privileges; @@ -30,6 +37,15 @@ public String getPrincipal() { return principal; } + public PrivilegeAssignment setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + public PrivilegeAssignment setPrivileges(Collection privileges) { this.privileges = privileges; return this; @@ -44,18 +60,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PrivilegeAssignment that = (PrivilegeAssignment) o; - return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges); + return Objects.equals(principal, that.principal) + && Objects.equals(principalId, that.principalId) + && Objects.equals(privileges, that.privileges); } @Override public int hashCode() { - return Objects.hash(principal, privileges); + return Objects.hash(principal, principalId, privileges); } @Override public String toString() { return new ToStringer(PrivilegeAssignment.class) .add("principal", principal) + .add("principalId", principalId) .add("privileges", privileges) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java index 3bfde7bc5..043755b1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelAlias.java @@ -7,13 +7,28 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Registered model alias. */ @Generated public class RegisteredModelAlias { /** Name of the alias, e.g. 'champion' or 'latest_stable' */ @JsonProperty("alias_name") private String aliasName; + /** The name of the catalog containing the model version */ + @JsonProperty("catalog_name") + private String catalogName; + + /** The unique identifier of the alias */ + @JsonProperty("id") + private String id; + + /** The name of the parent registered model of the model version, relative to parent schema */ + @JsonProperty("model_name") + private String modelName; + + /** The name of the schema containing the model version, relative to parent catalog */ + @JsonProperty("schema_name") + private String schemaName; + /** Integer version number of the model version to which this alias points. 
*/ @JsonProperty("version_num") private Long versionNum; @@ -27,6 +42,42 @@ public String getAliasName() { return aliasName; } + public RegisteredModelAlias setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public RegisteredModelAlias setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public RegisteredModelAlias setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public RegisteredModelAlias setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + public RegisteredModelAlias setVersionNum(Long versionNum) { this.versionNum = versionNum; return this; @@ -41,18 +92,27 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RegisteredModelAlias that = (RegisteredModelAlias) o; - return Objects.equals(aliasName, that.aliasName) && Objects.equals(versionNum, that.versionNum); + return Objects.equals(aliasName, that.aliasName) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(id, that.id) + && Objects.equals(modelName, that.modelName) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(versionNum, that.versionNum); } @Override public int hashCode() { - return Objects.hash(aliasName, versionNum); + return Objects.hash(aliasName, catalogName, id, modelName, schemaName, versionNum); } @Override public String toString() { return new ToStringer(RegisteredModelAlias.class) .add("aliasName", aliasName) + .add("catalogName", catalogName) + .add("id", id) + .add("modelName", modelName) + .add("schemaName", schemaName) .add("versionNum", versionNum) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java index 608438656..e5c508180 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java @@ -30,8 +30,8 @@ * metadata (comments, aliases) create a new model version, or update permissions on the registered * model, users must be owners of the registered model. * - *

Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) - * that specify a securable type, use "FUNCTION" as the securable type. + *

Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants) + * that specify a securable type, use FUNCTION as the securable type. */ @Generated public class RegisteredModelsAPI { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java index a03772d04..ccc99737b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java @@ -26,8 +26,8 @@ * metadata (comments, aliases) create a new model version, or update permissions on the registered * model, users must be owners of the registered model. * - *

Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) - * that specify a securable type, use "FUNCTION" as the securable type. + *

Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants) + * that specify a securable type, use FUNCTION as the securable type. * *

This is the high-level interface, that contains generated methods. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java index f8657471a..3edf06b15 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java @@ -70,6 +70,14 @@ public Iterable list(String catalogName) { * or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, * only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) * will be retrieved. There is no guarantee of a specific ordering of the elements in the array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListSchemasRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java index abe123cb5..74b235095 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java @@ -38,6 +38,14 @@ public interface SchemasService { * or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, * only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) * will be retrieved. There is no guarantee of a specific ordering of the elements in the array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListSchemasResponse list(ListSchemasRequest listSchemasRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java index fd09c0225..a02ad9204 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Latest kind: CONNECTION_PALANTIR_OAUTH_M2M = 263; Next id:264 */ +/** Latest kind: CONNECTION_REDSHIFT_IAM = 265; Next id:266 */ @Generated public enum SecurableKind { TABLE_DB_STORAGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java index 3c6c39fde..a024d5ded 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java @@ -4,18 +4,17 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class SetRegisteredModelAliasRequest { /** The name of the alias */ - @JsonProperty("alias") - private String alias; + @JsonIgnore private String alias; - /** Full name of the registered model */ - @JsonProperty("full_name") - private String fullName; + /** The three-level (fully qualified) name of the registered model */ + @JsonIgnore private String fullName; /** The version number of the model version to which the alias points */ @JsonProperty("version_num") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java index 2ce220029..89ee608ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java @@ -75,6 +75,14 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) { * limited to only those storage credentials the caller has permission to access. If the caller is * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a * specific ordering of the elements in the array. + * + *
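The SetRegisteredModelAliasRequest change above moves alias and full_name out of the JSON body (hence @JsonIgnore) and into the URL path, so only version_num is serialized. A minimal sketch, assuming the generated setAlias method on RegisteredModelsAPI, ambient authentication, and hypothetical names:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.SetRegisteredModelAliasRequest;

public class SetChampionAlias {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // ambient auth assumed
    // alias and full_name travel in the URL path; only version_num
    // is sent in the request body after this change.
    w.registeredModels()
        .setAlias(
            new SetRegisteredModelAliasRequest()
                .setFullName("main.default.my_model") // hypothetical
                .setAlias("champion")
                .setVersionNum(2L));
  }
}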

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListStorageCredentialsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java index b403f3e83..4687ed10e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java @@ -47,6 +47,14 @@ public interface StorageCredentialsService { * limited to only those storage credentials the caller has permission to access. If the caller is * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a * specific ordering of the elements in the array. + * + *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *

PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java index d6784e1e6..a62bef4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java @@ -55,6 +55,14 @@ public Iterable list(String metastoreId) { /** * Gets an array of system schemas for a metastore. The caller must be an account admin or a * metastore admin. + * + *
<p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListSystemSchemasRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java index 01ce9aa13..59cf8627b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java @@ -29,6 +29,14 @@ public interface SystemSchemasService { /** * Gets an array of system schemas for a metastore. The caller must be an account admin or a * metastore admin. + * + *
<p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListSystemSchemasResponse list(ListSystemSchemasRequest listSystemSchemasRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java index 324e1b850..527a800b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java @@ -25,6 +25,7 @@ public enum SystemType { SAP, SERVICENOW, SNOWFLAKE, + STREAM_NATIVE, TABLEAU, TERADATA, WORKDAY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java index 71c7e2a1b..7ae3a0063 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java @@ -116,6 +116,14 @@ public Iterable list(String catalogName, String schemaName) { * table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. + * + *
<p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ public Iterable list(ListTablesRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java index 6c08d2bc0..0f516bcd3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java @@ -78,6 +78,14 @@ public interface TablesService { * table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. + * + *
<p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated + * calls will be deprecated soon. + * + *
<p>
PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero + * results while still providing a next_page_token. Clients must continue reading pages until + * next_page_token is absent, which is the only indication that the end of results has been + * reached. This behavior follows Google AIP-158 guidelines. */ ListTablesResponse list(ListTablesRequest listTablesRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java new file mode 100755 index 000000000..215f0eacf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateAccountsMetastore { + /** + * The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta + * Sharing as the official name. + */ + @JsonProperty("delta_sharing_organization_name") + private String deltaSharingOrganizationName; + + /** The lifetime of delta sharing recipient token in seconds. */ + @JsonProperty("delta_sharing_recipient_token_lifetime_in_seconds") + private Long deltaSharingRecipientTokenLifetimeInSeconds; + + /** The scope of Delta Sharing enabled for the metastore. */ + @JsonProperty("delta_sharing_scope") + private DeltaSharingScopeEnum deltaSharingScope; + + /** The owner of the metastore. */ + @JsonProperty("owner") + private String owner; + + /** Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). */ + @JsonProperty("privilege_model_version") + private String privilegeModelVersion; + + /** UUID of storage credential to access the metastore storage_root. 
*/ + @JsonProperty("storage_root_credential_id") + private String storageRootCredentialId; + + public UpdateAccountsMetastore setDeltaSharingOrganizationName( + String deltaSharingOrganizationName) { + this.deltaSharingOrganizationName = deltaSharingOrganizationName; + return this; + } + + public String getDeltaSharingOrganizationName() { + return deltaSharingOrganizationName; + } + + public UpdateAccountsMetastore setDeltaSharingRecipientTokenLifetimeInSeconds( + Long deltaSharingRecipientTokenLifetimeInSeconds) { + this.deltaSharingRecipientTokenLifetimeInSeconds = deltaSharingRecipientTokenLifetimeInSeconds; + return this; + } + + public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { + return deltaSharingRecipientTokenLifetimeInSeconds; + } + + public UpdateAccountsMetastore setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { + this.deltaSharingScope = deltaSharingScope; + return this; + } + + public DeltaSharingScopeEnum getDeltaSharingScope() { + return deltaSharingScope; + } + + public UpdateAccountsMetastore setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateAccountsMetastore setPrivilegeModelVersion(String privilegeModelVersion) { + this.privilegeModelVersion = privilegeModelVersion; + return this; + } + + public String getPrivilegeModelVersion() { + return privilegeModelVersion; + } + + public UpdateAccountsMetastore setStorageRootCredentialId(String storageRootCredentialId) { + this.storageRootCredentialId = storageRootCredentialId; + return this; + } + + public String getStorageRootCredentialId() { + return storageRootCredentialId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAccountsMetastore that = (UpdateAccountsMetastore) o; + return Objects.equals(deltaSharingOrganizationName, that.deltaSharingOrganizationName) + && Objects.equals( + deltaSharingRecipientTokenLifetimeInSeconds, + that.deltaSharingRecipientTokenLifetimeInSeconds) + && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(owner, that.owner) + && Objects.equals(privilegeModelVersion, that.privilegeModelVersion) + && Objects.equals(storageRootCredentialId, that.storageRootCredentialId); + } + + @Override + public int hashCode() { + return Objects.hash( + deltaSharingOrganizationName, + deltaSharingRecipientTokenLifetimeInSeconds, + deltaSharingScope, + owner, + privilegeModelVersion, + storageRootCredentialId); + } + + @Override + public String toString() { + return new ToStringer(UpdateAccountsMetastore.class) + .add("deltaSharingOrganizationName", deltaSharingOrganizationName) + .add( + "deltaSharingRecipientTokenLifetimeInSeconds", + deltaSharingRecipientTokenLifetimeInSeconds) + .add("deltaSharingScope", deltaSharingScope) + .add("owner", owner) + .add("privilegeModelVersion", privilegeModelVersion) + .add("storageRootCredentialId", storageRootCredentialId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java new file mode 100755 index 000000000..22801de40 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsStorageCredential.java @@ -0,0 +1,183 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateAccountsStorageCredential { + /** The AWS IAM role configuration. */ + @JsonProperty("aws_iam_role") + private AwsIamRoleRequest awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentityResponse azureManagedIdentity; + + /** The Azure service principal configuration. */ + @JsonProperty("azure_service_principal") + private AzureServicePrincipal azureServicePrincipal; + + /** The Cloudflare API token configuration. */ + @JsonProperty("cloudflare_api_token") + private CloudflareApiToken cloudflareApiToken; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** The Databricks managed GCP service account configuration. */ + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount; + + /** + * Whether the current securable is accessible from all workspaces or a specific set of + * workspaces. + */ + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + /** Username of current owner of credential. */ + @JsonProperty("owner") + private String owner; + + /** + * Whether the credential is usable only for read operations. Only applicable when purpose is + * **STORAGE**. + */ + @JsonProperty("read_only") + private Boolean readOnly; + + public UpdateAccountsStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRoleRequest getAwsIamRole() { + return awsIamRole; + } + + public UpdateAccountsStorageCredential setAzureManagedIdentity( + AzureManagedIdentityResponse azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentityResponse getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public UpdateAccountsStorageCredential setAzureServicePrincipal( + AzureServicePrincipal azureServicePrincipal) { + this.azureServicePrincipal = azureServicePrincipal; + return this; + } + + public AzureServicePrincipal getAzureServicePrincipal() { + return azureServicePrincipal; + } + + public UpdateAccountsStorageCredential setCloudflareApiToken( + CloudflareApiToken cloudflareApiToken) { + this.cloudflareApiToken = cloudflareApiToken; + return this; + } + + public CloudflareApiToken getCloudflareApiToken() { + return cloudflareApiToken; + } + + public UpdateAccountsStorageCredential setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateAccountsStorageCredential setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccountRequest databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccountRequest getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + + public UpdateAccountsStorageCredential setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateAccountsStorageCredential setOwner(String owner) { + this.owner = owner; + return this; + } + + public String 
getOwner() { + return owner; + } + + public UpdateAccountsStorageCredential setReadOnly(Boolean readOnly) { + this.readOnly = readOnly; + return this; + } + + public Boolean getReadOnly() { + return readOnly; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAccountsStorageCredential that = (UpdateAccountsStorageCredential) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) + && Objects.equals(cloudflareApiToken, that.cloudflareApiToken) + && Objects.equals(comment, that.comment) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(owner, that.owner) + && Objects.equals(readOnly, that.readOnly); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + azureServicePrincipal, + cloudflareApiToken, + comment, + databricksGcpServiceAccount, + isolationMode, + owner, + readOnly); + } + + @Override + public String toString() { + return new ToStringer(UpdateAccountsStorageCredential.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("azureServicePrincipal", azureServicePrincipal) + .add("cloudflareApiToken", cloudflareApiToken) + .add("comment", comment) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("isolationMode", isolationMode) + .add("owner", owner) + .add("readOnly", readOnly) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java index b817347f1..e5352ef9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java @@ -15,6 +15,14 @@ public class UpdateCatalog { @JsonProperty("comment") private String comment; + /** Status of conversion of FOREIGN catalog to UC Native catalog. */ + @JsonProperty("conversion_info") + private ConversionInfo conversionInfo; + + /** Disaster Recovery replication state snapshot. */ + @JsonProperty("dr_replication_info") + private DrReplicationInfo drReplicationInfo; + /** Whether predictive optimization should be enabled for this object and objects under it. 
*/ @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; @@ -54,6 +62,24 @@ public String getComment() { return comment; } + public UpdateCatalog setConversionInfo(ConversionInfo conversionInfo) { + this.conversionInfo = conversionInfo; + return this; + } + + public ConversionInfo getConversionInfo() { + return conversionInfo; + } + + public UpdateCatalog setDrReplicationInfo(DrReplicationInfo drReplicationInfo) { + this.drReplicationInfo = drReplicationInfo; + return this; + } + + public DrReplicationInfo getDrReplicationInfo() { + return drReplicationInfo; + } + public UpdateCatalog setEnablePredictiveOptimization( EnablePredictiveOptimization enablePredictiveOptimization) { this.enablePredictiveOptimization = enablePredictiveOptimization; @@ -124,6 +150,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; UpdateCatalog that = (UpdateCatalog) o; return Objects.equals(comment, that.comment) + && Objects.equals(conversionInfo, that.conversionInfo) + && Objects.equals(drReplicationInfo, that.drReplicationInfo) && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(name, that.name) @@ -137,6 +165,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( comment, + conversionInfo, + drReplicationInfo, enablePredictiveOptimization, isolationMode, name, @@ -150,6 +180,8 @@ public int hashCode() { public String toString() { return new ToStringer(UpdateCatalog.class) .add("comment", comment) + .add("conversionInfo", conversionInfo) + .add("drReplicationInfo", drReplicationInfo) .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("isolationMode", isolationMode) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java index d37165c3a..5119aa1fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java @@ -11,6 +11,10 @@ @Generated public class UpdateConnection { + /** [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. */ + @JsonProperty("environment_settings") + private EnvironmentSettings environmentSettings; + /** Name of the connection. 
*/ @JsonIgnore private String name; @@ -26,6 +30,15 @@ public class UpdateConnection { @JsonProperty("owner") private String owner; + public UpdateConnection setEnvironmentSettings(EnvironmentSettings environmentSettings) { + this.environmentSettings = environmentSettings; + return this; + } + + public EnvironmentSettings getEnvironmentSettings() { + return environmentSettings; + } + public UpdateConnection setName(String name) { this.name = name; return this; @@ -67,7 +80,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateConnection that = (UpdateConnection) o; - return Objects.equals(name, that.name) + return Objects.equals(environmentSettings, that.environmentSettings) + && Objects.equals(name, that.name) && Objects.equals(newName, that.newName) && Objects.equals(options, that.options) && Objects.equals(owner, that.owner); @@ -75,12 +89,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(name, newName, options, owner); + return Objects.hash(environmentSettings, name, newName, options, owner); } @Override public String toString() { return new ToStringer(UpdateConnection.class) + .add("environmentSettings", environmentSettings) .add("name", name) .add("newName", newName) .add("options", options) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java index a785536bf..bdbf23c12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java @@ -16,7 +16,7 @@ public class UpdateFunction { */ @JsonIgnore private String name; - /** Username of current owner of function. */ + /** Username of current owner of the function. 
*/ @JsonProperty("owner") private String owner; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java index 4e220749c..b49178472 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java @@ -6,20 +6,114 @@ import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class UpdateModelVersionRequest { + /** List of aliases associated with the model version */ + @JsonProperty("aliases") + private Collection aliases; + + /** The name of the catalog containing the model version */ + @JsonProperty("catalog_name") + private String catalogName; + /** The comment attached to the model version */ @JsonProperty("comment") private String comment; + /** */ + @JsonProperty("created_at") + private Long createdAt; + + /** The identifier of the user who created the model version */ + @JsonProperty("created_by") + private String createdBy; + /** The three-level (fully qualified) name of the model version */ @JsonIgnore private String fullName; + /** The unique identifier of the model version */ + @JsonProperty("id") + private String id; + + /** The unique identifier of the metastore containing the model version */ + @JsonProperty("metastore_id") + private String metastoreId; + + /** The name of the parent registered model of the model version, relative to parent schema */ + @JsonProperty("model_name") + private String modelName; + + /** Model version dependencies, for feature-store packaged models */ + @JsonProperty("model_version_dependencies") + private DependencyList modelVersionDependencies; + + /** + * MLflow run ID used when creating the model version, if ``source`` was generated by an + * experiment run stored in an MLflow tracking server + */ + @JsonProperty("run_id") + private String runId; + + /** + * ID of the Databricks workspace containing the MLflow run that generated this model version, if + * applicable + */ + @JsonProperty("run_workspace_id") + private Long runWorkspaceId; + + /** The name of the schema containing the model version, relative to parent catalog */ + @JsonProperty("schema_name") + private String schemaName; + + /** URI indicating the location of the source artifacts (files) for the model version */ + @JsonProperty("source") + private String source; + + /** + * Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + * status, then move to READY status once the model version files are uploaded and the model + * version is finalized. Only model versions in READY status can be loaded for inference or + * served. 
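Because only READY versions can be loaded or served, callers that just registered a version typically check the status before using it. A hedged sketch, assuming a `WorkspaceClient w` and a placeholder model `main.default.my_model`:

ModelVersionInfo mv =
    w.modelVersions()
        .get(new GetModelVersionRequest().setFullName("main.default.my_model").setVersion(1L));
if (mv.getStatus() == ModelVersionInfoStatus.READY) {
  // files are uploaded and the version is finalized; safe to load or serve
}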
+ */ + @JsonProperty("status") + private ModelVersionInfoStatus status; + + /** The storage location on the cloud under which model version data files are stored */ + @JsonProperty("storage_location") + private String storageLocation; + + /** */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** The identifier of the user who updated the model version last time */ + @JsonProperty("updated_by") + private String updatedBy; + /** The integer version number of the model version */ @JsonIgnore private Long version; + public UpdateModelVersionRequest setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public UpdateModelVersionRequest setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + public UpdateModelVersionRequest setComment(String comment) { this.comment = comment; return this; @@ -29,6 +123,24 @@ public String getComment() { return comment; } + public UpdateModelVersionRequest setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public UpdateModelVersionRequest setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + public UpdateModelVersionRequest setFullName(String fullName) { this.fullName = fullName; return this; @@ -38,6 +150,115 @@ public String getFullName() { return fullName; } + public UpdateModelVersionRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateModelVersionRequest setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public UpdateModelVersionRequest setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public UpdateModelVersionRequest setModelVersionDependencies( + DependencyList modelVersionDependencies) { + this.modelVersionDependencies = modelVersionDependencies; + return this; + } + + public DependencyList getModelVersionDependencies() { + return modelVersionDependencies; + } + + public UpdateModelVersionRequest setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public UpdateModelVersionRequest setRunWorkspaceId(Long runWorkspaceId) { + this.runWorkspaceId = runWorkspaceId; + return this; + } + + public Long getRunWorkspaceId() { + return runWorkspaceId; + } + + public UpdateModelVersionRequest setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public UpdateModelVersionRequest setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public UpdateModelVersionRequest setStatus(ModelVersionInfoStatus status) { + this.status = status; + return this; + } + + public ModelVersionInfoStatus getStatus() { + return status; + } + + public UpdateModelVersionRequest setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public UpdateModelVersionRequest setUpdatedAt(Long updatedAt) { + 
this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public UpdateModelVersionRequest setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + public UpdateModelVersionRequest setVersion(Long version) { this.version = version; return this; @@ -52,21 +273,72 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateModelVersionRequest that = (UpdateModelVersionRequest) o; - return Objects.equals(comment, that.comment) + return Objects.equals(aliases, that.aliases) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) && Objects.equals(fullName, that.fullName) + && Objects.equals(id, that.id) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(modelName, that.modelName) + && Objects.equals(modelVersionDependencies, that.modelVersionDependencies) + && Objects.equals(runId, that.runId) + && Objects.equals(runWorkspaceId, that.runWorkspaceId) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(source, that.source) + && Objects.equals(status, that.status) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy) && Objects.equals(version, that.version); } @Override public int hashCode() { - return Objects.hash(comment, fullName, version); + return Objects.hash( + aliases, + catalogName, + comment, + createdAt, + createdBy, + fullName, + id, + metastoreId, + modelName, + modelVersionDependencies, + runId, + runWorkspaceId, + schemaName, + source, + status, + storageLocation, + updatedAt, + updatedBy, + version); } @Override public String toString() { return new ToStringer(UpdateModelVersionRequest.class) + .add("aliases", aliases) + .add("catalogName", catalogName) .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) .add("fullName", fullName) + .add("id", id) + .add("metastoreId", metastoreId) + .add("modelName", modelName) + .add("modelVersionDependencies", modelVersionDependencies) + .add("runId", runId) + .add("runWorkspaceId", runWorkspaceId) + .add("schemaName", schemaName) + .add("source", source) + .add("status", status) + .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) .add("version", version) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java index d7fdfe4b0..963ea7bc6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java @@ -6,17 +6,49 @@ import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class UpdateRegisteredModelRequest { + /** List of aliases associated with the registered model */ + @JsonProperty("aliases") + private Collection aliases; + + /** + * Indicates whether the principal is limited to retrieving 
metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + + /** The name of the catalog where the schema and the registered model reside */ + @JsonProperty("catalog_name") + private String catalogName; + /** The comment attached to the registered model */ @JsonProperty("comment") private String comment; + /** Creation timestamp of the registered model in milliseconds since the Unix epoch */ + @JsonProperty("created_at") + private Long createdAt; + + /** The identifier of the user who created the registered model */ + @JsonProperty("created_by") + private String createdBy; + /** The three-level (fully qualified) name of the registered model */ @JsonIgnore private String fullName; + /** The unique identifier of the metastore */ + @JsonProperty("metastore_id") + private String metastoreId; + + /** The name of the registered model */ + @JsonProperty("name") + private String name; + /** New name for the registered model. */ @JsonProperty("new_name") private String newName; @@ -25,6 +57,49 @@ public class UpdateRegisteredModelRequest { @JsonProperty("owner") private String owner; + /** The name of the schema where the registered model resides */ + @JsonProperty("schema_name") + private String schemaName; + + /** The storage location on the cloud under which model version data files are stored */ + @JsonProperty("storage_location") + private String storageLocation; + + /** Last-update timestamp of the registered model in milliseconds since the Unix epoch */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** The identifier of the user who updated the registered model last time */ + @JsonProperty("updated_by") + private String updatedBy; + + public UpdateRegisteredModelRequest setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public UpdateRegisteredModelRequest setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public UpdateRegisteredModelRequest setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + public UpdateRegisteredModelRequest setComment(String comment) { this.comment = comment; return this; @@ -34,6 +109,24 @@ public String getComment() { return comment; } + public UpdateRegisteredModelRequest setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public UpdateRegisteredModelRequest setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + public UpdateRegisteredModelRequest setFullName(String fullName) { this.fullName = fullName; return this; @@ -43,6 +136,24 @@ public String getFullName() { return fullName; } + public UpdateRegisteredModelRequest setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public UpdateRegisteredModelRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + public UpdateRegisteredModelRequest setNewName(String newName) { this.newName = newName; return this; @@ -61,29 +172,102 @@ public String getOwner() { 
return owner; } + public UpdateRegisteredModelRequest setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public UpdateRegisteredModelRequest setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public UpdateRegisteredModelRequest setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public UpdateRegisteredModelRequest setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateRegisteredModelRequest that = (UpdateRegisteredModelRequest) o; - return Objects.equals(comment, that.comment) + return Objects.equals(aliases, that.aliases) + && Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) && Objects.equals(newName, that.newName) - && Objects.equals(owner, that.owner); + && Objects.equals(owner, that.owner) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); } @Override public int hashCode() { - return Objects.hash(comment, fullName, newName, owner); + return Objects.hash( + aliases, + browseOnly, + catalogName, + comment, + createdAt, + createdBy, + fullName, + metastoreId, + name, + newName, + owner, + schemaName, + storageLocation, + updatedAt, + updatedBy); } @Override public String toString() { return new ToStringer(UpdateRegisteredModelRequest.class) + .add("aliases", aliases) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) .add("newName", newName) .add("owner", owner) + .add("schemaName", schemaName) + .add("storageLocation", storageLocation) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java index 3af63d755..21f22415c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java @@ -11,11 +11,15 @@ @Generated public class UpdateWorkspaceBindingsParameters { - /** List of workspace bindings. */ + /** + * List of workspace bindings to add. If a binding for the workspace already exists with a + * different binding_type, adding it again with a new binding_type will update the existing + * binding (e.g., from READ_WRITE to READ_ONLY). 
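These add semantics make the update a merge rather than an append: re-adding an already-bound workspace with a new binding_type rewrites the existing binding in place. A sketch with placeholder workspace IDs:

import java.util.Arrays;

UpdateWorkspaceBindingsParameters params =
    new UpdateWorkspaceBindingsParameters()
        .setAdd(
            Arrays.asList(
                new WorkspaceBinding()
                    .setWorkspaceId(1234567890L) // flip this workspace to read-only
                    .setBindingType(WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY)))
        .setRemove(Arrays.asList(new WorkspaceBinding().setWorkspaceId(9876543210L)));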
+ */ @JsonProperty("add") private Collection add; - /** List of workspace bindings. */ + /** List of workspace bindings to remove. */ @JsonProperty("remove") private Collection remove; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java index 21ac9b83e..3f74f8647 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java @@ -76,7 +76,13 @@ public class VolumeInfo { @JsonProperty("volume_id") private String volumeId; - /** */ + /** + * The type of the volume. An external volume is located in the specified external location. A + * managed volume is located in the default location which is specified by the parent schema, or + * the parent catalog, or the Metastore. [Learn more] + * + *
<p>
[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java index fcb9f83ac..044f72a39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java @@ -4,13 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * The type of the volume. An external volume is located in the specified external location. A - * managed volume is located in the default location which is specified by the parent schema, or the - * parent catalog, or the Metastore. [Learn more] - * - *
<p>
[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external - */ @Generated public enum VolumeType { EXTERNAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java index 609ef16ba..7b0ea8f0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java @@ -78,7 +78,7 @@ public Iterable list(String catalogName, String schemaName) { * *
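The volume_type documented on VolumeInfo above is what separates the two storage models: managed volumes live in the default location inherited from the schema, catalog, or metastore, while external volumes point at an explicit external location. A sketch, assuming a `WorkspaceClient w` and placeholder catalog/schema names:

for (VolumeInfo v :
    w.volumes().list(new ListVolumesRequest().setCatalogName("main").setSchemaName("default"))) {
  String location =
      v.getVolumeType() == VolumeType.MANAGED
          ? "managed default location"
          : v.getStorageLocation(); // EXTERNAL: the specified external location
  System.out.println(v.getFullName() + " -> " + location);
}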
<p>
The returned volumes are filtered based on the privileges of the calling user. For example, * the metastore admin is able to list all the volumes. A regular user needs to be the owner or - * have the **READ VOLUME** privilege on the volume to recieve the volumes in the response. For + * have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For * the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the * parent catalog and the **USE_SCHEMA** privilege on the parent schema. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java index fe725c7ef..7ff906c48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java @@ -52,7 +52,7 @@ public interface VolumesService { * *
<p>
The returned volumes are filtered based on the privileges of the calling user. For example, * the metastore admin is able to list all the volumes. A regular user needs to be the owner or - * have the **READ VOLUME** privilege on the volume to recieve the volumes in the response. For + * have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For * the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the * parent catalog and the **USE_SCHEMA** privilege on the parent schema. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java new file mode 100755 index 000000000..93018e7a6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/BaseEnvironmentType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; + +/** If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto */ +@Generated +public enum BaseEnvironmentType { + CPU, + GPU, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java new file mode 100755 index 000000000..598f95361 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateDefaultBaseEnvironmentRequest { + /** */ + @JsonProperty("default_base_environment") + private DefaultBaseEnvironment defaultBaseEnvironment; + + /** + * A unique identifier for this request. A random UUID is recommended. This request is only + * idempotent if a `request_id` is provided. 
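Since the create call is only idempotent when request_id is supplied, retry-safe callers should mint the identifier themselves. A sketch with a placeholder environment name:

import java.util.UUID;

CreateDefaultBaseEnvironmentRequest create =
    new CreateDefaultBaseEnvironmentRequest()
        .setRequestId(UUID.randomUUID().toString()) // lets the service deduplicate retries
        .setDefaultBaseEnvironment(new DefaultBaseEnvironment().setName("team-base-env"));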
+ */ + @JsonProperty("request_id") + private String requestId; + + public CreateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment( + DefaultBaseEnvironment defaultBaseEnvironment) { + this.defaultBaseEnvironment = defaultBaseEnvironment; + return this; + } + + public DefaultBaseEnvironment getDefaultBaseEnvironment() { + return defaultBaseEnvironment; + } + + public CreateDefaultBaseEnvironmentRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDefaultBaseEnvironmentRequest that = (CreateDefaultBaseEnvironmentRequest) o; + return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment) + && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(defaultBaseEnvironment, requestId); + } + + @Override + public String toString() { + return new ToStringer(CreateDefaultBaseEnvironmentRequest.class) + .add("defaultBaseEnvironment", defaultBaseEnvironment) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java index cdc49aa18..828e11a2d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java @@ -38,6 +38,13 @@ public class CreateInstancePool { @JsonProperty("disk_spec") private DiskSpec diskSpec; + /** + * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + * are enabled. This field should not be true if node_type_flexibility is set. + */ + @JsonProperty("enable_auto_alternate_node_types") + private Boolean enableAutoAlternateNodeTypes; + /** * Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire * additional disk space when its Spark workers are running low on disk space. In AWS, this @@ -83,6 +90,14 @@ public class CreateInstancePool { @JsonProperty("min_idle_instances") private Long minIdleInstances; + /** + * For pools with node type flexibility (Fleet-V2), this object contains the information about the + * alternate node type ids to use when attempting to launch a cluster if the node type id is not + * available. This field should not be set if enable_auto_alternate_node_types is true. + */ + @JsonProperty("node_type_flexibility") + private NodeTypeFlexibility nodeTypeFlexibility; + /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -154,6 +169,15 @@ public DiskSpec getDiskSpec() { return diskSpec; } + public CreateInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) { + this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes; + return this; + } + + public Boolean getEnableAutoAlternateNodeTypes() { + return enableAutoAlternateNodeTypes; + } + public CreateInstancePool setEnableElasticDisk(Boolean enableElasticDisk) { this.enableElasticDisk = enableElasticDisk; return this; @@ -209,6 +233,15 @@ public Long getMinIdleInstances() { return minIdleInstances; } + public CreateInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { + this.nodeTypeFlexibility = nodeTypeFlexibility; + return this; + } + + public NodeTypeFlexibility getNodeTypeFlexibility() { + return nodeTypeFlexibility; + } + public CreateInstancePool setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -264,6 +297,7 @@ public boolean equals(Object o) { && Objects.equals(azureAttributes, that.azureAttributes) && Objects.equals(customTags, that.customTags) && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes) && Objects.equals(enableElasticDisk, that.enableElasticDisk) && Objects.equals(gcpAttributes, that.gcpAttributes) && Objects.equals( @@ -271,6 +305,7 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) @@ -285,12 +320,14 @@ public int hashCode() { azureAttributes, customTags, diskSpec, + enableAutoAlternateNodeTypes, enableElasticDisk, gcpAttributes, idleInstanceAutoterminationMinutes, instancePoolName, maxCapacity, minIdleInstances, + nodeTypeFlexibility, nodeTypeId, preloadedDockerImages, preloadedSparkVersions, @@ -305,12 +342,14 @@ public String toString() { .add("azureAttributes", azureAttributes) .add("customTags", customTags) .add("diskSpec", diskSpec) + .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes) .add("enableElasticDisk", enableElasticDisk) .add("gcpAttributes", gcpAttributes) .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) + .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("preloadedDockerImages", preloadedDockerImages) .add("preloadedSparkVersions", preloadedSparkVersions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java new file mode 100755 index 000000000..234b6cbc3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java @@ -0,0 +1,258 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
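As the CreateInstancePool javadoc above notes, enable_auto_alternate_node_types and node_type_flexibility are mutually exclusive, so a Fleet-V2 pool opts into exactly one of them. A sketch using the auto-generated alternates (pool and node type names are placeholders):

CreateInstancePool pool =
    new CreateInstancePool()
        .setInstancePoolName("fleet-v2-pool")
        .setNodeTypeId("m5d.4xlarge") // placeholder node type id
        .setEnableAutoAlternateNodeTypes(true) // so node_type_flexibility stays unset
        .setMinIdleInstances(1L);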
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class DefaultBaseEnvironment { + /** */ + @JsonProperty("base_environment_cache") + private Collection baseEnvironmentCache; + + /** */ + @JsonProperty("base_environment_type") + private BaseEnvironmentType baseEnvironmentType; + + /** */ + @JsonProperty("created_timestamp") + private Long createdTimestamp; + + /** */ + @JsonProperty("creator_user_id") + private Long creatorUserId; + + /** + * Note: we made `environment` non-internal because we need to expose its `client` field. All + * other fields should be treated as internal. + */ + @JsonProperty("environment") + private Environment environment; + + /** */ + @JsonProperty("filepath") + private String filepath; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("is_default") + private Boolean isDefault; + + /** */ + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + /** */ + @JsonProperty("last_updated_user_id") + private Long lastUpdatedUserId; + + /** */ + @JsonProperty("message") + private String message; + + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("principal_ids") + private Collection principalIds; + + /** */ + @JsonProperty("status") + private DefaultBaseEnvironmentCacheStatus status; + + public DefaultBaseEnvironment setBaseEnvironmentCache( + Collection baseEnvironmentCache) { + this.baseEnvironmentCache = baseEnvironmentCache; + return this; + } + + public Collection getBaseEnvironmentCache() { + return baseEnvironmentCache; + } + + public DefaultBaseEnvironment setBaseEnvironmentType(BaseEnvironmentType baseEnvironmentType) { + this.baseEnvironmentType = baseEnvironmentType; + return this; + } + + public BaseEnvironmentType getBaseEnvironmentType() { + return baseEnvironmentType; + } + + public DefaultBaseEnvironment setCreatedTimestamp(Long createdTimestamp) { + this.createdTimestamp = createdTimestamp; + return this; + } + + public Long getCreatedTimestamp() { + return createdTimestamp; + } + + public DefaultBaseEnvironment setCreatorUserId(Long creatorUserId) { + this.creatorUserId = creatorUserId; + return this; + } + + public Long getCreatorUserId() { + return creatorUserId; + } + + public DefaultBaseEnvironment setEnvironment(Environment environment) { + this.environment = environment; + return this; + } + + public Environment getEnvironment() { + return environment; + } + + public DefaultBaseEnvironment setFilepath(String filepath) { + this.filepath = filepath; + return this; + } + + public String getFilepath() { + return filepath; + } + + public DefaultBaseEnvironment setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public DefaultBaseEnvironment setIsDefault(Boolean isDefault) { + this.isDefault = isDefault; + return this; + } + + public Boolean getIsDefault() { + return isDefault; + } + + public DefaultBaseEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + public DefaultBaseEnvironment setLastUpdatedUserId(Long lastUpdatedUserId) { + this.lastUpdatedUserId = lastUpdatedUserId; + return this; + } + + public Long 
getLastUpdatedUserId() { + return lastUpdatedUserId; + } + + public DefaultBaseEnvironment setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public DefaultBaseEnvironment setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DefaultBaseEnvironment setPrincipalIds(Collection principalIds) { + this.principalIds = principalIds; + return this; + } + + public Collection getPrincipalIds() { + return principalIds; + } + + public DefaultBaseEnvironment setStatus(DefaultBaseEnvironmentCacheStatus status) { + this.status = status; + return this; + } + + public DefaultBaseEnvironmentCacheStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultBaseEnvironment that = (DefaultBaseEnvironment) o; + return Objects.equals(baseEnvironmentCache, that.baseEnvironmentCache) + && Objects.equals(baseEnvironmentType, that.baseEnvironmentType) + && Objects.equals(createdTimestamp, that.createdTimestamp) + && Objects.equals(creatorUserId, that.creatorUserId) + && Objects.equals(environment, that.environment) + && Objects.equals(filepath, that.filepath) + && Objects.equals(id, that.id) + && Objects.equals(isDefault, that.isDefault) + && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(lastUpdatedUserId, that.lastUpdatedUserId) + && Objects.equals(message, that.message) + && Objects.equals(name, that.name) + && Objects.equals(principalIds, that.principalIds) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash( + baseEnvironmentCache, + baseEnvironmentType, + createdTimestamp, + creatorUserId, + environment, + filepath, + id, + isDefault, + lastUpdatedTimestamp, + lastUpdatedUserId, + message, + name, + principalIds, + status); + } + + @Override + public String toString() { + return new ToStringer(DefaultBaseEnvironment.class) + .add("baseEnvironmentCache", baseEnvironmentCache) + .add("baseEnvironmentType", baseEnvironmentType) + .add("createdTimestamp", createdTimestamp) + .add("creatorUserId", creatorUserId) + .add("environment", environment) + .add("filepath", filepath) + .add("id", id) + .add("isDefault", isDefault) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("lastUpdatedUserId", lastUpdatedUserId) + .add("message", message) + .add("name", name) + .add("principalIds", principalIds) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java new file mode 100755 index 000000000..ea464af11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DefaultBaseEnvironmentCache { + /** */ + @JsonProperty("indefinite_materialized_environment") + private MaterializedEnvironment indefiniteMaterializedEnvironment; + + /** */ + @JsonProperty("materialized_environment") + private MaterializedEnvironment materializedEnvironment; + + /** */ + @JsonProperty("message") + private String message; + + /** */ + @JsonProperty("status") + private DefaultBaseEnvironmentCacheStatus status; + + public DefaultBaseEnvironmentCache setIndefiniteMaterializedEnvironment( + MaterializedEnvironment indefiniteMaterializedEnvironment) { + this.indefiniteMaterializedEnvironment = indefiniteMaterializedEnvironment; + return this; + } + + public MaterializedEnvironment getIndefiniteMaterializedEnvironment() { + return indefiniteMaterializedEnvironment; + } + + public DefaultBaseEnvironmentCache setMaterializedEnvironment( + MaterializedEnvironment materializedEnvironment) { + this.materializedEnvironment = materializedEnvironment; + return this; + } + + public MaterializedEnvironment getMaterializedEnvironment() { + return materializedEnvironment; + } + + public DefaultBaseEnvironmentCache setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public DefaultBaseEnvironmentCache setStatus(DefaultBaseEnvironmentCacheStatus status) { + this.status = status; + return this; + } + + public DefaultBaseEnvironmentCacheStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultBaseEnvironmentCache that = (DefaultBaseEnvironmentCache) o; + return Objects.equals(indefiniteMaterializedEnvironment, that.indefiniteMaterializedEnvironment) + && Objects.equals(materializedEnvironment, that.materializedEnvironment) + && Objects.equals(message, that.message) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash( + indefiniteMaterializedEnvironment, materializedEnvironment, message, status); + } + + @Override + public String toString() { + return new ToStringer(DefaultBaseEnvironmentCache.class) + .add("indefiniteMaterializedEnvironment", indefiniteMaterializedEnvironment) + .add("materializedEnvironment", materializedEnvironment) + .add("message", message) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java new file mode 100755 index 000000000..aaee91c80 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum DefaultBaseEnvironmentCacheStatus { + CREATED, + EXPIRED, + FAILED, + INVALID, + PENDING, + REFRESHING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java new file mode 100755 index 000000000..bef81a175 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteDefaultBaseEnvironmentRequest { + /** */ + @JsonIgnore private String id; + + public DeleteDefaultBaseEnvironmentRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDefaultBaseEnvironmentRequest that = (DeleteDefaultBaseEnvironmentRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteDefaultBaseEnvironmentRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java index b5cd70eb2..c2eea1d95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java @@ -19,6 +19,13 @@ public class EditInstancePool { @JsonProperty("custom_tags") private Map customTags; + /** + * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + * are enabled. This field should not be true if node_type_flexibility is set. + */ + @JsonProperty("enable_auto_alternate_node_types") + private Boolean enableAutoAlternateNodeTypes; + /** * Automatically terminates the extra instances in the pool cache after they are inactive for this * time in minutes if min_idle_instances requirement is already met. If not set, the extra pool @@ -52,6 +59,14 @@ public class EditInstancePool { @JsonProperty("min_idle_instances") private Long minIdleInstances; + /** + * For pools with node type flexibility (Fleet-V2), this object contains the information about the + * alternate node type ids to use when attempting to launch a cluster if the node type id is not + * available. This field should not be set if enable_auto_alternate_node_types is true. + */ + @JsonProperty("node_type_flexibility") + private NodeTypeFlexibility nodeTypeFlexibility; + /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -84,6 +99,15 @@ public Map getCustomTags() { return customTags; } + public EditInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) { + this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes; + return this; + } + + public Boolean getEnableAutoAlternateNodeTypes() { + return enableAutoAlternateNodeTypes; + } + public EditInstancePool setIdleInstanceAutoterminationMinutes( Long idleInstanceAutoterminationMinutes) { this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; @@ -130,6 +154,15 @@ public Long getMinIdleInstances() { return minIdleInstances; } + public EditInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { + this.nodeTypeFlexibility = nodeTypeFlexibility; + return this; + } + + public NodeTypeFlexibility getNodeTypeFlexibility() { + return nodeTypeFlexibility; + } + public EditInstancePool setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -163,12 +196,14 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; EditInstancePool that = (EditInstancePool) o; return Objects.equals(customTags, that.customTags) + && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes) && Objects.equals( idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) && Objects.equals(instancePoolId, that.instancePoolId) && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(remoteDiskThroughput, that.remoteDiskThroughput) && Objects.equals(totalInitialRemoteDiskSize, that.totalInitialRemoteDiskSize); @@ -178,11 +213,13 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( customTags, + enableAutoAlternateNodeTypes, idleInstanceAutoterminationMinutes, instancePoolId, instancePoolName, maxCapacity, minIdleInstances, + nodeTypeFlexibility, nodeTypeId, remoteDiskThroughput, totalInitialRemoteDiskSize); @@ -192,11 +229,13 @@ public int hashCode() { public String toString() { return new ToStringer(EditInstancePool.class) .add("customTags", customTags) + .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes) .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) .add("instancePoolId", instancePoolId) .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) + .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("remoteDiskThroughput", remoteDiskThroughput) .add("totalInitialRemoteDiskSize", totalInitialRemoteDiskSize) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index 28acb8090..7e131ef28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -36,13 +36,9 @@ public class Environment { @JsonProperty("environment_version") private String environmentVersion; - /** Use `java_dependencies` instead. 
*/ - @JsonProperty("jar_dependencies") - private Collection<String> jarDependencies; - /** - * List of jar dependencies, should be string representing volume paths. For example: - * `/Volumes/path/to/test.jar`. + * List of Java dependencies. Each dependency is a string representing a Java library path. For + * example: `/Volumes/path/to/test.jar`. */ @JsonProperty("java_dependencies") private Collection<String> javaDependencies; @@ -74,15 +70,6 @@ public String getEnvironmentVersion() { return environmentVersion; } - public Environment setJarDependencies(Collection<String> jarDependencies) { - this.jarDependencies = jarDependencies; - return this; - } - - public Collection<String> getJarDependencies() { - return jarDependencies; - } - public Environment setJavaDependencies(Collection<String> javaDependencies) { this.javaDependencies = javaDependencies; return this; @@ -100,14 +87,12 @@ public boolean equals(Object o) { return Objects.equals(client, that.client) && Objects.equals(dependencies, that.dependencies) && Objects.equals(environmentVersion, that.environmentVersion) - && Objects.equals(jarDependencies, that.jarDependencies) && Objects.equals(javaDependencies, that.javaDependencies); } @Override public int hashCode() { - return Objects.hash( - client, dependencies, environmentVersion, jarDependencies, javaDependencies); + return Objects.hash(client, dependencies, environmentVersion, javaDependencies); } @Override @@ -116,7 +101,6 @@ public String toString() { .add("client", client) .add("dependencies", dependencies) .add("environmentVersion", environmentVersion) - .add("jarDependencies", jarDependencies) .add("javaDependencies", javaDependencies) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java new file mode 100755 index 000000000..6c682a012 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetDefaultBaseEnvironmentRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
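Review note: since `jar_dependencies` is removed outright rather than kept as a deprecated alias, existing callers must switch to the `java_dependencies` setter; the payload format (volume paths to jars) is unchanged. A minimal migration sketch with an illustrative path:

```java
import com.databricks.sdk.service.compute.Environment;
import java.util.Arrays;

public class JavaDependenciesMigration {
  public static void main(String[] args) {
    // Before this change: new Environment().setJarDependencies(...) -- now removed.
    Environment env =
        new Environment()
            .setEnvironmentVersion("2") // illustrative
            .setJavaDependencies(Arrays.asList("/Volumes/path/to/test.jar"));
    System.out.println(env.getJavaDependencies());
  }
}
```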
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDefaultBaseEnvironmentRequest { + /** */ + @JsonIgnore + @QueryParam("id") + private String id; + + /** Deprecated: use ctx.requestId instead */ + @JsonIgnore + @QueryParam("trace_id") + private String traceId; + + public GetDefaultBaseEnvironmentRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public GetDefaultBaseEnvironmentRequest setTraceId(String traceId) { + this.traceId = traceId; + return this; + } + + public String getTraceId() { + return traceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDefaultBaseEnvironmentRequest that = (GetDefaultBaseEnvironmentRequest) o; + return Objects.equals(id, that.id) && Objects.equals(traceId, that.traceId); + } + + @Override + public int hashCode() { + return Objects.hash(id, traceId); + } + + @Override + public String toString() { + return new ToStringer(GetDefaultBaseEnvironmentRequest.class) + .add("id", id) + .add("traceId", traceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java index be68fd165..c9dd511d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java @@ -52,6 +52,13 @@ public class GetInstancePool { @JsonProperty("disk_spec") private DiskSpec diskSpec; + /** + * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + * are enabled. This field should not be true if node_type_flexibility is set. + */ + @JsonProperty("enable_auto_alternate_node_types") + private Boolean enableAutoAlternateNodeTypes; + /** * Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire * additional disk space when its Spark workers are running low on disk space. In AWS, this @@ -101,6 +108,14 @@ public class GetInstancePool { @JsonProperty("min_idle_instances") private Long minIdleInstances; + /** + * For pools with node type flexibility (Fleet-V2), this object contains the information about the + * alternate node type ids to use when attempting to launch a cluster if the node type id is not + * available. This field should not be set if enable_auto_alternate_node_types is true. + */ + @JsonProperty("node_type_flexibility") + private NodeTypeFlexibility nodeTypeFlexibility; + /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -193,6 +208,15 @@ public DiskSpec getDiskSpec() { return diskSpec; } + public GetInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) { + this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes; + return this; + } + + public Boolean getEnableAutoAlternateNodeTypes() { + return enableAutoAlternateNodeTypes; + } + public GetInstancePool setEnableElasticDisk(Boolean enableElasticDisk) { this.enableElasticDisk = enableElasticDisk; return this; @@ -257,6 +281,15 @@ public Long getMinIdleInstances() { return minIdleInstances; } + public GetInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { + this.nodeTypeFlexibility = nodeTypeFlexibility; + return this; + } + + public NodeTypeFlexibility getNodeTypeFlexibility() { + return nodeTypeFlexibility; + } + public GetInstancePool setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -339,6 +372,7 @@ public boolean equals(Object o) { && Objects.equals(customTags, that.customTags) && Objects.equals(defaultTags, that.defaultTags) && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes) && Objects.equals(enableElasticDisk, that.enableElasticDisk) && Objects.equals(gcpAttributes, that.gcpAttributes) && Objects.equals( @@ -347,6 +381,7 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) @@ -365,6 +400,7 @@ public int hashCode() { customTags, defaultTags, diskSpec, + enableAutoAlternateNodeTypes, enableElasticDisk, gcpAttributes, idleInstanceAutoterminationMinutes, @@ -372,6 +408,7 @@ public int hashCode() { instancePoolName, maxCapacity, minIdleInstances, + nodeTypeFlexibility, nodeTypeId, preloadedDockerImages, preloadedSparkVersions, @@ -390,6 +427,7 @@ public String toString() { .add("customTags", customTags) .add("defaultTags", defaultTags) .add("diskSpec", diskSpec) + .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes) .add("enableElasticDisk", enableElasticDisk) .add("gcpAttributes", gcpAttributes) .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) @@ -397,6 +435,7 @@ public String toString() { .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) + .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("preloadedDockerImages", preloadedDockerImages) .add("preloadedSparkVersions", preloadedSparkVersions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java index 485798092..9f9932894 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java @@ -52,6 +52,13 @@ public class InstancePoolAndStats { @JsonProperty("disk_spec") private DiskSpec diskSpec; + /** + * For pools with node type 
flexibility (Fleet-V2), whether auto generated alternate node type ids + * are enabled. This field should not be true if node_type_flexibility is set. + */ + @JsonProperty("enable_auto_alternate_node_types") + private Boolean enableAutoAlternateNodeTypes; + /** * Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire * additional disk space when its Spark workers are running low on disk space. In AWS, this @@ -101,6 +108,14 @@ public class InstancePoolAndStats { @JsonProperty("min_idle_instances") private Long minIdleInstances; + /** + * For pools with node type flexibility (Fleet-V2), this object contains the information about the + * alternate node type ids to use when attempting to launch a cluster if the node type id is not + * available. This field should not be set if enable_auto_alternate_node_types is true. + */ + @JsonProperty("node_type_flexibility") + private NodeTypeFlexibility nodeTypeFlexibility; + /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -193,6 +208,16 @@ public DiskSpec getDiskSpec() { return diskSpec; } + public InstancePoolAndStats setEnableAutoAlternateNodeTypes( + Boolean enableAutoAlternateNodeTypes) { + this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes; + return this; + } + + public Boolean getEnableAutoAlternateNodeTypes() { + return enableAutoAlternateNodeTypes; + } + public InstancePoolAndStats setEnableElasticDisk(Boolean enableElasticDisk) { this.enableElasticDisk = enableElasticDisk; return this; @@ -257,6 +282,15 @@ public Long getMinIdleInstances() { return minIdleInstances; } + public InstancePoolAndStats setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { + this.nodeTypeFlexibility = nodeTypeFlexibility; + return this; + } + + public NodeTypeFlexibility getNodeTypeFlexibility() { + return nodeTypeFlexibility; + } + public InstancePoolAndStats setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -340,6 +374,7 @@ public boolean equals(Object o) { && Objects.equals(customTags, that.customTags) && Objects.equals(defaultTags, that.defaultTags) && Objects.equals(diskSpec, that.diskSpec) + && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes) && Objects.equals(enableElasticDisk, that.enableElasticDisk) && Objects.equals(gcpAttributes, that.gcpAttributes) && Objects.equals( @@ -348,6 +383,7 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) + && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) @@ -366,6 +402,7 @@ public int hashCode() { customTags, defaultTags, diskSpec, + enableAutoAlternateNodeTypes, enableElasticDisk, gcpAttributes, idleInstanceAutoterminationMinutes, @@ -373,6 +410,7 @@ public int hashCode() { instancePoolName, maxCapacity, minIdleInstances, + nodeTypeFlexibility, nodeTypeId, preloadedDockerImages, preloadedSparkVersions, @@ -391,6 +429,7 @@ public String toString() { .add("customTags", customTags) .add("defaultTags", defaultTags) .add("diskSpec", diskSpec) + .add("enableAutoAlternateNodeTypes", 
enableAutoAlternateNodeTypes) .add("enableElasticDisk", enableElasticDisk) .add("gcpAttributes", gcpAttributes) .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) @@ -398,6 +437,7 @@ public String toString() { .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) + .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("preloadedDockerImages", preloadedDockerImages) .add("preloadedSparkVersions", preloadedSparkVersions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java index 2520eca50..6e7b6041b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java @@ -14,6 +14,19 @@ public class InstancePoolAwsAttributes { @JsonProperty("availability") private InstancePoolAwsAttributesAvailability availability; + /** + * All AWS instances belonging to the instance pool will have this instance profile. If omitted, + * instances will initially be launched with the workspace's default instance profile. If defined, + * clusters that use the pool will inherit the instance profile, and must not specify their own + * instance profile on cluster creation or update. If the pool does not specify an instance + * profile, clusters using the pool may specify any instance profile. The instance profile must + * have previously been added to the Databricks environment by an account administrator. + * + *
<p>
This feature may only be available to certain customer plans. + */ + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + /** * Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance * type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -48,6 +61,15 @@ public InstancePoolAwsAttributesAvailability getAvailability() { return availability; } + public InstancePoolAwsAttributes setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + public InstancePoolAwsAttributes setSpotBidPricePercent(Long spotBidPricePercent) { this.spotBidPricePercent = spotBidPricePercent; return this; @@ -72,19 +94,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; InstancePoolAwsAttributes that = (InstancePoolAwsAttributes) o; return Objects.equals(availability, that.availability) + && Objects.equals(instanceProfileArn, that.instanceProfileArn) && Objects.equals(spotBidPricePercent, that.spotBidPricePercent) && Objects.equals(zoneId, that.zoneId); } @Override public int hashCode() { - return Objects.hash(availability, spotBidPricePercent, zoneId); + return Objects.hash(availability, instanceProfileArn, spotBidPricePercent, zoneId); } @Override public String toString() { return new ToStringer(InstancePoolAwsAttributes.class) .add("availability", availability) + .add("instanceProfileArn", instanceProfileArn) .add("spotBidPricePercent", spotBidPricePercent) .add("zoneId", zoneId) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java index bf8a99af3..4a692cb61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java @@ -4,6 +4,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.Paginator; +import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,6 +69,38 @@ public Iterable clusterStatus(ClusterStatus request) { request, impl::clusterStatus, ClusterLibraryStatuses::getLibraryStatuses, response -> null); } + /** + * Create a default base environment within workspaces to define the environment version and a + * list of dependencies to be used in serverless notebooks and jobs. This process will + * asynchronously generate a cache to optimize dependency resolution. + */ + public DefaultBaseEnvironment createDefaultBaseEnvironment( + CreateDefaultBaseEnvironmentRequest request) { + return impl.createDefaultBaseEnvironment(request); + } + + public void deleteDefaultBaseEnvironment(String id) { + deleteDefaultBaseEnvironment(new DeleteDefaultBaseEnvironmentRequest().setId(id)); + } + + /** + * Delete the default base environment given an ID. The default base environment may be used by + * downstream workloads. Please ensure that the deletion is intentional. 
+ */ + public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) { + impl.deleteDefaultBaseEnvironment(request); + } + + public DefaultBaseEnvironment getDefaultBaseEnvironment(String id) { + return getDefaultBaseEnvironment(new GetDefaultBaseEnvironmentRequest().setId(id)); + } + + /** Return the default base environment details for a given ID. */ + public DefaultBaseEnvironment getDefaultBaseEnvironment( + GetDefaultBaseEnvironmentRequest request) { + return impl.getDefaultBaseEnvironment(request); + } + /** * Add libraries to install on a cluster. The installation is asynchronous; it happens in the * background after the completion of this request. @@ -76,6 +109,34 @@ public void install(InstallLibraries request) { impl.install(request); } + /** List default base environments defined in the workspaces for the requested user. */ + public Iterable<DefaultBaseEnvironment> listDefaultBaseEnvironments( + ListDefaultBaseEnvironmentsRequest request) { + return new Paginator<>( + request, + impl::listDefaultBaseEnvironments, + ListDefaultBaseEnvironmentsResponse::getDefaultBaseEnvironments, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public void refreshDefaultBaseEnvironments(Collection<String> ids) { + refreshDefaultBaseEnvironments(new RefreshDefaultBaseEnvironmentsRequest().setIds(ids)); + } + + /** + * Refresh the cached default base environments for the given IDs. This process will + * asynchronously regenerate the caches. The existing caches remain available until they expire. + */ + public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) { + impl.refreshDefaultBaseEnvironments(request); + } + /** * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster * is restarted. A request to uninstall a library that is not currently installed is ignored. @@ -84,6 +145,24 @@ public void uninstall(UninstallLibraries request) { impl.uninstall(request); } + + /** + * Update the default base environment for the given ID. This process will asynchronously + * regenerate the cache. The existing cache remains available until it expires. + */ + public DefaultBaseEnvironment updateDefaultBaseEnvironment( + UpdateDefaultBaseEnvironmentRequest request) { + return impl.updateDefaultBaseEnvironment(request); + } + + /** + * Set the default base environment for the workspace. This marks the specified DBE as the + * workspace default.
+ */ + public DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment( + UpdateDefaultDefaultBaseEnvironmentRequest request) { + return impl.updateDefaultDefaultBaseEnvironment(request); + } + public LibrariesService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java index 6bb0dd63e..e5a04da90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java @@ -41,6 +41,48 @@ public ClusterLibraryStatuses clusterStatus(ClusterStatus request) { } } + @Override + public DefaultBaseEnvironment createDefaultBaseEnvironment( + CreateDefaultBaseEnvironmentRequest request) { + String path = "/api/2.0/default-base-environments"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DefaultBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) { + String path = String.format("/api/2.0/default-base-environments/%s", request.getId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultBaseEnvironment getDefaultBaseEnvironment( + GetDefaultBaseEnvironmentRequest request) { + String path = "/api/2.0/default-base-environments:getDefaultBaseEnvironment"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DefaultBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void install(InstallLibraries request) { String path = "/api/2.0/libraries/install"; @@ -55,6 +97,34 @@ public void install(InstallLibraries request) { } } + @Override + public ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments( + ListDefaultBaseEnvironmentsRequest request) { + String path = "/api/2.0/default-base-environments"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDefaultBaseEnvironmentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) { + String path = "/api/2.0/default-base-environments/refresh"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void uninstall(UninstallLibraries request) { 
String path = "/api/2.0/libraries/uninstall"; @@ -68,4 +138,34 @@ public void uninstall(UninstallLibraries request) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public DefaultBaseEnvironment updateDefaultBaseEnvironment( + UpdateDefaultBaseEnvironmentRequest request) { + String path = String.format("/api/2.0/default-base-environments/%s", request.getId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DefaultBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment( + UpdateDefaultDefaultBaseEnvironmentRequest request) { + String path = "/api/2.0/default-base-environments:setDefault"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DefaultBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java index 5ccaf55cb..d35cb4101 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java @@ -41,15 +41,59 @@ public interface LibrariesService { */ ClusterLibraryStatuses clusterStatus(ClusterStatus clusterStatus); + /** + * Create a default base environment within workspaces to define the environment version and a + * list of dependencies to be used in serverless notebooks and jobs. This process will + * asynchronously generate a cache to optimize dependency resolution. + */ + DefaultBaseEnvironment createDefaultBaseEnvironment( + CreateDefaultBaseEnvironmentRequest createDefaultBaseEnvironmentRequest); + + /** + * Delete the default base environment given an ID. The default base environment may be used by + * downstream workloads. Please ensure that the deletion is intentional. + */ + void deleteDefaultBaseEnvironment( + DeleteDefaultBaseEnvironmentRequest deleteDefaultBaseEnvironmentRequest); + + /** Return the default base environment details for a given ID. */ + DefaultBaseEnvironment getDefaultBaseEnvironment( + GetDefaultBaseEnvironmentRequest getDefaultBaseEnvironmentRequest); + /** * Add libraries to install on a cluster. The installation is asynchronous; it happens in the * background after the completion of this request. */ void install(InstallLibraries installLibraries); + + /** List default base environments defined in the workspaces for the requested user. */ + ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments( + ListDefaultBaseEnvironmentsRequest listDefaultBaseEnvironmentsRequest); + + /** + * Refresh the cached default base environments for the given IDs. This process will + * asynchronously regenerate the caches. The existing caches remain available until they expire.
+ */ + void refreshDefaultBaseEnvironments( + RefreshDefaultBaseEnvironmentsRequest refreshDefaultBaseEnvironmentsRequest); + /** * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster * is restarted. A request to uninstall a library that is not currently installed is ignored. */ void uninstall(UninstallLibraries uninstallLibraries); + + /** + * Update the default base environment for the given ID. This process will asynchronously + * regenerate the cache. The existing cache remains available until it expires. + */ + DefaultBaseEnvironment updateDefaultBaseEnvironment( + UpdateDefaultBaseEnvironmentRequest updateDefaultBaseEnvironmentRequest); + + /** + * Set the default base environment for the workspace. This marks the specified DBE as the + * workspace default. + */ + DefaultBaseEnvironment updateDefaultDefaultBaseEnvironment( + UpdateDefaultDefaultBaseEnvironmentRequest updateDefaultDefaultBaseEnvironmentRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java new file mode 100755 index 000000000..b4f8149ba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListDefaultBaseEnvironmentsRequest { + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListDefaultBaseEnvironmentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDefaultBaseEnvironmentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDefaultBaseEnvironmentsRequest that = (ListDefaultBaseEnvironmentsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDefaultBaseEnvironmentsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java new file mode 100755 index 000000000..c941b5fa9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
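Review note: the `Paginator` wiring in `LibrariesAPI.listDefaultBaseEnvironments` above keeps requesting pages until `next_page_token` comes back null or empty, so callers can iterate the result directly. A usage sketch, assuming a configured `WorkspaceClient`; the page size is illustrative:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.ListDefaultBaseEnvironmentsRequest;

public class ListDbes {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    ListDefaultBaseEnvironmentsRequest request =
        new ListDefaultBaseEnvironmentsRequest().setPageSize(50L); // illustrative page size
    // The returned Iterable transparently follows next_page_token across pages.
    for (DefaultBaseEnvironment dbe : w.libraries().listDefaultBaseEnvironments(request)) {
      System.out.println(dbe.getId() + "\t" + dbe.getName());
    }
  }
}
```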
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListDefaultBaseEnvironmentsResponse { + /** */ + @JsonProperty("default_base_environments") + private Collection defaultBaseEnvironments; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDefaultBaseEnvironmentsResponse setDefaultBaseEnvironments( + Collection defaultBaseEnvironments) { + this.defaultBaseEnvironments = defaultBaseEnvironments; + return this; + } + + public Collection getDefaultBaseEnvironments() { + return defaultBaseEnvironments; + } + + public ListDefaultBaseEnvironmentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDefaultBaseEnvironmentsResponse that = (ListDefaultBaseEnvironmentsResponse) o; + return Objects.equals(defaultBaseEnvironments, that.defaultBaseEnvironments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(defaultBaseEnvironments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDefaultBaseEnvironmentsResponse.class) + .add("defaultBaseEnvironments", defaultBaseEnvironments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java new file mode 100755 index 000000000..52a43e725 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Materialized Environment information enables environment sharing and reuse via Environment + * Caching during library installations. Currently this feature is only supported for Python + * libraries. + * + *
<p>
- If the env cache entry in LMv2 DB doesn't exist or is invalid, library installations and + * environment materialization will occur. New Materialized Environment metadata will be sent from + * DP upon successful library installations and environment materialization, and is persisted into + * the database by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment + * will be sent to DP by LMv2, and DP will restore the cached environment from a store instead of + * reinstalling libraries from scratch. + * + *
<p>
If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new + * version + */ +@Generated +public class MaterializedEnvironment { + /** The timestamp (in epoch milliseconds) when the materialized env is updated. */ + @JsonProperty("last_updated_timestamp") + private Long lastUpdatedTimestamp; + + public MaterializedEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + this.lastUpdatedTimestamp = lastUpdatedTimestamp; + return this; + } + + public Long getLastUpdatedTimestamp() { + return lastUpdatedTimestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MaterializedEnvironment that = (MaterializedEnvironment) o; + return Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp); + } + + @Override + public int hashCode() { + return Objects.hash(lastUpdatedTimestamp); + } + + @Override + public String toString() { + return new ToStringer(MaterializedEnvironment.class) + .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java new file mode 100755 index 000000000..7366ed43d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java @@ -0,0 +1,33 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** + * For Fleet-V2 using classic clusters, this object contains the information about the alternate + * node type ids to use when attempting to launch a cluster. It can be used with both the driver and + * worker node types. + */ +@Generated +public class NodeTypeFlexibility { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(NodeTypeFlexibility.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java new file mode 100755 index 000000000..c7f60255b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
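Review note: the two new instance pool fields are mutually exclusive: `enable_auto_alternate_node_types` asks the service to derive alternate node types automatically, while `node_type_flexibility` (currently an empty marker message, per `NodeTypeFlexibility` above) opts into explicitly managed alternates. A sketch of an edit call enabling the automatic variant, assuming a configured `WorkspaceClient`; the pool id, name, and node type are illustrative:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.EditInstancePool;

public class EnableAutoAlternateNodeTypes {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    w.instancePools()
        .edit(
            new EditInstancePool()
                .setInstancePoolId("pool-abc123") // illustrative pool id
                .setInstancePoolName("fleet-v2-pool") // illustrative
                .setNodeTypeId("m5.xlarge") // illustrative
                // Must not be combined with setNodeTypeFlexibility(...), per the field docs.
                .setEnableAutoAlternateNodeTypes(true));
  }
}
```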
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class RefreshDefaultBaseEnvironmentsRequest { + /** */ + @JsonProperty("ids") + private Collection ids; + + public RefreshDefaultBaseEnvironmentsRequest setIds(Collection ids) { + this.ids = ids; + return this; + } + + public Collection getIds() { + return ids; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RefreshDefaultBaseEnvironmentsRequest that = (RefreshDefaultBaseEnvironmentsRequest) o; + return Objects.equals(ids, that.ids); + } + + @Override + public int hashCode() { + return Objects.hash(ids); + } + + @Override + public String toString() { + return new ToStringer(RefreshDefaultBaseEnvironmentsRequest.class).add("ids", ids).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java index ef5dfa156..4ea0454c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java @@ -19,11 +19,19 @@ public class Results { @JsonProperty("data") private Object data; - /** The image filename */ + /** + * The image data in one of the following formats: + * + *
<p>
1. A Data URL with base64-encoded image data: `data:image/{type};base64,{base64-data}`. + * Example: `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...` + * + *
<p>
2. A FileStore file path for large images: `/plots/{filename}.png`. Example: + * `/plots/b6a7ad70-fb2c-4353-8aed-3f1e015174a4.png` + */ @JsonProperty("fileName") private String fileName; - /** */ + /** List of image data for multiple images. Each element follows the same format as file_name. */ @JsonProperty("fileNames") private Collection fileNames; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 18917be99..2dcf7125a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -45,6 +45,7 @@ public enum TerminationReasonCode { BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG, BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED, BUDGET_POLICY_RESOLUTION_FAILURE, + CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED, CLOUD_ACCOUNT_SETUP_FAILURE, CLOUD_OPERATION_CANCELLED, CLOUD_PROVIDER_DISK_SETUP_FAILURE, @@ -122,6 +123,7 @@ public enum TerminationReasonCode { IN_PENALTY_BOX, IP_EXHAUSTION_FAILURE, JOB_FINISHED, + K8S_ACTIVE_POD_QUOTA_EXCEEDED, K8S_AUTOSCALING_FAILURE, K8S_DBR_CLUSTER_LAUNCH_TIMEOUT, LAZY_ALLOCATION_TIMEOUT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java new file mode 100755 index 000000000..fcf7e1e9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
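Review note: the re-documented `fileName` field in `Results` now spells out two encodings, an inline base64 data URL and a FileStore path for large images. A small sketch of how a consumer might distinguish them (the helper name is ours, not part of the SDK):

```java
import java.util.Base64;

public class ResultImageDecoder {
  /**
   * Returns the decoded image bytes when fileName is a data URL
   * (data:image/{type};base64,{base64-data}); returns null for the FileStore
   * path form (/plots/{filename}.png), which must be downloaded separately.
   */
  static byte[] decodeInlineImage(String fileName) {
    if (fileName == null || !fileName.startsWith("data:image/")) {
      return null; // FileStore path form
    }
    int comma = fileName.indexOf(',');
    return Base64.getDecoder().decode(fileName.substring(comma + 1));
  }

  public static void main(String[] args) {
    // FileStore form decodes to null; inline form decodes to raw bytes.
    System.out.println(decodeInlineImage("/plots/b6a7ad70-fb2c-4353-8aed-3f1e015174a4.png"));
    System.out.println(decodeInlineImage("data:image/png;base64,aGVsbG8=").length);
  }
}
```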
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDefaultBaseEnvironmentRequest { + /** */ + @JsonProperty("default_base_environment") + private DefaultBaseEnvironment defaultBaseEnvironment; + + /** */ + @JsonIgnore private String id; + + public UpdateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment( + DefaultBaseEnvironment defaultBaseEnvironment) { + this.defaultBaseEnvironment = defaultBaseEnvironment; + return this; + } + + public DefaultBaseEnvironment getDefaultBaseEnvironment() { + return defaultBaseEnvironment; + } + + public UpdateDefaultBaseEnvironmentRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDefaultBaseEnvironmentRequest that = (UpdateDefaultBaseEnvironmentRequest) o; + return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment) + && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(defaultBaseEnvironment, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateDefaultBaseEnvironmentRequest.class) + .add("defaultBaseEnvironment", defaultBaseEnvironment) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java new file mode 100755 index 000000000..3cd5d7508 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultDefaultBaseEnvironmentRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
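Review note: `UpdateDefaultBaseEnvironmentRequest` keeps the `id` out of the JSON body (`@JsonIgnore`) because it is the URL path parameter for the PATCH shown in `LibrariesImpl` earlier. A call sketch, assuming a configured `WorkspaceClient`; ids and names are illustrative:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.UpdateDefaultBaseEnvironmentRequest;

public class UpdateDbe {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // The id becomes the URL path segment; the body holds the updated definition.
    DefaultBaseEnvironment updated =
        w.libraries()
            .updateDefaultBaseEnvironment(
                new UpdateDefaultBaseEnvironmentRequest()
                    .setId("dbe-1234") // illustrative id
                    .setDefaultBaseEnvironment(
                        new DefaultBaseEnvironment().setName("team-base-env-v2")));
    // The previous cache stays valid until it expires while a new one is generated.
    System.out.println(updated.getStatus());
  }
}
```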
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDefaultDefaultBaseEnvironmentRequest { + /** */ + @JsonProperty("base_environment_type") + private BaseEnvironmentType baseEnvironmentType; + + /** */ + @JsonProperty("id") + private String id; + + public UpdateDefaultDefaultBaseEnvironmentRequest setBaseEnvironmentType( + BaseEnvironmentType baseEnvironmentType) { + this.baseEnvironmentType = baseEnvironmentType; + return this; + } + + public BaseEnvironmentType getBaseEnvironmentType() { + return baseEnvironmentType; + } + + public UpdateDefaultDefaultBaseEnvironmentRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDefaultDefaultBaseEnvironmentRequest that = + (UpdateDefaultDefaultBaseEnvironmentRequest) o; + return Objects.equals(baseEnvironmentType, that.baseEnvironmentType) + && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(baseEnvironmentType, id); + } + + @Override + public String toString() { + return new ToStringer(UpdateDefaultDefaultBaseEnvironmentRequest.class) + .add("baseEnvironmentType", baseEnvironmentType) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java new file mode 100755 index 000000000..07776b06d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
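Review note: the doubled name in `updateDefaultDefaultBaseEnvironment` is easy to misread: it marks the given DBE as the workspace's default default base environment via the `:setDefault` endpoint shown in `LibrariesImpl` earlier. A call sketch with an illustrative id:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.UpdateDefaultDefaultBaseEnvironmentRequest;

public class SetWorkspaceDefaultDbe {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    DefaultBaseEnvironment dbe =
        w.libraries()
            .updateDefaultDefaultBaseEnvironment(
                new UpdateDefaultDefaultBaseEnvironmentRequest().setId("dbe-1234")); // illustrative
    System.out.println(dbe.getIsDefault()); // expected to report true once applied
  }
}
```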
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CancelPublishedQueryExecutionRequest { + /** */ + @JsonIgnore + @QueryParam("dashboard_name") + private String dashboardName; + + /** */ + @JsonIgnore + @QueryParam("dashboard_revision_id") + private String dashboardRevisionId; + + /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ + @JsonIgnore + @QueryParam("tokens") + private Collection tokens; + + public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) { + this.dashboardName = dashboardName; + return this; + } + + public String getDashboardName() { + return dashboardName; + } + + public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) { + this.dashboardRevisionId = dashboardRevisionId; + return this; + } + + public String getDashboardRevisionId() { + return dashboardRevisionId; + } + + public CancelPublishedQueryExecutionRequest setTokens(Collection tokens) { + this.tokens = tokens; + return this; + } + + public Collection getTokens() { + return tokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o; + return Objects.equals(dashboardName, that.dashboardName) + && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) + && Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardName, dashboardRevisionId, tokens); + } + + @Override + public String toString() { + return new ToStringer(CancelPublishedQueryExecutionRequest.class) + .add("dashboardName", dashboardName) + .add("dashboardRevisionId", dashboardRevisionId) + .add("tokens", tokens) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java new file mode 100755 index 000000000..3476fb9ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
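Review note: `CancelPublishedQueryExecutionRequest` sends everything as query parameters (all fields are `@JsonIgnore` plus `@QueryParam`), including the data tokens previously returned when polling an execution. A request-construction sketch; the values are illustrative and the owning service class is outside this hunk:

```java
import com.databricks.sdk.service.dashboards.CancelPublishedQueryExecutionRequest;
import java.util.Arrays;

public class BuildCancelRequest {
  public static void main(String[] args) {
    CancelPublishedQueryExecutionRequest req =
        new CancelPublishedQueryExecutionRequest()
            .setDashboardName("sales-overview") // illustrative
            .setDashboardRevisionId("12") // illustrative
            // Tokens identify the in-flight executions to cancel.
            .setTokens(Arrays.asList("EC0A..example-token")); // illustrative token
    System.out.println(req);
  }
}
```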
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CancelQueryExecutionResponse { + /** */ + @JsonProperty("status") + private Collection<CancelQueryExecutionResponseStatus> status; + + public CancelQueryExecutionResponse setStatus( + Collection<CancelQueryExecutionResponseStatus> status) { + this.status = status; + return this; + } + + public Collection<CancelQueryExecutionResponseStatus> getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } + + @Override + public String toString() { + return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java new file mode 100755 index 000000000..3d8a03c06 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CancelQueryExecutionResponseStatus { + /** + * The token to poll for the result asynchronously. Example: + * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + */ + @JsonProperty("data_token") + private String dataToken; + + /** */ + @JsonProperty("pending") + private Empty pending; + + /** */ + @JsonProperty("success") + private Empty success; + + public CancelQueryExecutionResponseStatus setDataToken(String dataToken) { + this.dataToken = dataToken; + return this; + } + + public String getDataToken() { + return dataToken; + } + + public CancelQueryExecutionResponseStatus setPending(Empty pending) { + this.pending = pending; + return this; + } + + public Empty getPending() { + return pending; + } + + public CancelQueryExecutionResponseStatus setSuccess(Empty success) { + this.success = success; + return this; + } + + public Empty getSuccess() { + return success; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o; + return Objects.equals(dataToken, that.dataToken) + && Objects.equals(pending, that.pending) + && Objects.equals(success, that.success); + } + + @Override + public int hashCode() { + return Objects.hash(dataToken, pending, success); + } + + @Override + public String toString() { + return new ToStringer(CancelQueryExecutionResponseStatus.class) + .add("dataToken", dataToken) + .add("pending", pending) + .add("success", success) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java new file mode 100755 index 000000000..8714d62a6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** + * Represents an empty message, similar to google.protobuf.Empty, which is not available in this + * codebase right now. + */ +@Generated +public class Empty { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(Empty.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java new file mode 100755 index 000000000..c5223007c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Execute query request for published Dashboards.
Since published dashboards have the option of + * running as the publisher, the datasets and warehouse_id are excluded from the request and instead + * read from the source (lakeview-config) via the additional parameters (dashboardName and + * dashboardRevisionId). + */ +@Generated +public class ExecutePublishedDashboardQueryRequest { + /** + * Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel, which contains + * the list of datasets, warehouse_id, and embedded_credentials + */ + @JsonProperty("dashboard_name") + private String dashboardName; + + /** */ + @JsonProperty("dashboard_revision_id") + private String dashboardRevisionId; + + /** + * A dashboard schedule can override the warehouse used as compute for processing the published + * dashboard queries + */ + @JsonProperty("override_warehouse_id") + private String overrideWarehouseId; + + public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) { + this.dashboardName = dashboardName; + return this; + } + + public String getDashboardName() { + return dashboardName; + } + + public ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) { + this.dashboardRevisionId = dashboardRevisionId; + return this; + } + + public String getDashboardRevisionId() { + return dashboardRevisionId; + } + + public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) { + this.overrideWarehouseId = overrideWarehouseId; + return this; + } + + public String getOverrideWarehouseId() { + return overrideWarehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o; + return Objects.equals(dashboardName, that.dashboardName) + && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) + && Objects.equals(overrideWarehouseId, that.overrideWarehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId); + } + + @Override + public String toString() { + return new ToStringer(ExecutePublishedDashboardQueryRequest.class) + .add("dashboardName", dashboardName) + .add("dashboardRevisionId", dashboardRevisionId) + .add("overrideWarehouseId", overrideWarehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index 7bd915755..97dd4d4c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -110,6 +110,11 @@ public Wait createMessage( response); } + /** Creates a Genie space from a serialized payload. */ + public GenieSpace createSpace(GenieCreateSpaceRequest request) { + return impl.createSpace(request); + } + public void deleteConversation(String spaceId, String conversationId) { deleteConversation( new GenieDeleteConversationRequest().setSpaceId(spaceId).setConversationId(conversationId)); @@ -151,6 +156,49 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( return impl.executeMessageQuery(request); } + /** + * Initiates a new SQL execution and returns a `download_id` that you can use to track the + * progress of the download.
The query result is stored in an external link and can be retrieved + * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. + * Warning: Databricks strongly recommends that you protect the URLs that are returned by the + * `EXTERNAL_LINKS` disposition. See [Execute + * Statement](:method:statementexecution/executestatement) for more details. + */ + public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest request) { + return impl.generateDownloadFullQueryResult(request); + } + + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + String spaceId, + String conversationId, + String messageId, + String attachmentId, + String downloadId) { + return getDownloadFullQueryResult( + new GenieGetDownloadFullQueryResultRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId) + .setAttachmentId(attachmentId) + .setDownloadId(downloadId)); + } + + /** + * After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) and + * successfully receiving a `download_id`, use this API to poll the download progress. When the + * download is complete, the API returns one or more external links to the query result files. + * Warning: Databricks strongly recommends that you protect the URLs that are returned by the + * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests. + * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant + * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement) + * for more details. + */ + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest request) { + return impl.getDownloadFullQueryResult(request); + } + public GenieMessage getMessage(String spaceId, String conversationId, String messageId) { return getMessage( new GenieGetConversationMessageRequest() @@ -286,6 +334,11 @@ public void trashSpace(GenieTrashSpaceRequest request) { impl.trashSpace(request); } + /** Updates a Genie space with a serialized payload. */ + public GenieSpace updateSpace(GenieUpdateSpaceRequest request) { + return impl.updateSpace(request); + } + public GenieService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java new file mode 100755 index 000000000..5d425c84b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
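A minimal sketch of the two-step download flow added to GenieAPI above: generate first, then poll with the returned download_id. It assumes a configured WorkspaceClient; the ID literals are hypothetical placeholders, and the loop's completion check (a non-null statement response) is an assumption about the response contract, not documented behavior.

// Sketch only. SPACE_ID etc. are hypothetical placeholders, and the non-null
// statementResponse check is an assumed completion signal.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultRequest;
import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse;
import com.databricks.sdk.service.dashboards.GenieGetDownloadFullQueryResultResponse;

public class GenieDownloadFlow {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // Step 1: initiate the SQL execution and capture the download_id.
    GenieGenerateDownloadFullQueryResultResponse gen =
        w.genie()
            .generateDownloadFullQueryResult(
                new GenieGenerateDownloadFullQueryResultRequest()
                    .setSpaceId("SPACE_ID")
                    .setConversationId("CONVERSATION_ID")
                    .setMessageId("MESSAGE_ID")
                    .setAttachmentId("ATTACHMENT_ID"));

    // Step 2: poll with the download_id until the statement response (which
    // carries the EXTERNAL_LINKS result) is populated.
    GenieGetDownloadFullQueryResultResponse result;
    do {
      Thread.sleep(2000);
      result =
          w.genie()
              .getDownloadFullQueryResult(
                  "SPACE_ID",
                  "CONVERSATION_ID",
                  "MESSAGE_ID",
                  "ATTACHMENT_ID",
                  gen.getDownloadId());
    } while (result.getStatementResponse() == null);
  }
}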
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieCreateSpaceRequest { + /** Optional description */ + @JsonProperty("description") + private String description; + + /** Parent folder path where the space will be registered */ + @JsonProperty("parent_path") + private String parentPath; + + /** Serialized export model for the space contents */ + @JsonProperty("serialized_space") + private String serializedSpace; + + /** Optional title override */ + @JsonProperty("title") + private String title; + + /** Warehouse to associate with the new space */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public GenieCreateSpaceRequest setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public GenieCreateSpaceRequest setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public GenieCreateSpaceRequest setSerializedSpace(String serializedSpace) { + this.serializedSpace = serializedSpace; + return this; + } + + public String getSerializedSpace() { + return serializedSpace; + } + + public GenieCreateSpaceRequest setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + public GenieCreateSpaceRequest setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieCreateSpaceRequest that = (GenieCreateSpaceRequest) o; + return Objects.equals(description, that.description) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(serializedSpace, that.serializedSpace) + && Objects.equals(title, that.title) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(description, parentPath, serializedSpace, title, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(GenieCreateSpaceRequest.class) + .add("description", description) + .add("parentPath", parentPath) + .add("serializedSpace", serializedSpace) + .add("title", title) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java index 92d35fc98..86339a735 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java @@ -10,10 +10,23 @@ /** Feedback containing rating and optional comment */ @Generated public class GenieFeedback { + /** Optional feedback comment text */ + @JsonProperty("comment") + private String comment; + /** The feedback rating */ @JsonProperty("rating") private GenieFeedbackRating rating; + public GenieFeedback setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + public GenieFeedback setRating(GenieFeedbackRating 
rating) { this.rating = rating; return this; @@ -28,16 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieFeedback that = (GenieFeedback) o; - return Objects.equals(rating, that.rating); + return Objects.equals(comment, that.comment) && Objects.equals(rating, that.rating); } @Override public int hashCode() { - return Objects.hash(rating); + return Objects.hash(comment, rating); } @Override public String toString() { - return new ToStringer(GenieFeedback.class).add("rating", rating).toString(); + return new ToStringer(GenieFeedback.class) + .add("comment", comment) + .add("rating", rating) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java new file mode 100755 index 000000000..7dc36298f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GenieGenerateDownloadFullQueryResultRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Genie space ID */ + @JsonIgnore private String spaceId; + + public GenieGenerateDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGenerateDownloadFullQueryResultRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGenerateDownloadFullQueryResultRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGenerateDownloadFullQueryResultRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultRequest that = + (GenieGenerateDownloadFullQueryResultRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGenerateDownloadFullQueryResultRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java new file mode 100755 index 000000000..e51751c8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieGenerateDownloadFullQueryResultResponse { + /** Download ID. Use this ID to track the download request in subsequent polling calls */ + @JsonProperty("download_id") + private String downloadId; + + public GenieGenerateDownloadFullQueryResultResponse setDownloadId(String downloadId) { + this.downloadId = downloadId; + return this; + } + + public String getDownloadId() { + return downloadId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultResponse that = + (GenieGenerateDownloadFullQueryResultResponse) o; + return Objects.equals(downloadId, that.downloadId); + } + + @Override + public int hashCode() { + return Objects.hash(downloadId); + } + + @Override + public String toString() { + return new ToStringer(GenieGenerateDownloadFullQueryResultResponse.class) + .add("downloadId", downloadId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java new file mode 100755 index 000000000..73fd97ba2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java @@ -0,0 +1,102 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GenieGetDownloadFullQueryResultRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** + * Download ID. 
This ID is provided by the [Generate Download + * endpoint](:method:genie/generateDownloadFullQueryResult) + */ + @JsonIgnore private String downloadId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Genie space ID */ + @JsonIgnore private String spaceId; + + public GenieGetDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetDownloadFullQueryResultRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetDownloadFullQueryResultRequest setDownloadId(String downloadId) { + this.downloadId = downloadId; + return this; + } + + public String getDownloadId() { + return downloadId; + } + + public GenieGetDownloadFullQueryResultRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetDownloadFullQueryResultRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultRequest that = (GenieGetDownloadFullQueryResultRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(downloadId, that.downloadId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, downloadId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetDownloadFullQueryResultRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("downloadId", downloadId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java new file mode 100755 index 000000000..490c5c518 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieGetDownloadFullQueryResultResponse { + /** + * SQL Statement Execution response. See [Get status, manifest, and result first + * chunk](:method:statementexecution/getstatement) for more details. 
+ */ + @JsonProperty("statement_response") + private com.databricks.sdk.service.sql.StatementResponse statementResponse; + + public GenieGetDownloadFullQueryResultResponse setStatementResponse( + com.databricks.sdk.service.sql.StatementResponse statementResponse) { + this.statementResponse = statementResponse; + return this; + } + + public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() { + return statementResponse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultResponse that = (GenieGetDownloadFullQueryResultResponse) o; + return Objects.equals(statementResponse, that.statementResponse); + } + + @Override + public int hashCode() { + return Objects.hash(statementResponse); + } + + @Override + public String toString() { + return new ToStringer(GenieGetDownloadFullQueryResultResponse.class) + .add("statementResponse", statementResponse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 432a981a9..e18be2894 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -33,6 +33,20 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) } } + @Override + public GenieSpace createSpace(GenieCreateSpaceRequest request) { + String path = "/api/2.0/genie/spaces"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, GenieSpace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void deleteConversation(GenieDeleteConversationRequest request) { String path = @@ -102,6 +116,47 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( } } + @Override + public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGenerateDownloadFullQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads/%s", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId(), + request.getDownloadId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetDownloadFullQueryResultResponse.class); + } catch 
(IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieMessage getMessage(GenieGetConversationMessageRequest request) { String path = @@ -276,4 +331,18 @@ public void trashSpace(GenieTrashSpaceRequest request) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public GenieSpace updateSpace(GenieUpdateSpaceRequest request) { + String path = String.format("/api/2.0/genie/spaces/%s", request.getSpaceId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, GenieSpace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java index 5ad10ce62..cc0a48ab5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated @@ -21,6 +22,10 @@ public class GenieQueryAttachment { @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; + /** */ + @JsonProperty("parameters") + private Collection<QueryAttachmentParameter> parameters; + /** AI generated SQL query */ @JsonProperty("query") private String query; @@ -67,6 +72,15 @@ public Long getLastUpdatedTimestamp() { return lastUpdatedTimestamp; } + public GenieQueryAttachment setParameters(Collection<QueryAttachmentParameter> parameters) { + this.parameters = parameters; + return this; + } + + public Collection<QueryAttachmentParameter> getParameters() { + return parameters; + } + public GenieQueryAttachment setQuery(String query) { this.query = query; return this; @@ -111,6 +125,7 @@ public boolean equals(Object o) { return Objects.equals(description, that.description) && Objects.equals(id, that.id) && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(parameters, that.parameters) && Objects.equals(query, that.query) && Objects.equals(queryResultMetadata, that.queryResultMetadata) && Objects.equals(statementId, that.statementId) @@ -120,7 +135,14 @@ @Override public int hashCode() { return Objects.hash( - description, id, lastUpdatedTimestamp, query, queryResultMetadata, statementId, title); + description, + id, + lastUpdatedTimestamp, + parameters, + query, + queryResultMetadata, + statementId, + title); } @Override @@ -129,6 +151,7 @@ public String toString() { .add("description", description) .add("id", id) .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("parameters", parameters) .add("query", query) .add("queryResultMetadata", queryResultMetadata) .add("statementId", statementId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java index 2ddc8b835..656cfabbf 100755 ---
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java @@ -10,6 +10,10 @@ @Generated public class GenieSendMessageFeedbackRequest { + /** Optional text feedback that will be stored as a comment. */ + @JsonProperty("comment") + private String comment; + /** The ID associated with the conversation. */ @JsonIgnore private String conversationId; @@ -23,6 +27,15 @@ public class GenieSendMessageFeedbackRequest { /** The ID associated with the Genie space where the message is located. */ @JsonIgnore private String spaceId; + public GenieSendMessageFeedbackRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + public GenieSendMessageFeedbackRequest setConversationId(String conversationId) { this.conversationId = conversationId; return this; @@ -64,7 +77,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieSendMessageFeedbackRequest that = (GenieSendMessageFeedbackRequest) o; - return Objects.equals(conversationId, that.conversationId) + return Objects.equals(comment, that.comment) + && Objects.equals(conversationId, that.conversationId) && Objects.equals(messageId, that.messageId) && Objects.equals(rating, that.rating) && Objects.equals(spaceId, that.spaceId); @@ -72,12 +86,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(conversationId, messageId, rating, spaceId); + return Objects.hash(comment, conversationId, messageId, rating, spaceId); } @Override public String toString() { return new ToStringer(GenieSendMessageFeedbackRequest.class) + .add("comment", comment) .add("conversationId", conversationId) .add("messageId", messageId) .add("rating", rating) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index d12aa918c..37455fc2d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -22,6 +22,9 @@ public interface GenieService { GenieMessage createMessage( GenieCreateConversationMessageRequest genieCreateConversationMessageRequest); + /** Creates a Genie space from a serialized payload. */ + GenieSpace createSpace(GenieCreateSpaceRequest genieCreateSpaceRequest); + /** Delete a conversation. */ void deleteConversation(GenieDeleteConversationRequest genieDeleteConversationRequest); @@ -43,6 +46,30 @@ GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( GenieGetMessageQueryResultResponse executeMessageQuery( GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest); + /** + * Initiates a new SQL execution and returns a `download_id` that you can use to track the + * progress of the download. The query result is stored in an external link and can be retrieved + * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. + * Warning: Databricks strongly recommends that you protect the URLs that are returned by the + * `EXTERNAL_LINKS` disposition. See [Execute + * Statement](:method:statementexecution/executestatement) for more details. 
+ */ + GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest); + + /** + * After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) and + * successfully receiving a `download_id`, use this API to poll the download progress. When the + * download is complete, the API returns one or more external links to the query result files. + * Warning: Databricks strongly recommends that you protect the URLs that are returned by the + * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests. + * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant + * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement) + * for more details. + */ + GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest genieGetDownloadFullQueryResultRequest); + /** Get message from conversation. */ GenieMessage getMessage(GenieGetConversationMessageRequest genieGetConversationMessageRequest); @@ -90,4 +117,7 @@ GenieStartConversationResponse startConversation( /** Move a Genie Space to the trash. */ void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest); + + /** Updates a Genie space with a serialized payload. */ + GenieSpace updateSpace(GenieUpdateSpaceRequest genieUpdateSpaceRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java new file mode 100755 index 000000000..e8e67cedf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieUpdateSpaceRequest { + /** Optional description */ + @JsonProperty("description") + private String description; + + /** Serialized export model for the space contents (full replacement) */ + @JsonProperty("serialized_space") + private String serializedSpace; + + /** Genie space ID */ + @JsonIgnore private String spaceId; + + /** Optional title override */ + @JsonProperty("title") + private String title; + + /** Optional warehouse override */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public GenieUpdateSpaceRequest setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public GenieUpdateSpaceRequest setSerializedSpace(String serializedSpace) { + this.serializedSpace = serializedSpace; + return this; + } + + public String getSerializedSpace() { + return serializedSpace; + } + + public GenieUpdateSpaceRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieUpdateSpaceRequest setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + public GenieUpdateSpaceRequest setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieUpdateSpaceRequest that = (GenieUpdateSpaceRequest) o; + return Objects.equals(description, that.description) + && Objects.equals(serializedSpace, that.serializedSpace) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(title, that.title) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(description, serializedSpace, spaceId, title, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(GenieUpdateSpaceRequest.class) + .add("description", description) + .add("serializedSpace", serializedSpace) + .add("spaceId", spaceId) + .add("title", title) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java new file mode 100755 index 000000000..95f6048f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetPublishedDashboardEmbeddedRequest { + /** UUID identifying the published dashboard. 
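A companion sketch for the create/update pair defined above: createSpace registers a space from a serialized export model, and updateSpace replaces its contents. The payload, path, and warehouse literals are placeholders, and the sketch assumes the returned GenieSpace exposes getSpaceId().

// Sketch only. The string literals are placeholders, not values from this change.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieCreateSpaceRequest;
import com.databricks.sdk.service.dashboards.GenieSpace;
import com.databricks.sdk.service.dashboards.GenieUpdateSpaceRequest;

public class GenieSpaceLifecycle {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // POST /api/2.0/genie/spaces: register a space from a serialized export model.
    GenieSpace created =
        w.genie()
            .createSpace(
                new GenieCreateSpaceRequest()
                    .setSerializedSpace("{ /* exported space model */ }")
                    .setParentPath("/Workspace/Users/someone@example.com")
                    .setWarehouseId("WAREHOUSE_ID")
                    .setTitle("Sales Genie"));

    // PATCH /api/2.0/genie/spaces/{space_id}: serialized_space is documented
    // above as a full replacement of the space contents.
    w.genie()
        .updateSpace(
            new GenieUpdateSpaceRequest()
                .setSpaceId(created.getSpaceId()) // assumes GenieSpace has getSpaceId()
                .setSerializedSpace("{ /* revised space model */ }")
                .setTitle("Sales Genie v2"));
  }
}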
*/ + @JsonIgnore private String dashboardId; + + public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardEmbeddedRequest.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java index ed46478a1..a7bc6c10d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java @@ -23,6 +23,16 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) { impl = mock; } + public void getPublishedDashboardEmbedded(String dashboardId) { + getPublishedDashboardEmbedded( + new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId)); + } + + /** Get the current published dashboard within an embedded context. */ + public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { + impl.getPublishedDashboardEmbedded(request); + } + public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) { return getPublishedDashboardTokenInfo( new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java index 55a489702..171eb1e7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java @@ -16,6 +16,21 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) { this.apiClient = apiClient; } + @Override + public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { + String path = + String.format( + "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( GetPublishedDashboardTokenInfoRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java index 3aa679410..98c1b546d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java @@ -12,6 +12,10 @@ */ @Generated public interface LakeviewEmbeddedService { + /** Get the current published dashboard within an embedded context. */ + void getPublishedDashboardEmbedded( + GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest); + /** * Get a required authorization details and scopes of a published dashboard to mint an OAuth * token. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index 8b6b10fc7..75e28eb70 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -18,6 +18,7 @@ public enum MessageErrorType { DESCRIBE_QUERY_INVALID_SQL_ERROR, DESCRIBE_QUERY_TIMEOUT, DESCRIBE_QUERY_UNEXPECTED_FAILURE, + EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION, FUNCTIONS_NOT_AVAILABLE_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION, @@ -28,6 +29,9 @@ public enum MessageErrorType { GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION, GENERIC_SQL_EXEC_API_CALL_EXCEPTION, ILLEGAL_PARAMETER_DEFINITION_EXCEPTION, + INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION, + INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION, + INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION, INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION, INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION, INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java new file mode 100755 index 000000000..f041070b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
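The embedded-context GET added to LakeviewEmbedded above returns no body, so the call is fire-and-check; a one-call usage sketch (the UUID is a placeholder):

// Sketch only: success is an empty response; errors surface as DatabricksException.
import com.databricks.sdk.WorkspaceClient;

public class EmbeddedDashboardCheck {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // GET /api/2.0/lakeview/dashboards/{id}/published/embedded
    w.lakeviewEmbedded().getPublishedDashboardEmbedded("11111111-2222-3333-4444-555555555555");
  }
}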
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PendingStatus { + /** + * The token to poll for result asynchronously. Example: + * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + */ + @JsonProperty("data_token") + private String dataToken; + + public PendingStatus setDataToken(String dataToken) { + this.dataToken = dataToken; + return this; + } + + public String getDataToken() { + return dataToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PendingStatus that = (PendingStatus) o; + return Objects.equals(dataToken, that.dataToken); + } + + @Override + public int hashCode() { + return Objects.hash(dataToken); + } + + @Override + public String toString() { + return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java new file mode 100755 index 000000000..e34c7af59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class PollPublishedQueryStatusRequest { + /** */ + @JsonIgnore + @QueryParam("dashboard_name") + private String dashboardName; + + /** */ + @JsonIgnore + @QueryParam("dashboard_revision_id") + private String dashboardRevisionId; + + /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ + @JsonIgnore + @QueryParam("tokens") + private Collection<String> tokens; + + public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) { + this.dashboardName = dashboardName; + return this; + } + + public String getDashboardName() { + return dashboardName; + } + + public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) { + this.dashboardRevisionId = dashboardRevisionId; + return this; + } + + public String getDashboardRevisionId() { + return dashboardRevisionId; + } + + public PollPublishedQueryStatusRequest setTokens(Collection<String> tokens) { + this.tokens = tokens; + return this; + } + + public Collection<String> getTokens() { + return tokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PollPublishedQueryStatusRequest that = (PollPublishedQueryStatusRequest) o; + return Objects.equals(dashboardName, that.dashboardName) + && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) + && Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardName, dashboardRevisionId, tokens); + } + + @Override + public String toString() { + return new ToStringer(PollPublishedQueryStatusRequest.class)
+ .add("dashboardName", dashboardName) + .add("dashboardRevisionId", dashboardRevisionId) + .add("tokens", tokens) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java new file mode 100755 index 000000000..778e1d961 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class PollQueryStatusResponse { + /** */ + @JsonProperty("data") + private Collection<PollQueryStatusResponseData> data; + + public PollQueryStatusResponse setData(Collection<PollQueryStatusResponseData> data) { + this.data = data; + return this; + } + + public Collection<PollQueryStatusResponseData> getData() { + return data; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PollQueryStatusResponse that = (PollQueryStatusResponse) o; + return Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(data); + } + + @Override + public String toString() { + return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java new file mode 100755 index 000000000..9de9b2743 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PollQueryStatusResponseData { + /** */ + @JsonProperty("status") + private QueryResponseStatus status; + + public PollQueryStatusResponseData setStatus(QueryResponseStatus status) { + this.status = status; + return this; + } + + public QueryResponseStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PollQueryStatusResponseData that = (PollQueryStatusResponseData) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } + + @Override + public String toString() { + return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java new file mode 100755 index 000000000..e2609ee6a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachmentParameter.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class QueryAttachmentParameter { + /** */ + @JsonProperty("keyword") + private String keyword; + + /** */ + @JsonProperty("sql_type") + private String sqlType; + + /** */ + @JsonProperty("value") + private String value; + + public QueryAttachmentParameter setKeyword(String keyword) { + this.keyword = keyword; + return this; + } + + public String getKeyword() { + return keyword; + } + + public QueryAttachmentParameter setSqlType(String sqlType) { + this.sqlType = sqlType; + return this; + } + + public String getSqlType() { + return sqlType; + } + + public QueryAttachmentParameter setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryAttachmentParameter that = (QueryAttachmentParameter) o; + return Objects.equals(keyword, that.keyword) + && Objects.equals(sqlType, that.sqlType) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(keyword, sqlType, value); + } + + @Override + public String toString() { + return new ToStringer(QueryAttachmentParameter.class) + .add("keyword", keyword) + .add("sqlType", sqlType) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java new file mode 100755 index 000000000..5ab84661d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Query execution APIs for AI / BI Dashboards */ +@Generated +public class QueryExecutionAPI { + private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class); + + private final QueryExecutionService impl; + + /** Regular-use constructor */ + public QueryExecutionAPI(ApiClient apiClient) { + impl = new QueryExecutionImpl(apiClient); + } + + /** Constructor for mocks */ + public QueryExecutionAPI(QueryExecutionService mock) { + impl = mock; + } + + public CancelQueryExecutionResponse cancelPublishedQueryExecution( + String dashboardName, String dashboardRevisionId) { + return cancelPublishedQueryExecution( + new CancelPublishedQueryExecutionRequest() + .setDashboardName(dashboardName) + .setDashboardRevisionId(dashboardRevisionId)); + } + + /** Cancel the results for a query for a published, embedded dashboard. */ + public CancelQueryExecutionResponse cancelPublishedQueryExecution( + CancelPublishedQueryExecutionRequest request) { + return impl.cancelPublishedQueryExecution(request); + } + + /** Execute a query for a published dashboard. */ + public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { + impl.executePublishedDashboardQuery(request); + } + + public PollQueryStatusResponse pollPublishedQueryStatus( + String dashboardName, String dashboardRevisionId) { + return pollPublishedQueryStatus( + new PollPublishedQueryStatusRequest() + .setDashboardName(dashboardName) + .setDashboardRevisionId(dashboardRevisionId)); + } + + /** Poll the results for a query for a published, embedded dashboard. */ + public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { + return impl.pollPublishedQueryStatus(request); + } + + public QueryExecutionService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java new file mode 100755 index 000000000..19efc614a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
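Putting the three QueryExecution methods together, a sketch of the execute / poll / cancel cycle against one published revision. It assumes the service is surfaced on WorkspaceClient as queryExecution() (that accessor is not shown in this diff); the name and revision literals are placeholders.

// Sketch only. w.queryExecution() is an assumed accessor; IDs are placeholders.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.ExecutePublishedDashboardQueryRequest;
import com.databricks.sdk.service.dashboards.PollQueryStatusResponse;

public class PublishedQueryFlow {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // POST /api/2.0/lakeview-query/query/published: start the queries.
    w.queryExecution()
        .executePublishedDashboardQuery(
            new ExecutePublishedDashboardQueryRequest()
                .setDashboardName("DASHBOARD_NAME")
                .setDashboardRevisionId("REVISION_ID"));

    // GET the same path: poll per-query statuses for that name/revision pair.
    PollQueryStatusResponse statuses =
        w.queryExecution().pollPublishedQueryStatus("DASHBOARD_NAME", "REVISION_ID");

    // DELETE the same path: cancel whatever is still running.
    w.queryExecution().cancelPublishedQueryExecution("DASHBOARD_NAME", "REVISION_ID");
  }
}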
+package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of QueryExecution */ +@Generated +class QueryExecutionImpl implements QueryExecutionService { + private final ApiClient apiClient; + + public QueryExecutionImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CancelQueryExecutionResponse cancelPublishedQueryExecution( + CancelPublishedQueryExecutionRequest request) { + String path = "/api/2.0/lakeview-query/query/published"; + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CancelQueryExecutionResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { + String path = "/api/2.0/lakeview-query/query/published"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { + String path = "/api/2.0/lakeview-query/query/published"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PollQueryStatusResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java new file mode 100755 index 000000000..d30cda5b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java @@ -0,0 +1,26 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; + +/** + * Query execution APIs for AI / BI Dashboards + * + *

This is the high-level interface that contains generated methods. + * +

Evolving: this interface is under development. Method signatures may change. */ +@Generated +public interface QueryExecutionService { + /** Cancel the results for a query for a published, embedded dashboard. */ + CancelQueryExecutionResponse cancelPublishedQueryExecution( + CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest); + + /** Execute a query for a published dashboard. */ + void executePublishedDashboardQuery( + ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest); + + /** Poll the results for a query for a published, embedded dashboard. */ + PollQueryStatusResponse pollPublishedQueryStatus( + PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java new file mode 100755 index 000000000..a57d202ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class QueryResponseStatus { + /** */ + @JsonProperty("canceled") + private Empty canceled; + + /** */ + @JsonProperty("closed") + private Empty closed; + + /** */ + @JsonProperty("pending") + private PendingStatus pending; + + /** + * The statement id, in the format 01eef5da-c56e-1f36-bafa-21906587d6ba. The statement_id should be + * identical to data_token in SuccessStatus and PendingStatus. This field exists for audit + * logging purposes, to record the statement_id of every QueryResponseStatus.
+ */ + @JsonProperty("statement_id") + private String statementId; + + /** */ + @JsonProperty("success") + private SuccessStatus success; + + public QueryResponseStatus setCanceled(Empty canceled) { + this.canceled = canceled; + return this; + } + + public Empty getCanceled() { + return canceled; + } + + public QueryResponseStatus setClosed(Empty closed) { + this.closed = closed; + return this; + } + + public Empty getClosed() { + return closed; + } + + public QueryResponseStatus setPending(PendingStatus pending) { + this.pending = pending; + return this; + } + + public PendingStatus getPending() { + return pending; + } + + public QueryResponseStatus setStatementId(String statementId) { + this.statementId = statementId; + return this; + } + + public String getStatementId() { + return statementId; + } + + public QueryResponseStatus setSuccess(SuccessStatus success) { + this.success = success; + return this; + } + + public SuccessStatus getSuccess() { + return success; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryResponseStatus that = (QueryResponseStatus) o; + return Objects.equals(canceled, that.canceled) + && Objects.equals(closed, that.closed) + && Objects.equals(pending, that.pending) + && Objects.equals(statementId, that.statementId) + && Objects.equals(success, that.success); + } + + @Override + public int hashCode() { + return Objects.hash(canceled, closed, pending, statementId, success); + } + + @Override + public String toString() { + return new ToStringer(QueryResponseStatus.class) + .add("canceled", canceled) + .add("closed", closed) + .add("pending", pending) + .add("statementId", statementId) + .add("success", success) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java new file mode 100755 index 000000000..c54d199d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SuccessStatus { + /** + * The token to poll for the result asynchronously. Example: + * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + */ + @JsonProperty("data_token") + private String dataToken; + + /** Whether the query result is truncated (either by byte limit or row limit) */ + @JsonProperty("truncated") + private Boolean truncated; + + public SuccessStatus setDataToken(String dataToken) { + this.dataToken = dataToken; + return this; + } + + public String getDataToken() { + return dataToken; + } + + public SuccessStatus setTruncated(Boolean truncated) { + this.truncated = truncated; + return this; + } + + public Boolean getTruncated() { + return truncated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SuccessStatus that = (SuccessStatus) o; + return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated); + } + + @Override + public int hashCode() { + return Objects.hash(dataToken, truncated); + } + + @Override + public String toString() { + return new ToStringer(SuccessStatus.class) + .add("dataToken", dataToken) + .add("truncated", truncated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java new file mode 100755 index 000000000..cd862ab79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseBranchRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
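An illustrative sketch of the request type whose body follows: it pairs a project id with a DatabaseBranch payload, using the builder-style setters generated throughout the SDK. The ids below are placeholders.

    // Sketch: branch off a parent branch within an existing project.
    CreateDatabaseBranchRequest createBranch =
        new CreateDatabaseBranchRequest()
            .setProjectId("my-project-id") // placeholder
            .setDatabaseBranch(
                new DatabaseBranch()
                    .setParentId("parent-branch-id") // placeholder parent branch
                    .setIsProtected(false));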
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateDatabaseBranchRequest { + /** */ + @JsonProperty("database_branch") + private DatabaseBranch databaseBranch; + + /** */ + @JsonIgnore private String projectId; + + public CreateDatabaseBranchRequest setDatabaseBranch(DatabaseBranch databaseBranch) { + this.databaseBranch = databaseBranch; + return this; + } + + public DatabaseBranch getDatabaseBranch() { + return databaseBranch; + } + + public CreateDatabaseBranchRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseBranchRequest that = (CreateDatabaseBranchRequest) o; + return Objects.equals(databaseBranch, that.databaseBranch) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(databaseBranch, projectId); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseBranchRequest.class) + .add("databaseBranch", databaseBranch) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java new file mode 100755 index 000000000..b6437fc63 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseEndpointRequest.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
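Similarly, a hedged sketch of the endpoint-creation request defined next: a read-write endpoint with autoscaling bounds. The ids and compute-unit values are illustrative only.

    // Sketch: a read_write endpoint scaling between 0.25 and 2 Compute Units.
    CreateDatabaseEndpointRequest createEndpoint =
        new CreateDatabaseEndpointRequest()
            .setProjectId("my-project-id") // placeholder
            .setBranchId("my-branch-id") // placeholder
            .setDatabaseEndpoint(
                new DatabaseEndpoint()
                    .setType(DatabaseEndpointType.READ_WRITE)
                    .setAutoscalingLimitMinCu(0.25)
                    .setAutoscalingLimitMaxCu(2.0));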
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateDatabaseEndpointRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonProperty("database_endpoint") + private DatabaseEndpoint databaseEndpoint; + + /** */ + @JsonIgnore private String projectId; + + public CreateDatabaseEndpointRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public CreateDatabaseEndpointRequest setDatabaseEndpoint(DatabaseEndpoint databaseEndpoint) { + this.databaseEndpoint = databaseEndpoint; + return this; + } + + public DatabaseEndpoint getDatabaseEndpoint() { + return databaseEndpoint; + } + + public CreateDatabaseEndpointRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseEndpointRequest that = (CreateDatabaseEndpointRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(databaseEndpoint, that.databaseEndpoint) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, databaseEndpoint, projectId); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseEndpointRequest.class) + .add("branchId", branchId) + .add("databaseEndpoint", databaseEndpoint) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java index af69b9394..643688431 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -10,6 +11,11 @@ @Generated public class CreateDatabaseInstanceRoleRequest { + /** */ + @JsonIgnore + @QueryParam("database_instance_name") + private String databaseInstanceName; + /** */ @JsonProperty("database_instance_role") private DatabaseInstanceRole databaseInstanceRole; @@ -17,6 +23,15 @@ public class CreateDatabaseInstanceRoleRequest { /** */ @JsonIgnore private String instanceName; + public CreateDatabaseInstanceRoleRequest setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + public CreateDatabaseInstanceRoleRequest setDatabaseInstanceRole( DatabaseInstanceRole databaseInstanceRole) { this.databaseInstanceRole = databaseInstanceRole; @@ -41,18 +56,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null 
|| getClass() != o.getClass()) return false; CreateDatabaseInstanceRoleRequest that = (CreateDatabaseInstanceRoleRequest) o; - return Objects.equals(databaseInstanceRole, that.databaseInstanceRole) + return Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(databaseInstanceRole, that.databaseInstanceRole) && Objects.equals(instanceName, that.instanceName); } @Override public int hashCode() { - return Objects.hash(databaseInstanceRole, instanceName); + return Objects.hash(databaseInstanceName, databaseInstanceRole, instanceName); } @Override public String toString() { return new ToStringer(CreateDatabaseInstanceRoleRequest.class) + .add("databaseInstanceName", databaseInstanceName) .add("databaseInstanceRole", databaseInstanceRole) .add("instanceName", instanceName) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java new file mode 100755 index 000000000..489b30295 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseProjectRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateDatabaseProjectRequest { + /** */ + @JsonProperty("database_project") + private DatabaseProject databaseProject; + + public CreateDatabaseProjectRequest setDatabaseProject(DatabaseProject databaseProject) { + this.databaseProject = databaseProject; + return this; + } + + public DatabaseProject getDatabaseProject() { + return databaseProject; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseProjectRequest that = (CreateDatabaseProjectRequest) o; + return Objects.equals(databaseProject, that.databaseProject); + } + + @Override + public int hashCode() { + return Objects.hash(databaseProject); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseProjectRequest.class) + .add("databaseProject", databaseProject) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java new file mode 100755 index 000000000..61833a248 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CustomTag.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CustomTag { + /** The key of the custom tag. */ + @JsonProperty("key") + private String key; + + /** The value of the custom tag. 
*/ + @JsonProperty("value") + private String value; + + public CustomTag setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public CustomTag setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomTag that = (CustomTag) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(CustomTag.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java index 8c96210c2..2d70135a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java @@ -150,6 +150,11 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) impl.deleteSyncedDatabaseTable(request); } + /** Failover the primary node of a Database Instance to a secondary. */ + public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) { + return impl.failoverDatabaseInstance(request); + } + /** Find a Database Instance by uid. */ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) { return impl.findDatabaseInstanceByUid(request); @@ -297,6 +302,12 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req return impl.updateDatabaseInstance(request); } + /** Update a role for a Database Instance. */ + public DatabaseInstanceRole updateDatabaseInstanceRole( + UpdateDatabaseInstanceRoleRequest request) { + return impl.updateDatabaseInstanceRole(request); + } + /** This API is currently unimplemented, but exposed for Terraform support. */ public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) { return impl.updateSyncedDatabaseTable(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java new file mode 100755 index 000000000..74abb5509 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseBranch.java @@ -0,0 +1,261 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseBranch { + /** */ + @JsonProperty("branch_id") + private String branchId; + + /** A timestamp indicating when the branch was created. */ + @JsonProperty("create_time") + private String createTime; + + /** The branch’s state, indicating if it is initializing, ready for use, or archived. */ + @JsonProperty("current_state") + private String currentState; + + /** + * Whether the branch is the project's default branch. This field is only returned on + * create/update responses. 
See effective_default for the value that is actually applied to the + * database branch. + */ + @JsonProperty("default") + private Boolean defaultValue; + + /** Whether the branch is the project's default branch. */ + @JsonProperty("effective_default") + private Boolean effectiveDefault; + + /** Whether the branch is protected. */ + @JsonProperty("is_protected") + private Boolean isProtected; + + /** The logical size of the branch. */ + @JsonProperty("logical_size_bytes") + private Long logicalSizeBytes; + + /** The id of the parent branch */ + @JsonProperty("parent_id") + private String parentId; + + /** + * The Log Sequence Number (LSN) on the parent branch from which this branch was created. When + * restoring a branch using the Restore Database Branch endpoint, this value isn’t finalized until + * all operations related to the restore have completed successfully. + */ + @JsonProperty("parent_lsn") + private String parentLsn; + + /** The point in time on the parent branch from which this branch was created. */ + @JsonProperty("parent_time") + private String parentTime; + + /** */ + @JsonProperty("pending_state") + private String pendingState; + + /** */ + @JsonProperty("project_id") + private String projectId; + + /** A timestamp indicating when the `current_state` began. */ + @JsonProperty("state_change_time") + private String stateChangeTime; + + /** A timestamp indicating when the branch was last updated. */ + @JsonProperty("update_time") + private String updateTime; + + public DatabaseBranch setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public DatabaseBranch setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public DatabaseBranch setCurrentState(String currentState) { + this.currentState = currentState; + return this; + } + + public String getCurrentState() { + return currentState; + } + + public DatabaseBranch setDefault(Boolean defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Boolean getDefault() { + return defaultValue; + } + + public DatabaseBranch setEffectiveDefault(Boolean effectiveDefault) { + this.effectiveDefault = effectiveDefault; + return this; + } + + public Boolean getEffectiveDefault() { + return effectiveDefault; + } + + public DatabaseBranch setIsProtected(Boolean isProtected) { + this.isProtected = isProtected; + return this; + } + + public Boolean getIsProtected() { + return isProtected; + } + + public DatabaseBranch setLogicalSizeBytes(Long logicalSizeBytes) { + this.logicalSizeBytes = logicalSizeBytes; + return this; + } + + public Long getLogicalSizeBytes() { + return logicalSizeBytes; + } + + public DatabaseBranch setParentId(String parentId) { + this.parentId = parentId; + return this; + } + + public String getParentId() { + return parentId; + } + + public DatabaseBranch setParentLsn(String parentLsn) { + this.parentLsn = parentLsn; + return this; + } + + public String getParentLsn() { + return parentLsn; + } + + public DatabaseBranch setParentTime(String parentTime) { + this.parentTime = parentTime; + return this; + } + + public String getParentTime() { + return parentTime; + } + + public DatabaseBranch setPendingState(String pendingState) { + this.pendingState = pendingState; + return this; + } + + public String getPendingState() { + return pendingState; + } + + public DatabaseBranch setProjectId(String projectId) { + this.projectId = 
projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public DatabaseBranch setStateChangeTime(String stateChangeTime) { + this.stateChangeTime = stateChangeTime; + return this; + } + + public String getStateChangeTime() { + return stateChangeTime; + } + + public DatabaseBranch setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseBranch that = (DatabaseBranch) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(createTime, that.createTime) + && Objects.equals(currentState, that.currentState) + && Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(effectiveDefault, that.effectiveDefault) + && Objects.equals(isProtected, that.isProtected) + && Objects.equals(logicalSizeBytes, that.logicalSizeBytes) + && Objects.equals(parentId, that.parentId) + && Objects.equals(parentLsn, that.parentLsn) + && Objects.equals(parentTime, that.parentTime) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(projectId, that.projectId) + && Objects.equals(stateChangeTime, that.stateChangeTime) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + branchId, + createTime, + currentState, + defaultValue, + effectiveDefault, + isProtected, + logicalSizeBytes, + parentId, + parentLsn, + parentTime, + pendingState, + projectId, + stateChangeTime, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(DatabaseBranch.class) + .add("branchId", branchId) + .add("createTime", createTime) + .add("currentState", currentState) + .add("defaultValue", defaultValue) + .add("effectiveDefault", effectiveDefault) + .add("isProtected", isProtected) + .add("logicalSizeBytes", logicalSizeBytes) + .add("parentId", parentId) + .add("parentLsn", parentLsn) + .add("parentTime", parentTime) + .add("pendingState", pendingState) + .add("projectId", projectId) + .add("stateChangeTime", stateChangeTime) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java index 06049eb18..fe1a9ecc4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java @@ -13,6 +13,10 @@ public class DatabaseCatalog { @JsonProperty("create_database_if_not_exists") private Boolean createDatabaseIfNotExists; + /** The branch_id of the database branch associated with the catalog. */ + @JsonProperty("database_branch_id") + private String databaseBranchId; + /** The name of the DatabaseInstance housing the database. */ @JsonProperty("database_instance_name") private String databaseInstanceName; @@ -21,6 +25,10 @@ public class DatabaseCatalog { @JsonProperty("database_name") private String databaseName; + /** The project_id of the database project associated with the catalog. */ + @JsonProperty("database_project_id") + private String databaseProjectId; + /** The name of the catalog in UC. 
*/ @JsonProperty("name") private String name; @@ -38,6 +46,15 @@ public Boolean getCreateDatabaseIfNotExists() { return createDatabaseIfNotExists; } + public DatabaseCatalog setDatabaseBranchId(String databaseBranchId) { + this.databaseBranchId = databaseBranchId; + return this; + } + + public String getDatabaseBranchId() { + return databaseBranchId; + } + public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) { this.databaseInstanceName = databaseInstanceName; return this; @@ -56,6 +73,15 @@ public String getDatabaseName() { return databaseName; } + public DatabaseCatalog setDatabaseProjectId(String databaseProjectId) { + this.databaseProjectId = databaseProjectId; + return this; + } + + public String getDatabaseProjectId() { + return databaseProjectId; + } + public DatabaseCatalog setName(String name) { this.name = name; return this; @@ -80,23 +106,34 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DatabaseCatalog that = (DatabaseCatalog) o; return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists) + && Objects.equals(databaseBranchId, that.databaseBranchId) && Objects.equals(databaseInstanceName, that.databaseInstanceName) && Objects.equals(databaseName, that.databaseName) + && Objects.equals(databaseProjectId, that.databaseProjectId) && Objects.equals(name, that.name) && Objects.equals(uid, that.uid); } @Override public int hashCode() { - return Objects.hash(createDatabaseIfNotExists, databaseInstanceName, databaseName, name, uid); + return Objects.hash( + createDatabaseIfNotExists, + databaseBranchId, + databaseInstanceName, + databaseName, + databaseProjectId, + name, + uid); } @Override public String toString() { return new ToStringer(DatabaseCatalog.class) .add("createDatabaseIfNotExists", createDatabaseIfNotExists) + .add("databaseBranchId", databaseBranchId) .add("databaseInstanceName", databaseInstanceName) .add("databaseName", databaseName) + .add("databaseProjectId", databaseProjectId) .add("name", name) .add("uid", uid) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java new file mode 100755 index 000000000..918b6c5a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpoint.java @@ -0,0 +1,327 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseEndpoint { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** */ + @JsonProperty("branch_id") + private String branchId; + + /** A timestamp indicating when the compute endpoint was created. */ + @JsonProperty("create_time") + private String createTime; + + /** */ + @JsonProperty("current_state") + private DatabaseEndpointState currentState; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. 
A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** */ + @JsonProperty("endpoint_id") + private String endpointId; + + /** + * The hostname of the compute endpoint. This is the hostname specified when connecting to a + * database. + */ + @JsonProperty("host") + private String host; + + /** A timestamp indicating when the compute endpoint was last active. */ + @JsonProperty("last_active_time") + private String lastActiveTime; + + /** */ + @JsonProperty("pending_state") + private DatabaseEndpointState pendingState; + + /** */ + @JsonProperty("pooler_mode") + private DatabaseEndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("project_id") + private String projectId; + + /** */ + @JsonProperty("settings") + private DatabaseEndpointSettings settings; + + /** A timestamp indicating when the compute endpoint was last started. */ + @JsonProperty("start_time") + private String startTime; + + /** A timestamp indicating when the compute endpoint was last suspended. */ + @JsonProperty("suspend_time") + private String suspendTime; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. */ + @JsonProperty("suspend_timeout_duration") + private String suspendTimeoutDuration; + + /** + * NOTE: if the type should default to some value set by the server, then add an effective_type + * field OR make this field REQUIRED. + */ + @JsonProperty("type") + private DatabaseEndpointType typeValue; + + /** A timestamp indicating when the compute endpoint was last updated. */ + @JsonProperty("update_time") + private String updateTime; + + public DatabaseEndpoint setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public DatabaseEndpoint setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public DatabaseEndpoint setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public DatabaseEndpoint setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public DatabaseEndpoint setCurrentState(DatabaseEndpointState currentState) { + this.currentState = currentState; + return this; + } + + public DatabaseEndpointState getCurrentState() { + return currentState; + } + + public DatabaseEndpoint setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public DatabaseEndpoint setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public DatabaseEndpoint setHost(String host) { + this.host = host; + return this; + } + + public String getHost() { + return host; + } + + public DatabaseEndpoint setLastActiveTime(String lastActiveTime) { + this.lastActiveTime = lastActiveTime; + return this; + } + + public String getLastActiveTime() { + return lastActiveTime; + } + + public DatabaseEndpoint setPendingState(DatabaseEndpointState pendingState) { + this.pendingState = pendingState; + return this; + } + + public DatabaseEndpointState
getPendingState() { + return pendingState; + } + + public DatabaseEndpoint setPoolerMode(DatabaseEndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public DatabaseEndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public DatabaseEndpoint setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public DatabaseEndpoint setSettings(DatabaseEndpointSettings settings) { + this.settings = settings; + return this; + } + + public DatabaseEndpointSettings getSettings() { + return settings; + } + + public DatabaseEndpoint setStartTime(String startTime) { + this.startTime = startTime; + return this; + } + + public String getStartTime() { + return startTime; + } + + public DatabaseEndpoint setSuspendTime(String suspendTime) { + this.suspendTime = suspendTime; + return this; + } + + public String getSuspendTime() { + return suspendTime; + } + + public DatabaseEndpoint setSuspendTimeoutDuration(String suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public String getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + public DatabaseEndpoint setType(DatabaseEndpointType typeValue) { + this.typeValue = typeValue; + return this; + } + + public DatabaseEndpointType getType() { + return typeValue; + } + + public DatabaseEndpoint setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseEndpoint that = (DatabaseEndpoint) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(branchId, that.branchId) + && Objects.equals(createTime, that.createTime) + && Objects.equals(currentState, that.currentState) + && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(host, that.host) + && Objects.equals(lastActiveTime, that.lastActiveTime) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(projectId, that.projectId) + && Objects.equals(settings, that.settings) + && Objects.equals(startTime, that.startTime) + && Objects.equals(suspendTime, that.suspendTime) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + branchId, + createTime, + currentState, + disabled, + endpointId, + host, + lastActiveTime, + pendingState, + poolerMode, + projectId, + settings, + startTime, + suspendTime, + suspendTimeoutDuration, + typeValue, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(DatabaseEndpoint.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("branchId", branchId) + .add("createTime", createTime) + .add("currentState", currentState) + .add("disabled", disabled) + .add("endpointId", endpointId) + .add("host", host) + .add("lastActiveTime", lastActiveTime) + .add("pendingState", pendingState) + .add("poolerMode", 
poolerMode) + .add("projectId", projectId) + .add("settings", settings) + .add("startTime", startTime) + .add("suspendTime", suspendTime) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .add("typeValue", typeValue) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java new file mode 100755 index 000000000..09bbcc3c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointPoolerMode.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** The connection pooler mode. Lakebase supports PgBouncer in `transaction` mode only. */ +@Generated +public enum DatabaseEndpointPoolerMode { + TRANSACTION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java new file mode 100755 index 000000000..dd6cfac29 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointSettings.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** A collection of settings for a compute endpoint */ +@Generated +public class DatabaseEndpointSettings { + /** A raw representation of Postgres settings. */ + @JsonProperty("pg_settings") + private Map pgSettings; + + /** A raw representation of PgBouncer settings. 
*/ + @JsonProperty("pgbouncer_settings") + private Map pgbouncerSettings; + + public DatabaseEndpointSettings setPgSettings(Map pgSettings) { + this.pgSettings = pgSettings; + return this; + } + + public Map getPgSettings() { + return pgSettings; + } + + public DatabaseEndpointSettings setPgbouncerSettings(Map pgbouncerSettings) { + this.pgbouncerSettings = pgbouncerSettings; + return this; + } + + public Map getPgbouncerSettings() { + return pgbouncerSettings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseEndpointSettings that = (DatabaseEndpointSettings) o; + return Objects.equals(pgSettings, that.pgSettings) + && Objects.equals(pgbouncerSettings, that.pgbouncerSettings); + } + + @Override + public int hashCode() { + return Objects.hash(pgSettings, pgbouncerSettings); + } + + @Override + public String toString() { + return new ToStringer(DatabaseEndpointSettings.class) + .add("pgSettings", pgSettings) + .add("pgbouncerSettings", pgbouncerSettings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java new file mode 100755 index 000000000..9227e98ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointState.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** The state of the compute endpoint */ +@Generated +public enum DatabaseEndpointState { + ACTIVE, + IDLE, + INIT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java new file mode 100755 index 000000000..937c6db36 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseEndpointType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** The compute endpoint type. Either `read_write` or `read_only`. 
*/ +@Generated +public enum DatabaseEndpointType { + READ_ONLY, + READ_WRITE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java index f58d3a0bc..57faa5809 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java @@ -156,6 +156,20 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) } } + @Override + public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) { + String path = String.format("/api/2.0/database/instances/%s/failover", request.getName()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseInstance.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) { String path = "/api/2.0/database/instances:findByUid"; @@ -335,6 +349,25 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req } } + @Override + public DatabaseInstanceRole updateDatabaseInstanceRole( + UpdateDatabaseInstanceRoleRequest request) { + String path = + String.format( + "/api/2.0/database/instances/%s/roles/%s", + request.getInstanceName(), request.getName()); + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getDatabaseInstanceRole())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseInstanceRole.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) { String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java index 077608170..f52760290 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java @@ -29,10 +29,21 @@ public class DatabaseInstance { @JsonProperty("creator") private String creator; + /** + * Custom tags associated with the instance. This field is only included on create and update + * responses. + */ + @JsonProperty("custom_tags") + private Collection customTags; + /** Deprecated. The sku of the instance; this field will always match the value of capacity. */ @JsonProperty("effective_capacity") private String effectiveCapacity; + /** The recorded custom tags associated with the instance. */ + @JsonProperty("effective_custom_tags") + private Collection effectiveCustomTags; + /** Whether the instance has PG native password login enabled. 
*/ @JsonProperty("effective_enable_pg_native_login") private Boolean effectiveEnablePgNativeLogin; @@ -59,6 +70,10 @@ public class DatabaseInstance { @JsonProperty("effective_stopped") private Boolean effectiveStopped; + /** The policy that is applied to the instance. */ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** Whether to enable PG native password login on the instance. Defaults to false. */ @JsonProperty("enable_pg_native_login") private Boolean enablePgNativeLogin; @@ -121,6 +136,10 @@ public class DatabaseInstance { @JsonProperty("uid") private String uid; + /** The desired usage policy to associate with the instance. */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + public DatabaseInstance setCapacity(String capacity) { this.capacity = capacity; return this; @@ -157,6 +176,15 @@ public String getCreator() { return creator; } + public DatabaseInstance setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + public DatabaseInstance setEffectiveCapacity(String effectiveCapacity) { this.effectiveCapacity = effectiveCapacity; return this; @@ -166,6 +194,15 @@ public String getEffectiveCapacity() { return effectiveCapacity; } + public DatabaseInstance setEffectiveCustomTags(Collection effectiveCustomTags) { + this.effectiveCustomTags = effectiveCustomTags; + return this; + } + + public Collection getEffectiveCustomTags() { + return effectiveCustomTags; + } + public DatabaseInstance setEffectiveEnablePgNativeLogin(Boolean effectiveEnablePgNativeLogin) { this.effectiveEnablePgNativeLogin = effectiveEnablePgNativeLogin; return this; @@ -212,6 +249,15 @@ public Boolean getEffectiveStopped() { return effectiveStopped; } + public DatabaseInstance setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public DatabaseInstance setEnablePgNativeLogin(Boolean enablePgNativeLogin) { this.enablePgNativeLogin = enablePgNativeLogin; return this; @@ -320,6 +366,15 @@ public String getUid() { return uid; } + public DatabaseInstance setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -329,13 +384,16 @@ public boolean equals(Object o) { && Objects.equals(childInstanceRefs, that.childInstanceRefs) && Objects.equals(creationTime, that.creationTime) && Objects.equals(creator, that.creator) + && Objects.equals(customTags, that.customTags) && Objects.equals(effectiveCapacity, that.effectiveCapacity) + && Objects.equals(effectiveCustomTags, that.effectiveCustomTags) && Objects.equals(effectiveEnablePgNativeLogin, that.effectiveEnablePgNativeLogin) && Objects.equals( effectiveEnableReadableSecondaries, that.effectiveEnableReadableSecondaries) && Objects.equals(effectiveNodeCount, that.effectiveNodeCount) && Objects.equals(effectiveRetentionWindowInDays, that.effectiveRetentionWindowInDays) && Objects.equals(effectiveStopped, that.effectiveStopped) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(enablePgNativeLogin, that.enablePgNativeLogin) && Objects.equals(enableReadableSecondaries, that.enableReadableSecondaries) && Objects.equals(name, that.name) @@ 
-347,7 +405,8 @@ public boolean equals(Object o) { && Objects.equals(retentionWindowInDays, that.retentionWindowInDays) && Objects.equals(state, that.state) && Objects.equals(stopped, that.stopped) - && Objects.equals(uid, that.uid); + && Objects.equals(uid, that.uid) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override @@ -357,12 +416,15 @@ public int hashCode() { childInstanceRefs, creationTime, creator, + customTags, effectiveCapacity, + effectiveCustomTags, effectiveEnablePgNativeLogin, effectiveEnableReadableSecondaries, effectiveNodeCount, effectiveRetentionWindowInDays, effectiveStopped, + effectiveUsagePolicyId, enablePgNativeLogin, enableReadableSecondaries, name, @@ -374,7 +436,8 @@ public int hashCode() { retentionWindowInDays, state, stopped, - uid); + uid, + usagePolicyId); } @Override @@ -384,12 +447,15 @@ public String toString() { .add("childInstanceRefs", childInstanceRefs) .add("creationTime", creationTime) .add("creator", creator) + .add("customTags", customTags) .add("effectiveCapacity", effectiveCapacity) + .add("effectiveCustomTags", effectiveCustomTags) .add("effectiveEnablePgNativeLogin", effectiveEnablePgNativeLogin) .add("effectiveEnableReadableSecondaries", effectiveEnableReadableSecondaries) .add("effectiveNodeCount", effectiveNodeCount) .add("effectiveRetentionWindowInDays", effectiveRetentionWindowInDays) .add("effectiveStopped", effectiveStopped) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("enablePgNativeLogin", enablePgNativeLogin) .add("enableReadableSecondaries", enableReadableSecondaries) .add("name", name) @@ -402,6 +468,7 @@ public String toString() { .add("state", state) .add("stopped", stopped) .add("uid", uid) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java index cc9b99b4c..9f0c7010a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java @@ -10,14 +10,22 @@ /** A DatabaseInstanceRole represents a Postgres role in a database instance. */ @Generated public class DatabaseInstanceRole { - /** API-exposed Postgres role attributes */ + /** The desired API-exposed Postgres role attribute to associate with the role. Optional. */ @JsonProperty("attributes") private DatabaseInstanceRoleAttributes attributes; + /** The attributes that are applied to the role. */ + @JsonProperty("effective_attributes") + private DatabaseInstanceRoleAttributes effectiveAttributes; + /** The type of the role. */ @JsonProperty("identity_type") private DatabaseInstanceRoleIdentityType identityType; + /** */ + @JsonProperty("instance_name") + private String instanceName; + /** An enum value for a standard role that this role is a member of. 
*/ @JsonProperty("membership_role") private DatabaseInstanceRoleMembershipRole membershipRole; @@ -35,6 +43,16 @@ public DatabaseInstanceRoleAttributes getAttributes() { return attributes; } + public DatabaseInstanceRole setEffectiveAttributes( + DatabaseInstanceRoleAttributes effectiveAttributes) { + this.effectiveAttributes = effectiveAttributes; + return this; + } + + public DatabaseInstanceRoleAttributes getEffectiveAttributes() { + return effectiveAttributes; + } + public DatabaseInstanceRole setIdentityType(DatabaseInstanceRoleIdentityType identityType) { this.identityType = identityType; return this; @@ -44,6 +62,15 @@ public DatabaseInstanceRoleIdentityType getIdentityType() { return identityType; } + public DatabaseInstanceRole setInstanceName(String instanceName) { + this.instanceName = instanceName; + return this; + } + + public String getInstanceName() { + return instanceName; + } + public DatabaseInstanceRole setMembershipRole(DatabaseInstanceRoleMembershipRole membershipRole) { this.membershipRole = membershipRole; return this; @@ -68,21 +95,26 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DatabaseInstanceRole that = (DatabaseInstanceRole) o; return Objects.equals(attributes, that.attributes) + && Objects.equals(effectiveAttributes, that.effectiveAttributes) && Objects.equals(identityType, that.identityType) + && Objects.equals(instanceName, that.instanceName) && Objects.equals(membershipRole, that.membershipRole) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(attributes, identityType, membershipRole, name); + return Objects.hash( + attributes, effectiveAttributes, identityType, instanceName, membershipRole, name); } @Override public String toString() { return new ToStringer(DatabaseInstanceRole.class) .add("attributes", attributes) + .add("effectiveAttributes", effectiveAttributes) .add("identityType", identityType) + .add("instanceName", instanceName) .add("membershipRole", membershipRole) .add("name", name) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java new file mode 100755 index 000000000..cf2969c40 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProject.java @@ -0,0 +1,347 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class DatabaseProject { + /** The logical size limit for a branch. */ + @JsonProperty("branch_logical_size_limit_bytes") + private Long branchLogicalSizeLimitBytes; + + /** + * The desired budget policy to associate with the instance. This field is only returned on + * create/update responses, and represents the customer provided budget policy. See + * effective_budget_policy_id for the policy that is actually applied to the instance. + */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + /** The most recent time when any endpoint of this project was active. */ + @JsonProperty("compute_last_active_time") + private String computeLastActiveTime; + + /** A timestamp indicating when the project was created. 
*/ + @JsonProperty("create_time") + private String createTime; + + /** Custom tags associated with the instance. */ + @JsonProperty("custom_tags") + private Collection customTags; + + /** */ + @JsonProperty("default_endpoint_settings") + private DatabaseProjectDefaultEndpointSettings defaultEndpointSettings; + + /** Human-readable project name. */ + @JsonProperty("display_name") + private String displayName; + + /** The policy that is applied to the instance. */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + /** */ + @JsonProperty("effective_default_endpoint_settings") + private DatabaseProjectDefaultEndpointSettings effectiveDefaultEndpointSettings; + + /** */ + @JsonProperty("effective_display_name") + private String effectiveDisplayName; + + /** */ + @JsonProperty("effective_history_retention_duration") + private String effectiveHistoryRetentionDuration; + + /** */ + @JsonProperty("effective_pg_version") + private Long effectivePgVersion; + + /** */ + @JsonProperty("effective_settings") + private DatabaseProjectSettings effectiveSettings; + + /** + * The number of seconds to retain the shared history for point in time recovery for all branches + * in this project. + */ + @JsonProperty("history_retention_duration") + private String historyRetentionDuration; + + /** The major Postgres version number. */ + @JsonProperty("pg_version") + private Long pgVersion; + + /** */ + @JsonProperty("project_id") + private String projectId; + + /** */ + @JsonProperty("settings") + private DatabaseProjectSettings settings; + + /** + * The current space occupied by the project in storage. Synthetic storage size combines the + * logical data size and Write-Ahead Log (WAL) size for all branches in a project. + */ + @JsonProperty("synthetic_storage_size_bytes") + private Long syntheticStorageSizeBytes; + + /** A timestamp indicating when the project was last updated. 
*/ + @JsonProperty("update_time") + private String updateTime; + + public DatabaseProject setBranchLogicalSizeLimitBytes(Long branchLogicalSizeLimitBytes) { + this.branchLogicalSizeLimitBytes = branchLogicalSizeLimitBytes; + return this; + } + + public Long getBranchLogicalSizeLimitBytes() { + return branchLogicalSizeLimitBytes; + } + + public DatabaseProject setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public DatabaseProject setComputeLastActiveTime(String computeLastActiveTime) { + this.computeLastActiveTime = computeLastActiveTime; + return this; + } + + public String getComputeLastActiveTime() { + return computeLastActiveTime; + } + + public DatabaseProject setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public DatabaseProject setCustomTags(Collection customTags) { + this.customTags = customTags; + return this; + } + + public Collection getCustomTags() { + return customTags; + } + + public DatabaseProject setDefaultEndpointSettings( + DatabaseProjectDefaultEndpointSettings defaultEndpointSettings) { + this.defaultEndpointSettings = defaultEndpointSettings; + return this; + } + + public DatabaseProjectDefaultEndpointSettings getDefaultEndpointSettings() { + return defaultEndpointSettings; + } + + public DatabaseProject setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public DatabaseProject setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public DatabaseProject setEffectiveDefaultEndpointSettings( + DatabaseProjectDefaultEndpointSettings effectiveDefaultEndpointSettings) { + this.effectiveDefaultEndpointSettings = effectiveDefaultEndpointSettings; + return this; + } + + public DatabaseProjectDefaultEndpointSettings getEffectiveDefaultEndpointSettings() { + return effectiveDefaultEndpointSettings; + } + + public DatabaseProject setEffectiveDisplayName(String effectiveDisplayName) { + this.effectiveDisplayName = effectiveDisplayName; + return this; + } + + public String getEffectiveDisplayName() { + return effectiveDisplayName; + } + + public DatabaseProject setEffectiveHistoryRetentionDuration( + String effectiveHistoryRetentionDuration) { + this.effectiveHistoryRetentionDuration = effectiveHistoryRetentionDuration; + return this; + } + + public String getEffectiveHistoryRetentionDuration() { + return effectiveHistoryRetentionDuration; + } + + public DatabaseProject setEffectivePgVersion(Long effectivePgVersion) { + this.effectivePgVersion = effectivePgVersion; + return this; + } + + public Long getEffectivePgVersion() { + return effectivePgVersion; + } + + public DatabaseProject setEffectiveSettings(DatabaseProjectSettings effectiveSettings) { + this.effectiveSettings = effectiveSettings; + return this; + } + + public DatabaseProjectSettings getEffectiveSettings() { + return effectiveSettings; + } + + public DatabaseProject setHistoryRetentionDuration(String historyRetentionDuration) { + this.historyRetentionDuration = historyRetentionDuration; + return this; + } + + public String getHistoryRetentionDuration() { + return historyRetentionDuration; + } + + public DatabaseProject 
setPgVersion(Long pgVersion) { + this.pgVersion = pgVersion; + return this; + } + + public Long getPgVersion() { + return pgVersion; + } + + public DatabaseProject setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public DatabaseProject setSettings(DatabaseProjectSettings settings) { + this.settings = settings; + return this; + } + + public DatabaseProjectSettings getSettings() { + return settings; + } + + public DatabaseProject setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) { + this.syntheticStorageSizeBytes = syntheticStorageSizeBytes; + return this; + } + + public Long getSyntheticStorageSizeBytes() { + return syntheticStorageSizeBytes; + } + + public DatabaseProject setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseProject that = (DatabaseProject) o; + return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(computeLastActiveTime, that.computeLastActiveTime) + && Objects.equals(createTime, that.createTime) + && Objects.equals(customTags, that.customTags) + && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings) + && Objects.equals(displayName, that.displayName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveDefaultEndpointSettings, that.effectiveDefaultEndpointSettings) + && Objects.equals(effectiveDisplayName, that.effectiveDisplayName) + && Objects.equals(effectiveHistoryRetentionDuration, that.effectiveHistoryRetentionDuration) + && Objects.equals(effectivePgVersion, that.effectivePgVersion) + && Objects.equals(effectiveSettings, that.effectiveSettings) + && Objects.equals(historyRetentionDuration, that.historyRetentionDuration) + && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(projectId, that.projectId) + && Objects.equals(settings, that.settings) + && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + branchLogicalSizeLimitBytes, + budgetPolicyId, + computeLastActiveTime, + createTime, + customTags, + defaultEndpointSettings, + displayName, + effectiveBudgetPolicyId, + effectiveDefaultEndpointSettings, + effectiveDisplayName, + effectiveHistoryRetentionDuration, + effectivePgVersion, + effectiveSettings, + historyRetentionDuration, + pgVersion, + projectId, + settings, + syntheticStorageSizeBytes, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(DatabaseProject.class) + .add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes) + .add("budgetPolicyId", budgetPolicyId) + .add("computeLastActiveTime", computeLastActiveTime) + .add("createTime", createTime) + .add("customTags", customTags) + .add("defaultEndpointSettings", defaultEndpointSettings) + .add("displayName", displayName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveDefaultEndpointSettings", effectiveDefaultEndpointSettings) + .add("effectiveDisplayName", effectiveDisplayName) + .add("effectiveHistoryRetentionDuration", effectiveHistoryRetentionDuration) + 
.add("effectivePgVersion", effectivePgVersion) + .add("effectiveSettings", effectiveSettings) + .add("historyRetentionDuration", historyRetentionDuration) + .add("pgVersion", pgVersion) + .add("projectId", projectId) + .add("settings", settings) + .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java new file mode 100755 index 000000000..90d1c45b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectAPI.java @@ -0,0 +1,184 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.database; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Database Projects provide access to a database via REST API or direct SQL. */ +@Generated +public class DatabaseProjectAPI { + private static final Logger LOG = LoggerFactory.getLogger(DatabaseProjectAPI.class); + + private final DatabaseProjectService impl; + + /** Regular-use constructor */ + public DatabaseProjectAPI(ApiClient apiClient) { + impl = new DatabaseProjectImpl(apiClient); + } + + /** Constructor for mocks */ + public DatabaseProjectAPI(DatabaseProjectService mock) { + impl = mock; + } + + /** Create a Database Branch. */ + public DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest request) { + return impl.createDatabaseBranch(request); + } + + /** Create a Database Endpoint. */ + public DatabaseEndpoint createDatabaseEndpoint(CreateDatabaseEndpointRequest request) { + return impl.createDatabaseEndpoint(request); + } + + /** Create a Database Project. */ + public DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest request) { + return impl.createDatabaseProject(request); + } + + public void deleteDatabaseBranch(String projectId, String branchId) { + deleteDatabaseBranch( + new DeleteDatabaseBranchRequest().setProjectId(projectId).setBranchId(branchId)); + } + + /** Delete a Database Branch. */ + public void deleteDatabaseBranch(DeleteDatabaseBranchRequest request) { + impl.deleteDatabaseBranch(request); + } + + public void deleteDatabaseEndpoint(String projectId, String branchId, String endpointId) { + deleteDatabaseEndpoint( + new DeleteDatabaseEndpointRequest() + .setProjectId(projectId) + .setBranchId(branchId) + .setEndpointId(endpointId)); + } + + /** Delete a Database Endpoint. */ + public void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest request) { + impl.deleteDatabaseEndpoint(request); + } + + public void deleteDatabaseProject(String projectId) { + deleteDatabaseProject(new DeleteDatabaseProjectRequest().setProjectId(projectId)); + } + + /** Delete a Database Project. */ + public void deleteDatabaseProject(DeleteDatabaseProjectRequest request) { + impl.deleteDatabaseProject(request); + } + + public DatabaseBranch getDatabaseBranch(String projectId, String branchId) { + return getDatabaseBranch( + new GetDatabaseBranchRequest().setProjectId(projectId).setBranchId(branchId)); + } + + /** Get a Database Branch. 
*/ + public DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest request) { + return impl.getDatabaseBranch(request); + } + + public DatabaseEndpoint getDatabaseEndpoint( + String projectId, String branchId, String endpointId) { + return getDatabaseEndpoint( + new GetDatabaseEndpointRequest() + .setProjectId(projectId) + .setBranchId(branchId) + .setEndpointId(endpointId)); + } + + /** Get a Database Endpoint. */ + public DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest request) { + return impl.getDatabaseEndpoint(request); + } + + public DatabaseProject getDatabaseProject(String projectId) { + return getDatabaseProject(new GetDatabaseProjectRequest().setProjectId(projectId)); + } + + /** Get a Database Project. */ + public DatabaseProject getDatabaseProject(GetDatabaseProjectRequest request) { + return impl.getDatabaseProject(request); + } + + public Iterable<DatabaseBranch> listDatabaseBranches(String projectId) { + return listDatabaseBranches(new ListDatabaseBranchesRequest().setProjectId(projectId)); + } + + /** List Database Branches. */ + public Iterable<DatabaseBranch> listDatabaseBranches(ListDatabaseBranchesRequest request) { + return new Paginator<>( + request, + impl::listDatabaseBranches, + ListDatabaseBranchesResponse::getDatabaseBranches, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable<DatabaseEndpoint> listDatabaseEndpoints(String projectId, String branchId) { + return listDatabaseEndpoints( + new ListDatabaseEndpointsRequest().setProjectId(projectId).setBranchId(branchId)); + } + + /** List Database Endpoints. */ + public Iterable<DatabaseEndpoint> listDatabaseEndpoints(ListDatabaseEndpointsRequest request) { + return new Paginator<>( + request, + impl::listDatabaseEndpoints, + ListDatabaseEndpointsResponse::getDatabaseEndpoints, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** List Database Projects. */ + public Iterable<DatabaseProject> listDatabaseProjects(ListDatabaseProjectsRequest request) { + return new Paginator<>( + request, + impl::listDatabaseProjects, + ListDatabaseProjectsResponse::getDatabaseProjects, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** Restart a Database Endpoint. TODO: should return databricks.longrunning.Operation */ + public DatabaseEndpoint restartDatabaseEndpoint(RestartDatabaseEndpointRequest request) { + return impl.restartDatabaseEndpoint(request); + } + + /** Update a Database Branch. */ + public DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest request) { + return impl.updateDatabaseBranch(request); + } + + /** Update a Database Endpoint. TODO: should return databricks.longrunning.Operation */ + public DatabaseEndpoint updateDatabaseEndpoint(UpdateDatabaseEndpointRequest request) { + return impl.updateDatabaseEndpoint(request); + } + + /** Update a Database Project.
*/ + public DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest request) { + return impl.updateDatabaseProject(request); + } + + public DatabaseProjectService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java new file mode 100755 index 000000000..715cb3229 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectCustomTag.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseProjectCustomTag { + /** The key of the custom tag. */ + @JsonProperty("key") + private String key; + + /** The value of the custom tag. */ + @JsonProperty("value") + private String value; + + public DatabaseProjectCustomTag setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public DatabaseProjectCustomTag setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseProjectCustomTag that = (DatabaseProjectCustomTag) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(DatabaseProjectCustomTag.class) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java new file mode 100755 index 000000000..699992cae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectDefaultEndpointSettings.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** A collection of settings for a database endpoint. */ +@Generated +public class DatabaseProjectDefaultEndpointSettings { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** A raw representation of Postgres settings. */ + @JsonProperty("pg_settings") + private Map<String, String> pgSettings; + + /** A raw representation of PgBouncer settings. */ + @JsonProperty("pgbouncer_settings") + private Map<String, String> pgbouncerSettings; + + /** Duration of inactivity after which the compute endpoint is automatically suspended.
*/ + @JsonProperty("suspend_timeout_duration") + private String suspendTimeoutDuration; + + public DatabaseProjectDefaultEndpointSettings setAutoscalingLimitMaxCu( + Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public DatabaseProjectDefaultEndpointSettings setAutoscalingLimitMinCu( + Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public DatabaseProjectDefaultEndpointSettings setPgSettings(Map pgSettings) { + this.pgSettings = pgSettings; + return this; + } + + public Map getPgSettings() { + return pgSettings; + } + + public DatabaseProjectDefaultEndpointSettings setPgbouncerSettings( + Map pgbouncerSettings) { + this.pgbouncerSettings = pgbouncerSettings; + return this; + } + + public Map getPgbouncerSettings() { + return pgbouncerSettings; + } + + public DatabaseProjectDefaultEndpointSettings setSuspendTimeoutDuration( + String suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public String getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseProjectDefaultEndpointSettings that = (DatabaseProjectDefaultEndpointSettings) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(pgSettings, that.pgSettings) + && Objects.equals(pgbouncerSettings, that.pgbouncerSettings) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + pgSettings, + pgbouncerSettings, + suspendTimeoutDuration); + } + + @Override + public String toString() { + return new ToStringer(DatabaseProjectDefaultEndpointSettings.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("pgSettings", pgSettings) + .add("pgbouncerSettings", pgbouncerSettings) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java new file mode 100755 index 000000000..bf9ff3aa4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectImpl.java @@ -0,0 +1,260 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.database; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of DatabaseProject */ +@Generated +class DatabaseProjectImpl implements DatabaseProjectService { + private final ApiClient apiClient; + + public DatabaseProjectImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest request) { + String path = String.format("/api/2.0/database/projects/%s/branches", request.getProjectId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseBranch())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseBranch.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseEndpoint createDatabaseEndpoint(CreateDatabaseEndpointRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints", + request.getProjectId(), request.getBranchId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseEndpoint())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseEndpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest request) { + String path = "/api/2.0/database/projects"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseProject())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseProject.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDatabaseBranch(DeleteDatabaseBranchRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s", + request.getProjectId(), request.getBranchId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints/%s", + request.getProjectId(), request.getBranchId(), request.getEndpointId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDatabaseProject(DeleteDatabaseProjectRequest request) { + String path = String.format("/api/2.0/database/projects/%s", request.getProjectId()); + try { + Request req = new 
Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s", + request.getProjectId(), request.getBranchId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseBranch.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints/%s", + request.getProjectId(), request.getBranchId(), request.getEndpointId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseEndpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseProject getDatabaseProject(GetDatabaseProjectRequest request) { + String path = String.format("/api/2.0/database/projects/%s", request.getProjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseProject.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListDatabaseBranchesResponse listDatabaseBranches(ListDatabaseBranchesRequest request) { + String path = String.format("/api/2.0/database/projects/%s/branches", request.getProjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDatabaseBranchesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListDatabaseEndpointsResponse listDatabaseEndpoints(ListDatabaseEndpointsRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints", + request.getProjectId(), request.getBranchId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDatabaseEndpointsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListDatabaseProjectsResponse listDatabaseProjects(ListDatabaseProjectsRequest request) { + String path = "/api/2.0/database/projects"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDatabaseProjectsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseEndpoint restartDatabaseEndpoint(RestartDatabaseEndpointRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints/%s/restart", + 
request.getProjectId(), request.getBranchId(), request.getEndpointId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseEndpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s", + request.getProjectId(), request.getBranchId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseBranch())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseBranch.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseEndpoint updateDatabaseEndpoint(UpdateDatabaseEndpointRequest request) { + String path = + String.format( + "/api/2.0/database/projects/%s/branches/%s/endpoints/%s", + request.getProjectId(), request.getBranchId(), request.getEndpointId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseEndpoint())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseEndpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest request) { + String path = String.format("/api/2.0/database/projects/%s", request.getProjectId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseProject())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseProject.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java new file mode 100755 index 000000000..cd2a49bd0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectService.java @@ -0,0 +1,68 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** + * Database Projects provide access to a database via REST API or direct SQL. + * + *

<p>This is the high-level interface, which contains generated methods. + * + *

<p>Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface DatabaseProjectService { + /** Create a Database Branch. */ + DatabaseBranch createDatabaseBranch(CreateDatabaseBranchRequest createDatabaseBranchRequest); + + /** Create a Database Endpoint. */ + DatabaseEndpoint createDatabaseEndpoint( + CreateDatabaseEndpointRequest createDatabaseEndpointRequest); + + /** Create a Database Project. */ + DatabaseProject createDatabaseProject(CreateDatabaseProjectRequest createDatabaseProjectRequest); + + /** Delete a Database Branch. */ + void deleteDatabaseBranch(DeleteDatabaseBranchRequest deleteDatabaseBranchRequest); + + /** Delete a Database Endpoint. */ + void deleteDatabaseEndpoint(DeleteDatabaseEndpointRequest deleteDatabaseEndpointRequest); + + /** Delete a Database Project. */ + void deleteDatabaseProject(DeleteDatabaseProjectRequest deleteDatabaseProjectRequest); + + /** Get a Database Branch. */ + DatabaseBranch getDatabaseBranch(GetDatabaseBranchRequest getDatabaseBranchRequest); + + /** Get a Database Endpoint. */ + DatabaseEndpoint getDatabaseEndpoint(GetDatabaseEndpointRequest getDatabaseEndpointRequest); + + /** Get a Database Project. */ + DatabaseProject getDatabaseProject(GetDatabaseProjectRequest getDatabaseProjectRequest); + + /** List Database Branches. */ + ListDatabaseBranchesResponse listDatabaseBranches( + ListDatabaseBranchesRequest listDatabaseBranchesRequest); + + /** List Database Endpoints. */ + ListDatabaseEndpointsResponse listDatabaseEndpoints( + ListDatabaseEndpointsRequest listDatabaseEndpointsRequest); + + /** List Database Projects. */ + ListDatabaseProjectsResponse listDatabaseProjects( + ListDatabaseProjectsRequest listDatabaseProjectsRequest); + + /** Restart a Database Endpoint. TODO: should return databricks.longrunning.Operation */ + DatabaseEndpoint restartDatabaseEndpoint( + RestartDatabaseEndpointRequest restartDatabaseEndpointRequest); + + /** Update a Database Branch. */ + DatabaseBranch updateDatabaseBranch(UpdateDatabaseBranchRequest updateDatabaseBranchRequest); + + /** Update a Database Endpoint. TODO: should return databricks.longrunning.Operation */ + DatabaseEndpoint updateDatabaseEndpoint( + UpdateDatabaseEndpointRequest updateDatabaseEndpointRequest); + + /** Update a Database Project. */ + DatabaseProject updateDatabaseProject(UpdateDatabaseProjectRequest updateDatabaseProjectRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java new file mode 100755 index 000000000..bf18dcd42 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseProjectSettings.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseProjectSettings { + /** + * Sets wal_level=logical for all compute endpoints in this project. All active endpoints will be + * suspended. Once enabled, logical replication cannot be disabled.
+ */ + @JsonProperty("enable_logical_replication") + private Boolean enableLogicalReplication; + + public DatabaseProjectSettings setEnableLogicalReplication(Boolean enableLogicalReplication) { + this.enableLogicalReplication = enableLogicalReplication; + return this; + } + + public Boolean getEnableLogicalReplication() { + return enableLogicalReplication; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseProjectSettings that = (DatabaseProjectSettings) o; + return Objects.equals(enableLogicalReplication, that.enableLogicalReplication); + } + + @Override + public int hashCode() { + return Objects.hash(enableLogicalReplication); + } + + @Override + public String toString() { + return new ToStringer(DatabaseProjectSettings.class) + .add("enableLogicalReplication", enableLogicalReplication) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java index 4f6d40276..182ba0435 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java @@ -49,6 +49,10 @@ void deleteDatabaseInstanceRole( /** Delete a Synced Database Table. */ void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest); + /** Failover the primary node of a Database Instance to a secondary. */ + DatabaseInstance failoverDatabaseInstance( + FailoverDatabaseInstanceRequest failoverDatabaseInstanceRequest); + /** Find a Database Instance by uid. */ DatabaseInstance findDatabaseInstanceByUid( FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest); @@ -102,6 +106,10 @@ ListSyncedDatabaseTablesResponse listSyncedDatabaseTables( DatabaseInstance updateDatabaseInstance( UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest); + /** Update a role for a Database Instance. */ + DatabaseInstanceRole updateDatabaseInstanceRole( + UpdateDatabaseInstanceRoleRequest updateDatabaseInstanceRoleRequest); + /** This API is currently unimplemented, but exposed for Terraform support. 
*/ SyncedDatabaseTable updateSyncedDatabaseTable( UpdateSyncedDatabaseTableRequest updateSyncedDatabaseTableRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java index 8bae3d07a..42e127417 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java @@ -37,6 +37,10 @@ public class DatabaseTable { @JsonProperty("name") private String name; + /** Data serving REST API URL for this table */ + @JsonProperty("table_serving_url") + private String tableServingUrl; + public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) { this.databaseInstanceName = databaseInstanceName; return this; @@ -64,6 +68,15 @@ public String getName() { return name; } + public DatabaseTable setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -71,12 +84,13 @@ public boolean equals(Object o) { DatabaseTable that = (DatabaseTable) o; return Objects.equals(databaseInstanceName, that.databaseInstanceName) && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) - && Objects.equals(name, that.name); + && Objects.equals(name, that.name) + && Objects.equals(tableServingUrl, that.tableServingUrl); } @Override public int hashCode() { - return Objects.hash(databaseInstanceName, logicalDatabaseName, name); + return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl); } @Override @@ -85,6 +99,7 @@ public String toString() { .add("databaseInstanceName", databaseInstanceName) .add("logicalDatabaseName", logicalDatabaseName) .add("name", name) + .add("tableServingUrl", tableServingUrl) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java new file mode 100755 index 000000000..895faa2a1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseBranchRequest.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
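The delete and get request classes that follow carry only resource identifiers, all marked `@JsonIgnore`: DatabaseProjectImpl formats them into the URL path rather than serializing them into a JSON body. A sketch of the two equivalent call styles, assuming `projects` is a `DatabaseProjectAPI` handle and the IDs are hypothetical:

```java
// Request-object form: the @JsonIgnore fields become path segments in
// DELETE /api/2.0/database/projects/{projectId}/branches/{branchId}.
projects.deleteDatabaseBranch(
    new DeleteDatabaseBranchRequest().setProjectId("proj-123").setBranchId("br-456"));

// Convenience overload generated on the API class; it builds the same request.
projects.deleteDatabaseBranch("proj-123", "br-456");
```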
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteDatabaseBranchRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonIgnore private String projectId; + + public DeleteDatabaseBranchRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public DeleteDatabaseBranchRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseBranchRequest that = (DeleteDatabaseBranchRequest) o; + return Objects.equals(branchId, that.branchId) && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, projectId); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseBranchRequest.class) + .add("branchId", branchId) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java new file mode 100755 index 000000000..4d123c6f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseEndpointRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
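Endpoints are addressed by the full project/branch/endpoint triple. A sketch of restarting and then deleting one (IDs hypothetical; the fluent setters on `RestartDatabaseEndpointRequest` are inferred from the getters the Impl calls when formatting the path):

```java
// POST .../endpoints/{endpointId}/restart; per the TODO in the service, this
// currently returns the DatabaseEndpoint itself rather than a long-running Operation.
DatabaseEndpoint restarted =
    projects.restartDatabaseEndpoint(
        new RestartDatabaseEndpointRequest()
            .setProjectId("proj-123")
            .setBranchId("br-456")
            .setEndpointId("ep-789"));

// DELETE .../endpoints/{endpointId} via the convenience overload.
projects.deleteDatabaseEndpoint("proj-123", "br-456", "ep-789");
```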
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteDatabaseEndpointRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonIgnore private String endpointId; + + /** */ + @JsonIgnore private String projectId; + + public DeleteDatabaseEndpointRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public DeleteDatabaseEndpointRequest setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public DeleteDatabaseEndpointRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseEndpointRequest that = (DeleteDatabaseEndpointRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, endpointId, projectId); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseEndpointRequest.class) + .add("branchId", branchId) + .add("endpointId", endpointId) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java new file mode 100755 index 000000000..557cef9f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseProjectRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
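All three list operations share the same pagination contract: pass `page_token` to fetch the next page and stop when `next_page_token` comes back null or empty, which is exactly the stopping rule wired into the Paginator above. A manual page loop against the service interface, for illustration (`service` stands for any `DatabaseProjectService`; the page size is arbitrary):

```java
// Manual pagination mirroring the Paginator's stopping rule.
ListDatabaseProjectsRequest request = new ListDatabaseProjectsRequest().setPageSize(50L);
while (true) {
  ListDatabaseProjectsResponse page = service.listDatabaseProjects(request);
  if (page.getDatabaseProjects() != null) {
    for (DatabaseProject project : page.getDatabaseProjects()) {
      System.out.println(project.getProjectId());
    }
  }
  String token = page.getNextPageToken();
  if (token == null || token.isEmpty()) {
    break; // last page reached
  }
  request.setPageToken(token);
}
```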
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteDatabaseProjectRequest { + /** */ + @JsonIgnore private String projectId; + + public DeleteDatabaseProjectRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseProjectRequest that = (DeleteDatabaseProjectRequest) o; + return Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(projectId); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseProjectRequest.class) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java new file mode 100755 index 000000000..24cda6829 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FailoverDatabaseInstanceRequest { + /** */ + @JsonProperty("failover_target_database_instance_name") + private String failoverTargetDatabaseInstanceName; + + /** Name of the instance to failover. 
*/ + @JsonIgnore private String name; + + public FailoverDatabaseInstanceRequest setFailoverTargetDatabaseInstanceName( + String failoverTargetDatabaseInstanceName) { + this.failoverTargetDatabaseInstanceName = failoverTargetDatabaseInstanceName; + return this; + } + + public String getFailoverTargetDatabaseInstanceName() { + return failoverTargetDatabaseInstanceName; + } + + public FailoverDatabaseInstanceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FailoverDatabaseInstanceRequest that = (FailoverDatabaseInstanceRequest) o; + return Objects.equals( + failoverTargetDatabaseInstanceName, that.failoverTargetDatabaseInstanceName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(failoverTargetDatabaseInstanceName, name); + } + + @Override + public String toString() { + return new ToStringer(FailoverDatabaseInstanceRequest.class) + .add("failoverTargetDatabaseInstanceName", failoverTargetDatabaseInstanceName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java new file mode 100755 index 000000000..d695581dc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseBranchRequest.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDatabaseBranchRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonIgnore private String projectId; + + public GetDatabaseBranchRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public GetDatabaseBranchRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseBranchRequest that = (GetDatabaseBranchRequest) o; + return Objects.equals(branchId, that.branchId) && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, projectId); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseBranchRequest.class) + .add("branchId", branchId) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java new file mode 100755 index 000000000..a0c8e44a3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseEndpointRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
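FailoverDatabaseInstanceRequest above mixes the two parameter styles: `name` is a path parameter (`@JsonIgnore`), while `failover_target_database_instance_name` travels in the JSON body. A sketch of triggering a failover (instance names hypothetical; `database` stands for whichever API wrapper surfaces the new DatabaseService method):

```java
// Fail the primary over to a named secondary; whether the target may be
// omitted is not documented in the generated code, so it is set explicitly here.
DatabaseInstance instance =
    database.failoverDatabaseInstance(
        new FailoverDatabaseInstanceRequest()
            .setName("my-instance")
            .setFailoverTargetDatabaseInstanceName("my-instance-standby"));
System.out.println(instance);
```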
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDatabaseEndpointRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonIgnore private String endpointId; + + /** */ + @JsonIgnore private String projectId; + + public GetDatabaseEndpointRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public GetDatabaseEndpointRequest setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public GetDatabaseEndpointRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseEndpointRequest that = (GetDatabaseEndpointRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, endpointId, projectId); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseEndpointRequest.class) + .add("branchId", branchId) + .add("endpointId", endpointId) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java new file mode 100755 index 000000000..21c5cc5b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseProjectRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
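Each generated API class also exposes a second constructor that accepts the service interface directly, so tests can bypass HTTP entirely. Because the request types implement value-based `equals` (as seen throughout these classes), a stub can match on a whole request object. A sketch using Mockito (an assumption; Mockito is not part of this diff):

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Stub the service and inject it through the mock constructor.
DatabaseProjectService fake = mock(DatabaseProjectService.class);
when(fake.getDatabaseProject(new GetDatabaseProjectRequest().setProjectId("proj-123")))
    .thenReturn(new DatabaseProject().setProjectId("proj-123"));

DatabaseProjectAPI api = new DatabaseProjectAPI(fake);
// The convenience overload builds an equal GetDatabaseProjectRequest, so the stub matches.
DatabaseProject project = api.getDatabaseProject("proj-123");
```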
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDatabaseProjectRequest { + /** */ + @JsonIgnore private String projectId; + + public GetDatabaseProjectRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseProjectRequest that = (GetDatabaseProjectRequest) o; + return Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(projectId); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseProjectRequest.class).add("projectId", projectId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java new file mode 100755 index 000000000..15d795165 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesRequest.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListDatabaseBranchesRequest { + /** Upper bound for items returned. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * Pagination token to go to the next page of Database Branches. Requests first page if absent. 
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** */ + @JsonIgnore private String projectId; + + public ListDatabaseBranchesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDatabaseBranchesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListDatabaseBranchesRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseBranchesRequest that = (ListDatabaseBranchesRequest) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, projectId); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseBranchesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java new file mode 100755 index 000000000..6ffedd7a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseBranchesResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListDatabaseBranchesResponse { + /** List of branches. */ + @JsonProperty("database_branches") + private Collection<DatabaseBranch> databaseBranches; + + /** Pagination token to request the next page of branches.
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDatabaseBranchesResponse setDatabaseBranches( + Collection databaseBranches) { + this.databaseBranches = databaseBranches; + return this; + } + + public Collection getDatabaseBranches() { + return databaseBranches; + } + + public ListDatabaseBranchesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseBranchesResponse that = (ListDatabaseBranchesResponse) o; + return Objects.equals(databaseBranches, that.databaseBranches) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(databaseBranches, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseBranchesResponse.class) + .add("databaseBranches", databaseBranches) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java new file mode 100755 index 000000000..7702ea0b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsRequest.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListDatabaseEndpointsRequest { + /** */ + @JsonIgnore private String branchId; + + /** Upper bound for items returned. If specified must be at least 10. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * Pagination token to go to the next page of Database Endpoints. Requests first page if absent. 
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** */ + @JsonIgnore private String projectId; + + public ListDatabaseEndpointsRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public ListDatabaseEndpointsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDatabaseEndpointsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListDatabaseEndpointsRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseEndpointsRequest that = (ListDatabaseEndpointsRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, pageSize, pageToken, projectId); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseEndpointsRequest.class) + .add("branchId", branchId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java new file mode 100755 index 000000000..331b1615a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseEndpointsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListDatabaseEndpointsResponse { + /** List of endpoints. */ + @JsonProperty("database_endpoints") + private Collection<DatabaseEndpoint> databaseEndpoints; + + /** Pagination token to request the next page of endpoints.
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDatabaseEndpointsResponse setDatabaseEndpoints( + Collection databaseEndpoints) { + this.databaseEndpoints = databaseEndpoints; + return this; + } + + public Collection getDatabaseEndpoints() { + return databaseEndpoints; + } + + public ListDatabaseEndpointsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseEndpointsResponse that = (ListDatabaseEndpointsResponse) o; + return Objects.equals(databaseEndpoints, that.databaseEndpoints) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(databaseEndpoints, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseEndpointsResponse.class) + .add("databaseEndpoints", databaseEndpoints) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java new file mode 100755 index 000000000..e29f7d6e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsRequest.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListDatabaseProjectsRequest { + /** Upper bound for items returned. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * Pagination token to go to the next page of Database Projects. Requests first page if absent. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListDatabaseProjectsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDatabaseProjectsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseProjectsRequest that = (ListDatabaseProjectsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseProjectsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java new file mode 100755 index 000000000..ab8825496 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseProjectsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListDatabaseProjectsResponse { + /** List of projects. */ + @JsonProperty("database_projects") + private Collection<DatabaseProject> databaseProjects; + + /** Pagination token to request the next page of projects.
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListDatabaseProjectsResponse setDatabaseProjects( + Collection<DatabaseProject> databaseProjects) { + this.databaseProjects = databaseProjects; + return this; + } + + public Collection<DatabaseProject> getDatabaseProjects() { + return databaseProjects; + } + + public ListDatabaseProjectsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseProjectsResponse that = (ListDatabaseProjectsResponse) o; + return Objects.equals(databaseProjects, that.databaseProjects) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(databaseProjects, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseProjectsResponse.class) + .add("databaseProjects", databaseProjects) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java index f3a3befad..8b9f1fd19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java @@ -13,6 +13,10 @@ */ @Generated public class NewPipelineSpec { + /** Budget policy of this pipeline. */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** * This field needs to be specified if the destination catalog is a managed postgres catalog.
* @@ -31,6 +35,15 @@ public class NewPipelineSpec { @JsonProperty("storage_schema") private String storageSchema; + public NewPipelineSpec setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public NewPipelineSpec setStorageCatalog(String storageCatalog) { this.storageCatalog = storageCatalog; return this; @@ -54,18 +67,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NewPipelineSpec that = (NewPipelineSpec) o; - return Objects.equals(storageCatalog, that.storageCatalog) + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(storageCatalog, that.storageCatalog) && Objects.equals(storageSchema, that.storageSchema); } @Override public int hashCode() { - return Objects.hash(storageCatalog, storageSchema); + return Objects.hash(budgetPolicyId, storageCatalog, storageSchema); } @Override public String toString() { return new ToStringer(NewPipelineSpec.class) + .add("budgetPolicyId", budgetPolicyId) .add("storageCatalog", storageCatalog) .add("storageSchema", storageSchema) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java new file mode 100755 index 000000000..88ebd0a59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RestartDatabaseEndpointRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class RestartDatabaseEndpointRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonIgnore private String endpointId; + + /** */ + @JsonIgnore private String projectId; + + public RestartDatabaseEndpointRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public RestartDatabaseEndpointRequest setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public RestartDatabaseEndpointRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestartDatabaseEndpointRequest that = (RestartDatabaseEndpointRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, endpointId, projectId); + } + + @Override + public String toString() { + return new ToStringer(RestartDatabaseEndpointRequest.class) + .add("branchId", branchId) + .add("endpointId", endpointId) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java index 090724e72..adbd3bece 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java @@ -7,13 +7,17 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Next field marker: 14 */ +/** Next field marker: 18 */ @Generated public class SyncedDatabaseTable { /** Synced Table data synchronization status */ @JsonProperty("data_synchronization_status") private SyncedTableStatus dataSynchronizationStatus; + /** The branch_id of the database branch associated with the table. */ + @JsonProperty("database_branch_id") + private String databaseBranchId; + /** * Name of the target database instance. This is required when creating synced database tables in * standard catalogs. This is optional when creating synced database tables in registered @@ -24,6 +28,14 @@ public class SyncedDatabaseTable { @JsonProperty("database_instance_name") private String databaseInstanceName; + /** The project_id of the database project associated with the table. */ + @JsonProperty("database_project_id") + private String databaseProjectId; + + /** The branch_id of the database branch associated with the table. */ + @JsonProperty("effective_database_branch_id") + private String effectiveDatabaseBranchId; + /** * The name of the database instance that this table is registered to. This field is always * returned, and for tables inside database catalogs is inferred database instance associated with @@ -32,6 +44,10 @@ public class SyncedDatabaseTable { @JsonProperty("effective_database_instance_name") private String effectiveDatabaseInstanceName; + /** The project_id of the database project associated with the table. */ + @JsonProperty("effective_database_project_id") + private String effectiveDatabaseProjectId; + /** The name of the logical database that this table is registered to. */ @JsonProperty("effective_logical_database_name") private String effectiveLogicalDatabaseName; @@ -59,6 +75,10 @@ public class SyncedDatabaseTable { @JsonProperty("spec") private SyncedTableSpec spec; + /** Data serving REST API URL for this table */ + @JsonProperty("table_serving_url") + private String tableServingUrl; + /** * The provisioning state of the synced table entity in Unity Catalog. This is distinct from the * state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline @@ -77,6 +97,15 @@ public SyncedTableStatus getDataSynchronizationStatus() { return dataSynchronizationStatus; } + public SyncedDatabaseTable setDatabaseBranchId(String databaseBranchId) { + this.databaseBranchId = databaseBranchId; + return this; + } + + public String getDatabaseBranchId() { + return databaseBranchId; + } + public SyncedDatabaseTable setDatabaseInstanceName(String databaseInstanceName) { this.databaseInstanceName = databaseInstanceName; return this; @@ -86,6 +115,24 @@ public String getDatabaseInstanceName() { return databaseInstanceName; } + public SyncedDatabaseTable setDatabaseProjectId(String databaseProjectId) { + this.databaseProjectId = databaseProjectId; + return this; + } + + public String getDatabaseProjectId() { + return databaseProjectId; + } + + public SyncedDatabaseTable setEffectiveDatabaseBranchId(String effectiveDatabaseBranchId) { + this.effectiveDatabaseBranchId = effectiveDatabaseBranchId; + return this; + } + + public String getEffectiveDatabaseBranchId() { + return effectiveDatabaseBranchId; + } + public SyncedDatabaseTable setEffectiveDatabaseInstanceName( String effectiveDatabaseInstanceName) { this.effectiveDatabaseInstanceName = effectiveDatabaseInstanceName; @@ -96,6 +143,15 @@ public String getEffectiveDatabaseInstanceName() { return effectiveDatabaseInstanceName; } + public SyncedDatabaseTable setEffectiveDatabaseProjectId(String effectiveDatabaseProjectId) { + this.effectiveDatabaseProjectId = effectiveDatabaseProjectId; + return this; + } + + public String getEffectiveDatabaseProjectId() { + return effectiveDatabaseProjectId; + } + public SyncedDatabaseTable setEffectiveLogicalDatabaseName(String effectiveLogicalDatabaseName) { this.effectiveLogicalDatabaseName = effectiveLogicalDatabaseName; return this; @@ -132,6 +188,15 @@ public SyncedTableSpec getSpec() { return spec; } + public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + public SyncedDatabaseTable setUnityCatalogProvisioningState( ProvisioningInfoState unityCatalogProvisioningState) { this.unityCatalogProvisioningState = unityCatalogProvisioningState; @@ -148,12 +213,17 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; SyncedDatabaseTable that = (SyncedDatabaseTable) o; return Objects.equals(dataSynchronizationStatus, that.dataSynchronizationStatus) + && Objects.equals(databaseBranchId, that.databaseBranchId) && Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(databaseProjectId, that.databaseProjectId) + && Objects.equals(effectiveDatabaseBranchId, that.effectiveDatabaseBranchId) && Objects.equals(effectiveDatabaseInstanceName, that.effectiveDatabaseInstanceName) + && Objects.equals(effectiveDatabaseProjectId, that.effectiveDatabaseProjectId) && Objects.equals(effectiveLogicalDatabaseName, that.effectiveLogicalDatabaseName) && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) && Objects.equals(name, that.name) && Objects.equals(spec, that.spec) + && Objects.equals(tableServingUrl, that.tableServingUrl) && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState); } @@ -161,12 +231,17 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( dataSynchronizationStatus, + databaseBranchId, databaseInstanceName, + databaseProjectId, + 
effectiveDatabaseBranchId, effectiveDatabaseInstanceName, + effectiveDatabaseProjectId, effectiveLogicalDatabaseName, logicalDatabaseName, name, spec, + tableServingUrl, unityCatalogProvisioningState); } @@ -174,12 +249,17 @@ public int hashCode() { public String toString() { return new ToStringer(SyncedDatabaseTable.class) .add("dataSynchronizationStatus", dataSynchronizationStatus) + .add("databaseBranchId", databaseBranchId) .add("databaseInstanceName", databaseInstanceName) + .add("databaseProjectId", databaseProjectId) + .add("effectiveDatabaseBranchId", effectiveDatabaseBranchId) .add("effectiveDatabaseInstanceName", effectiveDatabaseInstanceName) + .add("effectiveDatabaseProjectId", effectiveDatabaseProjectId) .add("effectiveLogicalDatabaseName", effectiveLogicalDatabaseName) .add("logicalDatabaseName", logicalDatabaseName) .add("name", name) .add("spec", spec) + .add("tableServingUrl", tableServingUrl) .add("unityCatalogProvisioningState", unityCatalogProvisioningState) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java new file mode 100755 index 000000000..c907a5a08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseBranchRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDatabaseBranchRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonProperty("database_branch") + private DatabaseBranch databaseBranch; + + /** */ + @JsonIgnore private String projectId; + + /** The list of fields to update. If unspecified, all fields will be updated when possible. 
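+ *
+ * <p>A sketch of a partial update (assumes {@code databaseBranch} already holds the new field
+ * values; the mask path {@code display_name} is illustrative, not taken from the spec):
+ *
+ * <pre>{@code
+ * UpdateDatabaseBranchRequest request =
+ *     new UpdateDatabaseBranchRequest()
+ *         .setProjectId(projectId)
+ *         .setBranchId(branchId)
+ *         .setDatabaseBranch(databaseBranch)
+ *         // Only fields named in the mask are written; others keep their current values.
+ *         .setUpdateMask("display_name");
+ * }</pre>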
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateDatabaseBranchRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public UpdateDatabaseBranchRequest setDatabaseBranch(DatabaseBranch databaseBranch) { + this.databaseBranch = databaseBranch; + return this; + } + + public DatabaseBranch getDatabaseBranch() { + return databaseBranch; + } + + public UpdateDatabaseBranchRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public UpdateDatabaseBranchRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseBranchRequest that = (UpdateDatabaseBranchRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(databaseBranch, that.databaseBranch) + && Objects.equals(projectId, that.projectId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, databaseBranch, projectId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseBranchRequest.class) + .add("branchId", branchId) + .add("databaseBranch", databaseBranch) + .add("projectId", projectId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java new file mode 100755 index 000000000..a679de9f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseEndpointRequest.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDatabaseEndpointRequest { + /** */ + @JsonIgnore private String branchId; + + /** */ + @JsonProperty("database_endpoint") + private DatabaseEndpoint databaseEndpoint; + + /** */ + @JsonIgnore private String endpointId; + + /** */ + @JsonIgnore private String projectId; + + /** The list of fields to update. If unspecified, all fields will be updated when possible. 
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateDatabaseEndpointRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public UpdateDatabaseEndpointRequest setDatabaseEndpoint(DatabaseEndpoint databaseEndpoint) { + this.databaseEndpoint = databaseEndpoint; + return this; + } + + public DatabaseEndpoint getDatabaseEndpoint() { + return databaseEndpoint; + } + + public UpdateDatabaseEndpointRequest setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public UpdateDatabaseEndpointRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public UpdateDatabaseEndpointRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseEndpointRequest that = (UpdateDatabaseEndpointRequest) o; + return Objects.equals(branchId, that.branchId) + && Objects.equals(databaseEndpoint, that.databaseEndpoint) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(projectId, that.projectId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(branchId, databaseEndpoint, endpointId, projectId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseEndpointRequest.class) + .add("branchId", branchId) + .add("databaseEndpoint", databaseEndpoint) + .add("endpointId", endpointId) + .add("projectId", projectId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java new file mode 100755 index 000000000..bc2ad46c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRoleRequest.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDatabaseInstanceRoleRequest { + /** */ + @JsonIgnore + @QueryParam("database_instance_name") + private String databaseInstanceName; + + /** */ + @JsonProperty("database_instance_role") + private DatabaseInstanceRole databaseInstanceRole; + + /** */ + @JsonIgnore private String instanceName; + + /** The name of the role. This is the unique identifier for the role in an instance. 
*/ + @JsonIgnore private String name; + + public UpdateDatabaseInstanceRoleRequest setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public UpdateDatabaseInstanceRoleRequest setDatabaseInstanceRole( + DatabaseInstanceRole databaseInstanceRole) { + this.databaseInstanceRole = databaseInstanceRole; + return this; + } + + public DatabaseInstanceRole getDatabaseInstanceRole() { + return databaseInstanceRole; + } + + public UpdateDatabaseInstanceRoleRequest setInstanceName(String instanceName) { + this.instanceName = instanceName; + return this; + } + + public String getInstanceName() { + return instanceName; + } + + public UpdateDatabaseInstanceRoleRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseInstanceRoleRequest that = (UpdateDatabaseInstanceRoleRequest) o; + return Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(databaseInstanceRole, that.databaseInstanceRole) + && Objects.equals(instanceName, that.instanceName) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstanceName, databaseInstanceRole, instanceName, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseInstanceRoleRequest.class) + .add("databaseInstanceName", databaseInstanceName) + .add("databaseInstanceRole", databaseInstanceRole) + .add("instanceName", instanceName) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java new file mode 100755 index 000000000..60b4844da --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseProjectRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDatabaseProjectRequest { + /** */ + @JsonProperty("database_project") + private DatabaseProject databaseProject; + + /** */ + @JsonIgnore private String projectId; + + /** The list of fields to update. If unspecified, all fields will be updated when possible. 
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateDatabaseProjectRequest setDatabaseProject(DatabaseProject databaseProject) { + this.databaseProject = databaseProject; + return this; + } + + public DatabaseProject getDatabaseProject() { + return databaseProject; + } + + public UpdateDatabaseProjectRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + public UpdateDatabaseProjectRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseProjectRequest that = (UpdateDatabaseProjectRequest) o; + return Objects.equals(databaseProject, that.databaseProject) + && Objects.equals(projectId, that.projectId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(databaseProject, projectId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseProjectRequest.class) + .add("databaseProject", databaseProject) + .add("projectId", projectId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java new file mode 100755 index 000000000..1bff8d647 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The granularity for aggregating data into time windows based on their timestamp. */ +@Generated +public enum AggregationGranularity { + AGGREGATION_GRANULARITY_1_DAY, + AGGREGATION_GRANULARITY_1_HOUR, + AGGREGATION_GRANULARITY_1_MONTH, + AGGREGATION_GRANULARITY_1_WEEK, + AGGREGATION_GRANULARITY_1_YEAR, + AGGREGATION_GRANULARITY_2_WEEKS, + AGGREGATION_GRANULARITY_30_MINUTES, + AGGREGATION_GRANULARITY_3_WEEKS, + AGGREGATION_GRANULARITY_4_WEEKS, + AGGREGATION_GRANULARITY_5_MINUTES, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java new file mode 100755 index 000000000..6b99c71fa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Anomaly Detection Configurations. */ +@Generated +public class AnomalyDetectionConfig { + /** + * The id of the workflow that detects the anomaly. This field will only be returned in the + * Get/Update response, if the request comes from the workspace where this anomaly detection job + * is created. 
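+ *
+ * <p>Illustrative null-safe read (assumes the parent {@code Monitor} exposes a
+ * {@code getAnomalyDetectionConfig()} accessor; outside the creating workspace this id is
+ * simply absent):
+ *
+ * <pre>{@code
+ * AnomalyDetectionConfig config = monitor.getAnomalyDetectionConfig();
+ * Long workflowId = config == null ? null : config.getAnomalyDetectionWorkflowId();
+ * }</pre>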
+ */ + @JsonProperty("anomaly_detection_workflow_id") + private Long anomalyDetectionWorkflowId; + + /** The type of the last run of the workflow. */ + @JsonProperty("job_type") + private AnomalyDetectionJobType jobType; + + /** If the health indicator should be shown. */ + @JsonProperty("publish_health_indicator") + private Boolean publishHealthIndicator; + + public AnomalyDetectionConfig setAnomalyDetectionWorkflowId(Long anomalyDetectionWorkflowId) { + this.anomalyDetectionWorkflowId = anomalyDetectionWorkflowId; + return this; + } + + public Long getAnomalyDetectionWorkflowId() { + return anomalyDetectionWorkflowId; + } + + public AnomalyDetectionConfig setJobType(AnomalyDetectionJobType jobType) { + this.jobType = jobType; + return this; + } + + public AnomalyDetectionJobType getJobType() { + return jobType; + } + + public AnomalyDetectionConfig setPublishHealthIndicator(Boolean publishHealthIndicator) { + this.publishHealthIndicator = publishHealthIndicator; + return this; + } + + public Boolean getPublishHealthIndicator() { + return publishHealthIndicator; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; + return Objects.equals(anomalyDetectionWorkflowId, that.anomalyDetectionWorkflowId) + && Objects.equals(jobType, that.jobType) + && Objects.equals(publishHealthIndicator, that.publishHealthIndicator); + } + + @Override + public int hashCode() { + return Objects.hash(anomalyDetectionWorkflowId, jobType, publishHealthIndicator); + } + + @Override + public String toString() { + return new ToStringer(AnomalyDetectionConfig.class) + .add("anomalyDetectionWorkflowId", anomalyDetectionWorkflowId) + .add("jobType", jobType) + .add("publishHealthIndicator", publishHealthIndicator) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java new file mode 100755 index 000000000..8176bb473 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionJobType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** Anomaly Detection job type. */ +@Generated +public enum AnomalyDetectionJobType { + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN, + ANOMALY_DETECTION_JOB_TYPE_NORMAL, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java new file mode 100755 index 000000000..7d24f64ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Request to cancel a refresh. */ +@Generated +public class CancelRefreshRequest { + /** The UUID of the request object. For example, schema id. 
*/ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + /** Unique id of the refresh operation. */ + @JsonIgnore private Long refreshId; + + public CancelRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public CancelRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public CancelRefreshRequest setRefreshId(Long refreshId) { + this.refreshId = refreshId; + return this; + } + + public Long getRefreshId() { + return refreshId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelRefreshRequest that = (CancelRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refreshId, that.refreshId); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, refreshId); + } + + @Override + public String toString() { + return new ToStringer(CancelRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refreshId", refreshId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java new file mode 100755 index 000000000..3f81c4845 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Response to cancelling a refresh. */ +@Generated +public class CancelRefreshResponse { + /** The refresh to cancel. */ + @JsonProperty("refresh") + private Refresh refresh; + + public CancelRefreshResponse setRefresh(Refresh refresh) { + this.refresh = refresh; + return this; + } + + public Refresh getRefresh() { + return refresh; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelRefreshResponse that = (CancelRefreshResponse) o; + return Objects.equals(refresh, that.refresh); + } + + @Override + public int hashCode() { + return Objects.hash(refresh); + } + + @Override + public String toString() { + return new ToStringer(CancelRefreshResponse.class).add("refresh", refresh).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java new file mode 100755 index 000000000..b98cb431c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateMonitorRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateMonitorRequest { + /** The monitor to create. */ + @JsonProperty("monitor") + private Monitor monitor; + + public CreateMonitorRequest setMonitor(Monitor monitor) { + this.monitor = monitor; + return this; + } + + public Monitor getMonitor() { + return monitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMonitorRequest that = (CreateMonitorRequest) o; + return Objects.equals(monitor, that.monitor); + } + + @Override + public int hashCode() { + return Objects.hash(monitor); + } + + @Override + public String toString() { + return new ToStringer(CreateMonitorRequest.class).add("monitor", monitor).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java new file mode 100755 index 000000000..25ba06bc9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateRefreshRequest { + /** The UUID of the request object. For example, table id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`.
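+ *
+ * <p>A minimal sketch of triggering a manual table refresh (assumes a {@code DataQualityAPI}
+ * handle named {@code dataQuality} and a table UUID in {@code tableId}; not part of the
+ * generated code):
+ *
+ * <pre>{@code
+ * Refresh refresh =
+ *     dataQuality.createRefresh(
+ *         new CreateRefreshRequest()
+ *             .setObjectType("table")
+ *             .setObjectId(tableId)
+ *             .setRefresh(new Refresh()));
+ * }</pre>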
*/ + @JsonIgnore private String objectType; + + /** The refresh to create */ + @JsonProperty("refresh") + private Refresh refresh; + + public CreateRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public CreateRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public CreateRefreshRequest setRefresh(Refresh refresh) { + this.refresh = refresh; + return this; + } + + public Refresh getRefresh() { + return refresh; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRefreshRequest that = (CreateRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refresh, that.refresh); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, refresh); + } + + @Override + public String toString() { + return new ToStringer(CreateRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refresh", refresh) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java new file mode 100755 index 000000000..72f07e684 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedule.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The data quality monitoring workflow cron schedule. */ +@Generated +public class CronSchedule { + /** Read only field that indicates whether the schedule is paused or not. */ + @JsonProperty("pause_status") + private CronSchedulePauseStatus pauseStatus; + + /** + * The expression that determines when to run the monitor. See [examples]. + * + *

[examples]: + * https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html + */ + @JsonProperty("quartz_cron_expression") + private String quartzCronExpression; + + /** + * A Java timezone id. The schedule for a job will be resolved with respect to this timezone. See + * `Java TimeZone <https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html>`_ for + * details. The timezone id (e.g., ``America/Los_Angeles``) in which to evaluate the quartz + * expression. + */ + @JsonProperty("timezone_id") + private String timezoneId; + + public CronSchedule setPauseStatus(CronSchedulePauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public CronSchedulePauseStatus getPauseStatus() { + return pauseStatus; + } + + public CronSchedule setQuartzCronExpression(String quartzCronExpression) { + this.quartzCronExpression = quartzCronExpression; + return this; + } + + public String getQuartzCronExpression() { + return quartzCronExpression; + } + + public CronSchedule setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronSchedule that = (CronSchedule) o; + return Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(quartzCronExpression, that.quartzCronExpression) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus, quartzCronExpression, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(CronSchedule.class) + .add("pauseStatus", pauseStatus) + .add("quartzCronExpression", quartzCronExpression) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java new file mode 100755 index 000000000..55d5db475 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CronSchedulePauseStatus.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The data quality monitoring workflow cron schedule pause status. */ +@Generated +public enum CronSchedulePauseStatus { + CRON_SCHEDULE_PAUSE_STATUS_PAUSED, + CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED, +}
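For orientation, a schedule that fires daily at midnight UTC could be expressed with the CronSchedule class above (a sketch; the quartz expression and timezone are illustrative values, and pause_status is read-only so it is not set here):

CronSchedule schedule =
    new CronSchedule()
        // Quartz fields: second minute hour day-of-month month day-of-week.
        .setQuartzCronExpression("0 0 0 * * ?")
        .setTimezoneId("UTC");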
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java new file mode 100755 index 000000000..24c38e762 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java @@ -0,0 +1,373 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Data Profiling Configurations. */ +@Generated +public class DataProfilingConfig { + /** + * Field for specifying the absolute path to a custom directory to store data-monitoring assets. + * Normally prepopulated to a default user location via UI and Python APIs. + */ + @JsonProperty("assets_dir") + private String assetsDir; + + /** + * Baseline table name. Baseline data is used to compute drift from the data in the monitored + * `table_name`. The baseline table and the monitored table shall have the same schema. + */ + @JsonProperty("baseline_table_name") + private String baselineTableName; + + /** Custom metrics. */ + @JsonProperty("custom_metrics") + private Collection<DataProfilingCustomMetric> customMetrics; + + /** + * Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in + * PENDING state. + */ + @JsonProperty("dashboard_id") + private String dashboardId; + + /** Table that stores drift metrics data. Format: `catalog.schema.table_name`. */ + @JsonProperty("drift_metrics_table_name") + private String driftMetricsTableName; + + /** The warehouse for dashboard creation */ + @JsonProperty("effective_warehouse_id") + private String effectiveWarehouseId; + + /** Configuration for monitoring inference log tables. */ + @JsonProperty("inference_log") + private InferenceLogConfig inferenceLog; + + /** The latest error message for a monitor failure. */ + @JsonProperty("latest_monitor_failure_message") + private String latestMonitorFailureMessage; + + /** + * Represents the current monitor configuration version in use. The version will be represented in + * a numeric fashion (1,2,3...). The field has flexibility to take on negative values, which can + * indicate corrupted monitor_version numbers. + */ + @JsonProperty("monitor_version") + private Long monitorVersion; + + /** Unity Catalog table to monitor. Format: `catalog.schema.table_name` */ + @JsonProperty("monitored_table_name") + private String monitoredTableName; + + /** Field for specifying notification settings. */ + @JsonProperty("notification_settings") + private NotificationSettings notificationSettings; + + /** ID of the schema where output tables are created. */ + @JsonProperty("output_schema_id") + private String outputSchemaId; + + /** Table that stores profile metrics data. Format: `catalog.schema.table_name`. */ + @JsonProperty("profile_metrics_table_name") + private String profileMetricsTableName; + + /** The cron schedule. */ + @JsonProperty("schedule") + private CronSchedule schedule; + + /** Whether to skip creating a default dashboard summarizing data quality metrics. */ + @JsonProperty("skip_builtin_dashboard") + private Boolean skipBuiltinDashboard; + + /** + * List of column expressions to slice data with for targeted analysis. The data is grouped by + * each expression independently, resulting in a separate slice for each predicate and its + * complements. For example `slicing_exprs=[“col_1”, “col_2 > 10”]` will generate the following + * slices: two slices for `col_2 > 10` (True and False), and one slice per unique value in `col1`. + * For high-cardinality columns, only the top 100 unique values by frequency will generate slices. + */ + @JsonProperty("slicing_exprs") + private Collection<String> slicingExprs; + + /** Configuration for monitoring snapshot tables. */ + @JsonProperty("snapshot") + private SnapshotConfig snapshot; + + /** The data profiling monitor status. */ + @JsonProperty("status") + private DataProfilingStatus status; + + /** Configuration for monitoring time series tables. */ + @JsonProperty("time_series") + private TimeSeriesConfig timeSeries; + + /** + * Optional argument to specify the warehouse for dashboard creation.
If not specified, the first + * running warehouse will be used. + */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public DataProfilingConfig setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public DataProfilingConfig setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public DataProfilingConfig setCustomMetrics(Collection<DataProfilingCustomMetric> customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection<DataProfilingCustomMetric> getCustomMetrics() { + return customMetrics; + } + + public DataProfilingConfig setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DataProfilingConfig setDriftMetricsTableName(String driftMetricsTableName) { + this.driftMetricsTableName = driftMetricsTableName; + return this; + } + + public String getDriftMetricsTableName() { + return driftMetricsTableName; + } + + public DataProfilingConfig setEffectiveWarehouseId(String effectiveWarehouseId) { + this.effectiveWarehouseId = effectiveWarehouseId; + return this; + } + + public String getEffectiveWarehouseId() { + return effectiveWarehouseId; + } + + public DataProfilingConfig setInferenceLog(InferenceLogConfig inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public InferenceLogConfig getInferenceLog() { + return inferenceLog; + } + + public DataProfilingConfig setLatestMonitorFailureMessage(String latestMonitorFailureMessage) { + this.latestMonitorFailureMessage = latestMonitorFailureMessage; + return this; + } + + public String getLatestMonitorFailureMessage() { + return latestMonitorFailureMessage; + } + + public DataProfilingConfig setMonitorVersion(Long monitorVersion) { + this.monitorVersion = monitorVersion; + return this; + } + + public Long getMonitorVersion() { + return monitorVersion; + } + + public DataProfilingConfig setMonitoredTableName(String monitoredTableName) { + this.monitoredTableName = monitoredTableName; + return this; + } + + public String getMonitoredTableName() { + return monitoredTableName; + } + + public DataProfilingConfig setNotificationSettings(NotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public NotificationSettings getNotificationSettings() { + return notificationSettings; + } + + public DataProfilingConfig setOutputSchemaId(String outputSchemaId) { + this.outputSchemaId = outputSchemaId; + return this; + } + + public String getOutputSchemaId() { + return outputSchemaId; + } + + public DataProfilingConfig setProfileMetricsTableName(String profileMetricsTableName) { + this.profileMetricsTableName = profileMetricsTableName; + return this; + } + + public String getProfileMetricsTableName() { + return profileMetricsTableName; + } + + public DataProfilingConfig setSchedule(CronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public CronSchedule getSchedule() { + return schedule; + } + + public DataProfilingConfig setSkipBuiltinDashboard(Boolean skipBuiltinDashboard) { + this.skipBuiltinDashboard = skipBuiltinDashboard; + return this; + } + + public Boolean getSkipBuiltinDashboard() { + return skipBuiltinDashboard; + } + + public DataProfilingConfig setSlicingExprs(Collection<String> slicingExprs) { + this.slicingExprs =
slicingExprs; + return this; + } + + public Collection<String> getSlicingExprs() { + return slicingExprs; + } + + public DataProfilingConfig setSnapshot(SnapshotConfig snapshot) { + this.snapshot = snapshot; + return this; + } + + public SnapshotConfig getSnapshot() { + return snapshot; + } + + public DataProfilingConfig setStatus(DataProfilingStatus status) { + this.status = status; + return this; + } + + public DataProfilingStatus getStatus() { + return status; + } + + public DataProfilingConfig setTimeSeries(TimeSeriesConfig timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public TimeSeriesConfig getTimeSeries() { + return timeSeries; + } + + public DataProfilingConfig setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataProfilingConfig that = (DataProfilingConfig) o; + return Objects.equals(assetsDir, that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(driftMetricsTableName, that.driftMetricsTableName) + && Objects.equals(effectiveWarehouseId, that.effectiveWarehouseId) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(latestMonitorFailureMessage, that.latestMonitorFailureMessage) + && Objects.equals(monitorVersion, that.monitorVersion) + && Objects.equals(monitoredTableName, that.monitoredTableName) + && Objects.equals(notificationSettings, that.notificationSettings) + && Objects.equals(outputSchemaId, that.outputSchemaId) + && Objects.equals(profileMetricsTableName, that.profileMetricsTableName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(status, that.status) + && Objects.equals(timeSeries, that.timeSeries) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + dashboardId, + driftMetricsTableName, + effectiveWarehouseId, + inferenceLog, + latestMonitorFailureMessage, + monitorVersion, + monitoredTableName, + notificationSettings, + outputSchemaId, + profileMetricsTableName, + schedule, + skipBuiltinDashboard, + slicingExprs, + snapshot, + status, + timeSeries, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DataProfilingConfig.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dashboardId", dashboardId) + .add("driftMetricsTableName", driftMetricsTableName) + .add("effectiveWarehouseId", effectiveWarehouseId) + .add("inferenceLog", inferenceLog) + .add("latestMonitorFailureMessage", latestMonitorFailureMessage) + .add("monitorVersion", monitorVersion) + .add("monitoredTableName", monitoredTableName) + .add("notificationSettings", notificationSettings) + .add("outputSchemaId", outputSchemaId) + .add("profileMetricsTableName", profileMetricsTableName) + .add("schedule", schedule) + .add("skipBuiltinDashboard", skipBuiltinDashboard) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("status", status) + .add("timeSeries",
timeSeries) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java new file mode 100755 index 000000000..0fd468a79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetric.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Custom metric definition. */ +@Generated +public class DataProfilingCustomMetric { + /** + * Jinja template for a SQL expression that specifies how to compute the metric. See [create + * metric definition]. + * + *

[create metric definition]: + * https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition + */ + @JsonProperty("definition") + private String definition; + + /** + * A list of column names in the input table the metric should be computed for. Can use + * ``":table"`` to indicate that the metric needs information from multiple columns. + */ + @JsonProperty("input_columns") + private Collection<String> inputColumns; + + /** Name of the metric in the output tables. */ + @JsonProperty("name") + private String name; + + /** The output type of the custom metric. */ + @JsonProperty("output_data_type") + private String outputDataType; + + /** The type of the custom metric. */ + @JsonProperty("type") + private DataProfilingCustomMetricType typeValue; + + public DataProfilingCustomMetric setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public DataProfilingCustomMetric setInputColumns(Collection<String> inputColumns) { + this.inputColumns = inputColumns; + return this; + } + + public Collection<String> getInputColumns() { + return inputColumns; + } + + public DataProfilingCustomMetric setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DataProfilingCustomMetric setOutputDataType(String outputDataType) { + this.outputDataType = outputDataType; + return this; + } + + public String getOutputDataType() { + return outputDataType; + } + + public DataProfilingCustomMetric setType(DataProfilingCustomMetricType typeValue) { + this.typeValue = typeValue; + return this; + } + + public DataProfilingCustomMetricType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataProfilingCustomMetric that = (DataProfilingCustomMetric) o; + return Objects.equals(definition, that.definition) + && Objects.equals(inputColumns, that.inputColumns) + && Objects.equals(name, that.name) + && Objects.equals(outputDataType, that.outputDataType) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(definition, inputColumns, name, outputDataType, typeValue); + } + + @Override + public String toString() { + return new ToStringer(DataProfilingCustomMetric.class) + .add("definition", definition) + .add("inputColumns", inputColumns) + .add("name", name) + .add("outputDataType", outputDataType) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java new file mode 100755 index 000000000..63122073c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingCustomMetricType.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The custom metric type.
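+ *
+ * <p>For example, an aggregate metric over a single column (a sketch; the metric name, Jinja
+ * template, and output type are illustrative values, not defaults):
+ *
+ * <pre>{@code
+ * DataProfilingCustomMetric metric =
+ *     new DataProfilingCustomMetric()
+ *         .setName("avg_price")
+ *         .setDefinition("avg(`{{input_column}}`)")
+ *         .setInputColumns(java.util.Arrays.asList("price"))
+ *         .setOutputDataType("double")
+ *         .setType(DataProfilingCustomMetricType.DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE);
+ * }</pre>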
*/ +@Generated +public enum DataProfilingCustomMetricType { + DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE, + DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED, + DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java new file mode 100755 index 000000000..d49a4406d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingStatus.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The status of the data profiling monitor. */ +@Generated +public enum DataProfilingStatus { + DATA_PROFILING_STATUS_ACTIVE, + DATA_PROFILING_STATUS_DELETE_PENDING, + DATA_PROFILING_STATUS_ERROR, + DATA_PROFILING_STATUS_FAILED, + DATA_PROFILING_STATUS_PENDING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java new file mode 100755 index 000000000..0226e6c69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java @@ -0,0 +1,197 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */ +@Generated +public class DataQualityAPI { + private static final Logger LOG = LoggerFactory.getLogger(DataQualityAPI.class); + + private final DataQualityService impl; + + /** Regular-use constructor */ + public DataQualityAPI(ApiClient apiClient) { + impl = new DataQualityImpl(apiClient); + } + + /** Constructor for mocks */ + public DataQualityAPI(DataQualityService mock) { + impl = mock; + } + + /** + * Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`. + */ + public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) { + return impl.cancelRefresh(request); + } + + /** + * Create a data quality monitor on a Unity Catalog object. The caller must provide either + * `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor. + * + *

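A sketch of a table-monitor creation call, under two assumptions this diff does not confirm: that `WorkspaceClient` exposes this service through a `dataQuality()` accessor, and that `DataProfilingConfig` (whose fields sit outside this hunk) can be constructed empty. `CreateMonitorRequest.setMonitor` mirrors the `request.getMonitor()` call visible in `DataQualityImpl`:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dataquality.CreateMonitorRequest;
import com.databricks.sdk.service.dataquality.DataProfilingConfig;
import com.databricks.sdk.service.dataquality.Monitor;

public class CreateTableMonitorExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // A table monitor carries data_profiling_config; a schema monitor
    // would carry anomaly_detection_config instead.
    Monitor monitor =
        new Monitor()
            .setObjectType("table")
            .setObjectId("<table-uuid>")
            .setDataProfilingConfig(new DataProfilingConfig());

    // dataQuality() is an assumed accessor name, not shown in this diff.
    Monitor created =
        w.dataQuality().createMonitor(new CreateMonitorRequest().setMonitor(monitor));
    System.out.println(created);
  }
}
```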
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the + * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent + * schema, and have **SELECT** access on the table. 3. have the following permissions: - + * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - + * be an owner of the table. + * + *
<p>
Workspace assets, such as the dashboard, will be created in the workspace where this call + * was made. + */ + public Monitor createMonitor(CreateMonitorRequest request) { + return impl.createMonitor(request); + } + + /** + * Creates a refresh. Currently only supported for the `table` `object_type`. + * + *

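Putting `createRefresh` and `getRefresh` together, a caller can trigger a run and poll it to a terminal state. In this sketch the `CreateRefreshRequest` setters are inferred from the getters used in `DataQualityImpl`, and `dataQuality()` is again an assumed accessor:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dataquality.CreateRefreshRequest;
import com.databricks.sdk.service.dataquality.Refresh;
import com.databricks.sdk.service.dataquality.RefreshState;

public class TriggerRefreshExample {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    Refresh refresh =
        w.dataQuality() // assumed accessor name
            .createRefresh(
                new CreateRefreshRequest()
                    .setObjectType("table")
                    .setObjectId("<table-uuid>")
                    .setRefresh(new Refresh()));

    // Poll until the refresh leaves its pending/running states.
    while (refresh.getState() == RefreshState.MONITOR_REFRESH_STATE_PENDING
        || refresh.getState() == RefreshState.MONITOR_REFRESH_STATE_RUNNING) {
      Thread.sleep(10_000);
      refresh = w.dataQuality().getRefresh("table", "<table-uuid>", refresh.getRefreshId());
    }
    System.out.println(refresh.getState() + ": " + refresh.getMessage());
  }
}
```

A stuck run could be abandoned through `cancelRefresh`, whose request carries the same three coordinates (object type, object id, refresh id).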
The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table + */ + public Refresh createRefresh(CreateRefreshRequest request) { + return impl.createRefresh(request); + } + + public void deleteMonitor(String objectType, String objectId) { + deleteMonitor(new DeleteMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Delete a data quality monitor on Unity Catalog object. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + * + *

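The convenience overload above reduces deletion to a single call; note the cleanup caveat in the paragraph that follows. A minimal sketch, with `dataQuality()` once more an assumed accessor:

```java
import com.databricks.sdk.WorkspaceClient;

public class DeleteMonitorExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Deletes the monitor only; its metric tables and dashboard remain.
    w.dataQuality().deleteMonitor("table", "<table-uuid>"); // assumed accessor
  }
}
```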
Note that the metric tables and dashboard will not be deleted as part of this call; those + * assets must be manually cleaned up (if desired). + */ + public void deleteMonitor(DeleteMonitorRequest request) { + impl.deleteMonitor(request); + } + + public void deleteRefresh(String objectType, String objectId, long refreshId) { + deleteRefresh( + new DeleteRefreshRequest() + .setObjectType(objectType) + .setObjectId(objectId) + .setRefreshId(refreshId)); + } + + /** (Unimplemented) Delete a refresh */ + public void deleteRefresh(DeleteRefreshRequest request) { + impl.deleteRefresh(request); + } + + public Monitor getMonitor(String objectType, String objectId) { + return getMonitor(new GetMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Read a data quality monitor on Unity Catalog object. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + * + *

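Reading a monitor back is symmetric; which config block is populated depends on the object type. A sketch with the same assumed `dataQuality()` accessor:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dataquality.Monitor;

public class GetMonitorExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    Monitor m = w.dataQuality().getMonitor("table", "<table-uuid>"); // assumed accessor
    // For a table monitor, the profiling config is the populated branch.
    System.out.println(m.getDataProfilingConfig());
  }
}
```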
The returned information includes configuration values, as well as information on assets + * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is + * in a different workspace than where the monitor was created. + */ + public Monitor getMonitor(GetMonitorRequest request) { + return impl.getMonitor(request); + } + + public Refresh getRefresh(String objectType, String objectId, long refreshId) { + return getRefresh( + new GetRefreshRequest() + .setObjectType(objectType) + .setObjectId(objectId) + .setRefreshId(refreshId)); + } + + /** + * Get data quality monitor refresh. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + */ + public Refresh getRefresh(GetRefreshRequest request) { + return impl.getRefresh(request); + } + + /** (Unimplemented) List data quality monitors. */ + public Iterable listMonitor(ListMonitorRequest request) { + return new Paginator<>( + request, + impl::listMonitor, + ListMonitorResponse::getMonitors, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listRefresh(String objectType, String objectId) { + return listRefresh(new ListRefreshRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * List data quality monitor refreshes. + * + *

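Because `listRefresh` wraps the response in a `Paginator`, iteration transparently follows `next_page_token` across pages and the caller never touches the token. For example (assumed `dataQuality()` accessor):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dataquality.Refresh;

public class ListRefreshExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // The Paginator re-issues the request with each next_page_token until
    // the server returns an empty token.
    for (Refresh r : w.dataQuality().listRefresh("table", "<table-uuid>")) { // assumed accessor
      System.out.printf("refresh %d: %s (%s)%n", r.getRefreshId(), r.getState(), r.getTrigger());
    }
  }
}
```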
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + */ + public Iterable listRefresh(ListRefreshRequest request) { + return new Paginator<>( + request, + impl::listRefresh, + ListRefreshResponse::getRefreshes, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** + * Update a data quality monitor on Unity Catalog object. + * + *

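`updateMonitor` issues a PATCH scoped by `update_mask`, so only the named fields change on the server. A sketch reusing the mask value documented on `UpdateMonitorRequest` below (assumed `dataQuality()` accessor):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dataquality.Monitor;
import com.databricks.sdk.service.dataquality.UpdateMonitorRequest;

public class UpdateMonitorExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    Monitor current = w.dataQuality().getMonitor("table", "<table-uuid>"); // assumed accessor
    Monitor updated =
        w.dataQuality()
            .updateMonitor(
                new UpdateMonitorRequest()
                    .setObjectType("table")
                    .setObjectId("<table-uuid>")
                    // Only the custom metrics are touched; all other fields
                    // keep their server-side values.
                    .setUpdateMask("data_profiling_config.custom_metrics")
                    .setMonitor(current));
    System.out.println(updated);
  }
}
```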
For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + */ + public Monitor updateMonitor(UpdateMonitorRequest request) { + return impl.updateMonitor(request); + } + + /** (Unimplemented) Update a refresh */ + public Refresh updateRefresh(UpdateRefreshRequest request) { + return impl.updateRefresh(request); + } + + public DataQualityService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java new file mode 100755 index 000000000..7411d79f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityImpl.java @@ -0,0 +1,190 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of DataQuality */ +@Generated +class DataQualityImpl implements DataQualityService { + private final ApiClient apiClient; + + public DataQualityImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s/refreshes/%s/cancel", + request.getObjectType(), request.getObjectId(), request.getRefreshId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CancelRefreshResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Monitor createMonitor(CreateMonitorRequest request) { + String path = "/api/data-quality/v1/monitors"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getMonitor())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Monitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Refresh createRefresh(CreateRefreshRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s/refreshes", + request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getRefresh())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Refresh.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteMonitor(DeleteMonitorRequest request) { + String path = + String.format( + 
"/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteRefresh(DeleteRefreshRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s/refreshes/%s", + request.getObjectType(), request.getObjectId(), request.getRefreshId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Monitor getMonitor(GetMonitorRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Monitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Refresh getRefresh(GetRefreshRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s/refreshes/%s", + request.getObjectType(), request.getObjectId(), request.getRefreshId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Refresh.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListMonitorResponse listMonitor(ListMonitorRequest request) { + String path = "/api/data-quality/v1/monitors"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListMonitorResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListRefreshResponse listRefresh(ListRefreshRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s/refreshes", + request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListRefreshResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Monitor updateMonitor(UpdateMonitorRequest request) { + String path = + String.format( + "/api/data-quality/v1/monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getMonitor())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Monitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Refresh updateRefresh(UpdateRefreshRequest request) { + String path = + String.format( + 
"/api/data-quality/v1/monitors/%s/%s/refreshes/%s", + request.getObjectType(), request.getObjectId(), request.getRefreshId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getRefresh())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Refresh.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java new file mode 100755 index 000000000..1e5487768 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java @@ -0,0 +1,111 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** + * Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) + * + *
<p>
This is the high-level interface that contains generated methods. + * + *

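The split between `DataQualityAPI` and this interface is what makes unit testing cheap: the API class's second constructor accepts any `DataQualityService`, so a test can substitute a stub. A sketch using Mockito (one option among many; it is not a dependency introduced by this diff):

```java
import static org.mockito.Mockito.*;

import com.databricks.sdk.service.dataquality.DataQualityAPI;
import com.databricks.sdk.service.dataquality.DataQualityService;
import com.databricks.sdk.service.dataquality.GetMonitorRequest;
import com.databricks.sdk.service.dataquality.Monitor;

public class DataQualityMockExample {
  public static void main(String[] args) {
    DataQualityService service = mock(DataQualityService.class);
    when(service.getMonitor(any(GetMonitorRequest.class)))
        .thenReturn(new Monitor().setObjectType("table").setObjectId("fake-id"));

    // The mock constructor bypasses ApiClient entirely.
    DataQualityAPI api = new DataQualityAPI(service);
    System.out.println(api.getMonitor("table", "fake-id").getObjectId()); // fake-id
  }
}
```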
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface DataQualityService { + /** + * Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`. + */ + CancelRefreshResponse cancelRefresh(CancelRefreshRequest cancelRefreshRequest); + + /** + * Create a data quality monitor on a Unity Catalog object. The caller must provide either + * `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the + * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent + * schema, and have **SELECT** access on the table. 3. have the following permissions: - + * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - + * be an owner of the table. + * + *
<p>
Workspace assets, such as the dashboard, will be created in the workspace where this call + * was made. + */ + Monitor createMonitor(CreateMonitorRequest createMonitorRequest); + + /** + * Creates a refresh. Currently only supported for the `table` `object_type`. + * + *
<p>
The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table + */ + Refresh createRefresh(CreateRefreshRequest createRefreshRequest); + + /** + * Delete a data quality monitor on Unity Catalog object. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + * + *
<p>
Note that the metric tables and dashboard will not be deleted as part of this call; those + * assets must be manually cleaned up (if desired). + */ + void deleteMonitor(DeleteMonitorRequest deleteMonitorRequest); + + /** (Unimplemented) Delete a refresh */ + void deleteRefresh(DeleteRefreshRequest deleteRefreshRequest); + + /** + * Read a data quality monitor on Unity Catalog object. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + * + *
<p>
The returned information includes configuration values, as well as information on assets + * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is + * in a different workspace than where the monitor was created. + */ + Monitor getMonitor(GetMonitorRequest getMonitorRequest); + + /** + * Get data quality monitor refresh. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + */ + Refresh getRefresh(GetRefreshRequest getRefreshRequest); + + /** (Unimplemented) List data quality monitors. */ + ListMonitorResponse listMonitor(ListMonitorRequest listMonitorRequest); + + /** + * List data quality monitor refreshes. + * + *
<p>
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. + */ + ListRefreshResponse listRefresh(ListRefreshRequest listRefreshRequest); + + /** + * Update a data quality monitor on Unity Catalog object. + * + *
<p>
For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent + * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's + * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + */ + Monitor updateMonitor(UpdateMonitorRequest updateMonitorRequest); + + /** (Unimplemented) Update a refresh */ + Refresh updateRefresh(UpdateRefreshRequest updateRefreshRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java new file mode 100755 index 000000000..0479ce355 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteMonitorRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + public DeleteMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public DeleteMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteMonitorRequest that = (DeleteMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(DeleteMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java new file mode 100755 index 000000000..6ec839ce9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteRefreshRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + /** Unique id of the refresh operation. 
*/ + @JsonIgnore private Long refreshId; + + public DeleteRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public DeleteRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public DeleteRefreshRequest setRefreshId(Long refreshId) { + this.refreshId = refreshId; + return this; + } + + public Long getRefreshId() { + return refreshId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRefreshRequest that = (DeleteRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refreshId, that.refreshId); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, refreshId); + } + + @Override + public String toString() { + return new ToStringer(DeleteRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refreshId", refreshId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java new file mode 100755 index 000000000..cdb1e5136 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetMonitorRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + public GetMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMonitorRequest that = (GetMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java new file mode 100755 index 000000000..9280dce0f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetRefreshRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + /** Unique id of the refresh operation. */ + @JsonIgnore private Long refreshId; + + public GetRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public GetRefreshRequest setRefreshId(Long refreshId) { + this.refreshId = refreshId; + return this; + } + + public Long getRefreshId() { + return refreshId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRefreshRequest that = (GetRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refreshId, that.refreshId); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, refreshId); + } + + @Override + public String toString() { + return new ToStringer(GetRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refreshId", refreshId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java new file mode 100755 index 000000000..0411b284a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceLogConfig.java @@ -0,0 +1,145 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Inference log configuration. */ +@Generated +public class InferenceLogConfig { + /** + * List of granularities to use when aggregating data into time windows based on their timestamp. + */ + @JsonProperty("granularities") + private Collection granularities; + + /** Column for the label. */ + @JsonProperty("label_column") + private String labelColumn; + + /** Column for the model identifier. */ + @JsonProperty("model_id_column") + private String modelIdColumn; + + /** Column for the prediction. */ + @JsonProperty("prediction_column") + private String predictionColumn; + + /** Column for prediction probabilities */ + @JsonProperty("prediction_probability_column") + private String predictionProbabilityColumn; + + /** Problem type the model aims to solve. */ + @JsonProperty("problem_type") + private InferenceProblemType problemType; + + /** Column for the timestamp. 
*/ + @JsonProperty("timestamp_column") + private String timestampColumn; + + public InferenceLogConfig setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public InferenceLogConfig setLabelColumn(String labelColumn) { + this.labelColumn = labelColumn; + return this; + } + + public String getLabelColumn() { + return labelColumn; + } + + public InferenceLogConfig setModelIdColumn(String modelIdColumn) { + this.modelIdColumn = modelIdColumn; + return this; + } + + public String getModelIdColumn() { + return modelIdColumn; + } + + public InferenceLogConfig setPredictionColumn(String predictionColumn) { + this.predictionColumn = predictionColumn; + return this; + } + + public String getPredictionColumn() { + return predictionColumn; + } + + public InferenceLogConfig setPredictionProbabilityColumn(String predictionProbabilityColumn) { + this.predictionProbabilityColumn = predictionProbabilityColumn; + return this; + } + + public String getPredictionProbabilityColumn() { + return predictionProbabilityColumn; + } + + public InferenceLogConfig setProblemType(InferenceProblemType problemType) { + this.problemType = problemType; + return this; + } + + public InferenceProblemType getProblemType() { + return problemType; + } + + public InferenceLogConfig setTimestampColumn(String timestampColumn) { + this.timestampColumn = timestampColumn; + return this; + } + + public String getTimestampColumn() { + return timestampColumn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceLogConfig that = (InferenceLogConfig) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(labelColumn, that.labelColumn) + && Objects.equals(modelIdColumn, that.modelIdColumn) + && Objects.equals(predictionColumn, that.predictionColumn) + && Objects.equals(predictionProbabilityColumn, that.predictionProbabilityColumn) + && Objects.equals(problemType, that.problemType) + && Objects.equals(timestampColumn, that.timestampColumn); + } + + @Override + public int hashCode() { + return Objects.hash( + granularities, + labelColumn, + modelIdColumn, + predictionColumn, + predictionProbabilityColumn, + problemType, + timestampColumn); + } + + @Override + public String toString() { + return new ToStringer(InferenceLogConfig.class) + .add("granularities", granularities) + .add("labelColumn", labelColumn) + .add("modelIdColumn", modelIdColumn) + .add("predictionColumn", predictionColumn) + .add("predictionProbabilityColumn", predictionProbabilityColumn) + .add("problemType", problemType) + .add("timestampColumn", timestampColumn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java new file mode 100755 index 000000000..3adad7d38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/InferenceProblemType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** Inference problem type the model aims to solve. 
*/ +@Generated +public enum InferenceProblemType { + INFERENCE_PROBLEM_TYPE_CLASSIFICATION, + INFERENCE_PROBLEM_TYPE_REGRESSION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java new file mode 100755 index 000000000..5a7cc3b3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListMonitorRequest { + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListMonitorRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListMonitorRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMonitorRequest that = (ListMonitorRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMonitorRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java new file mode 100755 index 000000000..ad6f2650b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListMonitorResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response for listing Monitors. 
*/ +@Generated +public class ListMonitorResponse { + /** */ + @JsonProperty("monitors") + private Collection monitors; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListMonitorResponse setMonitors(Collection monitors) { + this.monitors = monitors; + return this; + } + + public Collection getMonitors() { + return monitors; + } + + public ListMonitorResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMonitorResponse that = (ListMonitorResponse) o; + return Objects.equals(monitors, that.monitors) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(monitors, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMonitorResponse.class) + .add("monitors", monitors) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java new file mode 100755 index 000000000..e86705d05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListRefreshRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. 
*/ + @JsonIgnore private String objectType; + + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public ListRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public ListRefreshRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListRefreshRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRefreshRequest that = (ListRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java new file mode 100755 index 000000000..d05dce54b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response for listing refreshes. 
*/ +@Generated +public class ListRefreshResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("refreshes") + private Collection refreshes; + + public ListRefreshResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListRefreshResponse setRefreshes(Collection refreshes) { + this.refreshes = refreshes; + return this; + } + + public Collection getRefreshes() { + return refreshes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListRefreshResponse that = (ListRefreshResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(refreshes, that.refreshes); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, refreshes); + } + + @Override + public String toString() { + return new ToStringer(ListRefreshResponse.class) + .add("nextPageToken", nextPageToken) + .add("refreshes", refreshes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java new file mode 100755 index 000000000..da034b6b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Monitor for the data quality of unity catalog entities such as schema or table. */ +@Generated +public class Monitor { + /** Anomaly Detection Configuration, applicable to `schema` object types. */ + @JsonProperty("anomaly_detection_config") + private AnomalyDetectionConfig anomalyDetectionConfig; + + /** Data Profiling Configuration, applicable to `table` object types */ + @JsonProperty("data_profiling_config") + private DataProfilingConfig dataProfilingConfig; + + /** The UUID of the request object. For example, schema id. */ + @JsonProperty("object_id") + private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. 
*/ + @JsonProperty("object_type") + private String objectType; + + public Monitor setAnomalyDetectionConfig(AnomalyDetectionConfig anomalyDetectionConfig) { + this.anomalyDetectionConfig = anomalyDetectionConfig; + return this; + } + + public AnomalyDetectionConfig getAnomalyDetectionConfig() { + return anomalyDetectionConfig; + } + + public Monitor setDataProfilingConfig(DataProfilingConfig dataProfilingConfig) { + this.dataProfilingConfig = dataProfilingConfig; + return this; + } + + public DataProfilingConfig getDataProfilingConfig() { + return dataProfilingConfig; + } + + public Monitor setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public Monitor setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Monitor that = (Monitor) o; + return Objects.equals(anomalyDetectionConfig, that.anomalyDetectionConfig) + && Objects.equals(dataProfilingConfig, that.dataProfilingConfig) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(anomalyDetectionConfig, dataProfilingConfig, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(Monitor.class) + .add("anomalyDetectionConfig", anomalyDetectionConfig) + .add("dataProfilingConfig", dataProfilingConfig) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java new file mode 100755 index 000000000..5a5a920aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationDestination.java @@ -0,0 +1,49 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Destination of the data quality monitoring notification. */ +@Generated +public class NotificationDestination { + /** + * The list of email addresses to send the notification to. A maximum of 5 email addresses is + * supported. 
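Together with the `NotificationSettings` wrapper defined just below, a failure notification fans out to at most five addresses. A minimal sketch with an invented address:

```java
import java.util.Arrays;

import com.databricks.sdk.service.dataquality.NotificationDestination;
import com.databricks.sdk.service.dataquality.NotificationSettings;

public class NotificationExample {
  public static void main(String[] args) {
    NotificationSettings settings =
        new NotificationSettings()
            .setOnFailure( // fires on failure or timeout, per the field docs
                new NotificationDestination()
                    .setEmailAddresses(Arrays.asList("data-eng@example.com")));
    System.out.println(settings);
  }
}
```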
+ */ + @JsonProperty("email_addresses") + private Collection emailAddresses; + + public NotificationDestination setEmailAddresses(Collection emailAddresses) { + this.emailAddresses = emailAddresses; + return this; + } + + public Collection getEmailAddresses() { + return emailAddresses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotificationDestination that = (NotificationDestination) o; + return Objects.equals(emailAddresses, that.emailAddresses); + } + + @Override + public int hashCode() { + return Objects.hash(emailAddresses); + } + + @Override + public String toString() { + return new ToStringer(NotificationDestination.class) + .add("emailAddresses", emailAddresses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java new file mode 100755 index 000000000..6f3b950b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/NotificationSettings.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Settings for sending notifications on the data quality monitoring. */ +@Generated +public class NotificationSettings { + /** Destinations to send notifications on failure/timeout. */ + @JsonProperty("on_failure") + private NotificationDestination onFailure; + + public NotificationSettings setOnFailure(NotificationDestination onFailure) { + this.onFailure = onFailure; + return this; + } + + public NotificationDestination getOnFailure() { + return onFailure; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotificationSettings that = (NotificationSettings) o; + return Objects.equals(onFailure, that.onFailure); + } + + @Override + public int hashCode() { + return Objects.hash(onFailure); + } + + @Override + public String toString() { + return new ToStringer(NotificationSettings.class).add("onFailure", onFailure).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java new file mode 100755 index 000000000..d2e0fb6b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java @@ -0,0 +1,154 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Refresh object gives information on a refresh of the data quality monitoring pipeline. */ +@Generated +public class Refresh { + /** Time when the refresh ended (milliseconds since 1/1/1970 UTC). */ + @JsonProperty("end_time_ms") + private Long endTimeMs; + + /** + * An optional message to give insight into the current state of the refresh (e.g. FAILURE + * messages). 
+ */ + @JsonProperty("message") + private String message; + + /** The UUID of the request object. For example, table id. */ + @JsonProperty("object_id") + private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema`or `table`. */ + @JsonProperty("object_type") + private String objectType; + + /** Unique id of the refresh operation. */ + @JsonProperty("refresh_id") + private Long refreshId; + + /** Time when the refresh started (milliseconds since 1/1/1970 UTC). */ + @JsonProperty("start_time_ms") + private Long startTimeMs; + + /** The current state of the refresh. */ + @JsonProperty("state") + private RefreshState state; + + /** What triggered the refresh. */ + @JsonProperty("trigger") + private RefreshTrigger trigger; + + public Refresh setEndTimeMs(Long endTimeMs) { + this.endTimeMs = endTimeMs; + return this; + } + + public Long getEndTimeMs() { + return endTimeMs; + } + + public Refresh setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public Refresh setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public Refresh setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public Refresh setRefreshId(Long refreshId) { + this.refreshId = refreshId; + return this; + } + + public Long getRefreshId() { + return refreshId; + } + + public Refresh setStartTimeMs(Long startTimeMs) { + this.startTimeMs = startTimeMs; + return this; + } + + public Long getStartTimeMs() { + return startTimeMs; + } + + public Refresh setState(RefreshState state) { + this.state = state; + return this; + } + + public RefreshState getState() { + return state; + } + + public Refresh setTrigger(RefreshTrigger trigger) { + this.trigger = trigger; + return this; + } + + public RefreshTrigger getTrigger() { + return trigger; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Refresh that = (Refresh) o; + return Objects.equals(endTimeMs, that.endTimeMs) + && Objects.equals(message, that.message) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refreshId, that.refreshId) + && Objects.equals(startTimeMs, that.startTimeMs) + && Objects.equals(state, that.state) + && Objects.equals(trigger, that.trigger); + } + + @Override + public int hashCode() { + return Objects.hash( + endTimeMs, message, objectId, objectType, refreshId, startTimeMs, state, trigger); + } + + @Override + public String toString() { + return new ToStringer(Refresh.class) + .add("endTimeMs", endTimeMs) + .add("message", message) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refreshId", refreshId) + .add("startTimeMs", startTimeMs) + .add("state", state) + .add("trigger", trigger) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java new file mode 100755 index 000000000..d69055c0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshState.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The state of the refresh. */ +@Generated +public enum RefreshState { + MONITOR_REFRESH_STATE_CANCELED, + MONITOR_REFRESH_STATE_FAILED, + MONITOR_REFRESH_STATE_PENDING, + MONITOR_REFRESH_STATE_RUNNING, + MONITOR_REFRESH_STATE_SUCCESS, + MONITOR_REFRESH_STATE_UNKNOWN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java new file mode 100755 index 000000000..f40549424 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/RefreshTrigger.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; + +/** The trigger of the refresh. */ +@Generated +public enum RefreshTrigger { + MONITOR_REFRESH_TRIGGER_DATA_CHANGE, + MONITOR_REFRESH_TRIGGER_MANUAL, + MONITOR_REFRESH_TRIGGER_SCHEDULE, + MONITOR_REFRESH_TRIGGER_UNKNOWN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java new file mode 100755 index 000000000..de4158cf0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/SnapshotConfig.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Snapshot analysis configuration. */ +@Generated +public class SnapshotConfig { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SnapshotConfig.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java new file mode 100755 index 000000000..eb7d0402e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Time series analysis configuration. */ +@Generated +public class TimeSeriesConfig { + /** + * List of granularities to use when aggregating data into time windows based on their timestamp. + */ + @JsonProperty("granularities") + private Collection granularities; + + /** Column for the timestamp. 
*/ + @JsonProperty("timestamp_column") + private String timestampColumn; + + public TimeSeriesConfig setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public TimeSeriesConfig setTimestampColumn(String timestampColumn) { + this.timestampColumn = timestampColumn; + return this; + } + + public String getTimestampColumn() { + return timestampColumn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TimeSeriesConfig that = (TimeSeriesConfig) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(timestampColumn, that.timestampColumn); + } + + @Override + public int hashCode() { + return Objects.hash(granularities, timestampColumn); + } + + @Override + public String toString() { + return new ToStringer(TimeSeriesConfig.class) + .add("granularities", granularities) + .add("timestampColumn", timestampColumn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java new file mode 100755 index 000000000..014f74350 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java @@ -0,0 +1,93 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateMonitorRequest { + /** The monitor to update. */ + @JsonProperty("monitor") + private Monitor monitor; + + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + /** + * The field mask to specify which fields to update as a comma-separated list. 
Example value: + * `data_profiling_config.custom_metrics,data_profiling_config.schedule.quartz_cron_expression` + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateMonitorRequest setMonitor(Monitor monitor) { + this.monitor = monitor; + return this; + } + + public Monitor getMonitor() { + return monitor; + } + + public UpdateMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public UpdateMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public UpdateMonitorRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMonitorRequest that = (UpdateMonitorRequest) o; + return Objects.equals(monitor, that.monitor) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(monitor, objectId, objectType, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateMonitorRequest.class) + .add("monitor", monitor) + .add("objectId", objectId) + .add("objectType", objectType) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java new file mode 100755 index 000000000..057ac4706 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dataquality; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateRefreshRequest { + /** The UUID of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: `schema` or `table`. */ + @JsonIgnore private String objectType; + + /** The refresh to update. */ + @JsonProperty("refresh") + private Refresh refresh; + + /** Unique id of the refresh operation. */ + @JsonIgnore private Long refreshId; + + /** The field mask to specify which fields to update. 
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateRefreshRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public UpdateRefreshRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public UpdateRefreshRequest setRefresh(Refresh refresh) { + this.refresh = refresh; + return this; + } + + public Refresh getRefresh() { + return refresh; + } + + public UpdateRefreshRequest setRefreshId(Long refreshId) { + this.refreshId = refreshId; + return this; + } + + public Long getRefreshId() { + return refreshId; + } + + public UpdateRefreshRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRefreshRequest that = (UpdateRefreshRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(refresh, that.refresh) + && Objects.equals(refreshId, that.refreshId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, refresh, refreshId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateRefreshRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("refresh", refresh) + .add("refreshId", refreshId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java index 5aacd813f..8ae58f6a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java @@ -15,7 +15,7 @@ public class GetPermissionLevelsRequest { /** * The type of the request object. Can be one of the following: alerts, alertsv2, authorization, * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ @JsonIgnore private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java index 9ee5386f6..801a423e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java @@ -15,7 +15,7 @@ public class GetPermissionRequest { /** * The type of the request object. 
Can be one of the following: alerts, alertsv2, authorization, * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ @JsonIgnore private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java index 188f7fda0..25ba32997 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java @@ -21,7 +21,7 @@ public class SetObjectPermissions { /** * The type of the request object. Can be one of the following: alerts, alertsv2, authorization, * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. */ @JsonIgnore private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java index 442fbf40d..b7ea0195f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java @@ -21,7 +21,7 @@ public class UpdateObjectPermissions { /** * The type of the request object. Can be one of the following: alerts, alertsv2, authorization, * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, * serving-endpoints, or warehouses. 
*/ @JsonIgnore private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java index 431df6742..883361917 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2API.java @@ -26,6 +26,93 @@ public AccountIamV2API(AccountIamV2Service mock) { impl = mock; } + /** TODO: Write description later when this method is implemented */ + public Group createGroup(CreateGroupRequest request) { + return impl.createGroup(request); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal createServicePrincipal(CreateServicePrincipalRequest request) { + return impl.createServicePrincipal(request); + } + + /** TODO: Write description later when this method is implemented */ + public User createUser(CreateUserRequest request) { + return impl.createUser(request); + } + + /** TODO: Write description later when this method is implemented */ + public WorkspaceAccessDetail createWorkspaceAccessDetail( + CreateWorkspaceAccessDetailRequest request) { + return impl.createWorkspaceAccessDetail(request); + } + + public void deleteGroup(long internalId) { + deleteGroup(new DeleteGroupRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteGroup(DeleteGroupRequest request) { + impl.deleteGroup(request); + } + + public void deleteServicePrincipal(long internalId) { + deleteServicePrincipal(new DeleteServicePrincipalRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteServicePrincipal(DeleteServicePrincipalRequest request) { + impl.deleteServicePrincipal(request); + } + + public void deleteUser(long internalId) { + deleteUser(new DeleteUserRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteUser(DeleteUserRequest request) { + impl.deleteUser(request); + } + + public void deleteWorkspaceAccessDetail(long workspaceId, long principalId) { + deleteWorkspaceAccessDetail( + new DeleteWorkspaceAccessDetailRequest() + .setWorkspaceId(workspaceId) + .setPrincipalId(principalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteWorkspaceAccessDetail(DeleteWorkspaceAccessDetailRequest request) { + impl.deleteWorkspaceAccessDetail(request); + } + + public Group getGroup(long internalId) { + return getGroup(new GetGroupRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public Group getGroup(GetGroupRequest request) { + return impl.getGroup(request); + } + + public ServicePrincipal getServicePrincipal(long internalId) { + return getServicePrincipal(new GetServicePrincipalRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal getServicePrincipal(GetServicePrincipalRequest request) { + return impl.getServicePrincipal(request); + } + + public User getUser(long internalId) { + return getUser(new GetUserRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public User getUser(GetUserRequest request) { + return impl.getUser(request); + } + public WorkspaceAccessDetail getWorkspaceAccessDetail(long workspaceId, long principalId) { return getWorkspaceAccessDetail( new GetWorkspaceAccessDetailRequest() @@ -44,6 +131,32 @@ public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRe return impl.getWorkspaceAccessDetail(request); } + /** TODO: Write description later when this method is implemented */ + public ListGroupsResponse listGroups(ListGroupsRequest request) { + return impl.listGroups(request); + } + + /** TODO: Write description later when this method is implemented */ + public ListServicePrincipalsResponse listServicePrincipals(ListServicePrincipalsRequest request) { + return impl.listServicePrincipals(request); + } + + /** TODO: Write description later when this method is implemented */ + public ListUsersResponse listUsers(ListUsersRequest request) { + return impl.listUsers(request); + } + + public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails(long workspaceId) { + return listWorkspaceAccessDetails( + new ListWorkspaceAccessDetailsRequest().setWorkspaceId(workspaceId)); + } + + /** TODO: Write description later when this method is implemented */ + public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails( + ListWorkspaceAccessDetailsRequest request) { + return impl.listWorkspaceAccessDetails(request); + } + /** * Resolves a group with the given external ID from the customer's IdP. If the group does not * exist, it will be created in the account. If the customer is not onboarded onto Automatic @@ -72,6 +185,27 @@ public ResolveUserResponse resolveUser(ResolveUserRequest request) { return impl.resolveUser(request); } + /** TODO: Write description later when this method is implemented */ + public Group updateGroup(UpdateGroupRequest request) { + return impl.updateGroup(request); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal updateServicePrincipal(UpdateServicePrincipalRequest request) { + return impl.updateServicePrincipal(request); + } + + /** TODO: Write description later when this method is implemented */ + public User updateUser(UpdateUserRequest request) { + return impl.updateUser(request); + } + + /** TODO: Write description later when this method is implemented */ + public WorkspaceAccessDetail updateWorkspaceAccessDetail( + UpdateWorkspaceAccessDetailRequest request) { + return impl.updateWorkspaceAccessDetail(request); + } + public AccountIamV2Service impl() { return impl; }
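A minimal usage sketch (not part of this changeset) for the account-level CRUD surface added above; the AccountClient variable `a`, its accountIamV2() accessor name, and the example internal ID are assumptions:

  ListGroupsResponse groups = a.accountIamV2().listGroups(new ListGroupsRequest());
  Group group = a.accountIamV2().getGroup(12345L);  // convenience overload for GetGroupRequest
  a.accountIamV2().deleteGroup(12345L);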
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java index c887fd39b..f43782568 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Impl.java @@ -16,6 +16,183 @@ public AccountIamV2Impl(ApiClient apiClient) { this.apiClient = apiClient; } + @Override + public Group createGroup(CreateGroupRequest request) { + String path = + String.format("/api/2.0/identity/accounts/%s/groups", apiClient.configuredAccountID()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getGroup())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new 
DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal createServicePrincipal(CreateServicePrincipalRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/servicePrincipals", apiClient.configuredAccountID()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getServicePrincipal())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User createUser(CreateUserRequest request) { + String path = + String.format("/api/2.0/identity/accounts/%s/users", apiClient.configuredAccountID()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getUser())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public WorkspaceAccessDetail createWorkspaceAccessDetail( + CreateWorkspaceAccessDetailRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails", + apiClient.configuredAccountID(), request.getParent()); + try { + Request req = + new Request("POST", path, apiClient.serialize(request.getWorkspaceAccessDetail())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceAccessDetail.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteGroup(DeleteGroupRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/groups/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteServicePrincipal(DeleteServicePrincipalRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/servicePrincipals/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteUser(DeleteUserRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/users/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteWorkspaceAccessDetail(DeleteWorkspaceAccessDetailRequest request) { + String path = + 
String.format( + "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails/%s", + apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Group getGroup(GetGroupRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/groups/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal getServicePrincipal(GetServicePrincipalRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/servicePrincipals/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User getUser(GetUserRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/users/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRequest request) { String path = @@ -32,6 +209,66 @@ public WorkspaceAccessDetail getWorkspaceAccessDetail(GetWorkspaceAccessDetailRe } } + @Override + public ListGroupsResponse listGroups(ListGroupsRequest request) { + String path = + String.format("/api/2.0/identity/accounts/%s/groups", apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListGroupsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListServicePrincipalsResponse listServicePrincipals(ListServicePrincipalsRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/servicePrincipals", apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListServicePrincipalsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListUsersResponse listUsers(ListUsersRequest request) { + String path = + String.format("/api/2.0/identity/accounts/%s/users", apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return 
apiClient.execute(req, ListUsersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails( + ListWorkspaceAccessDetailsRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails", + apiClient.configuredAccountID(), request.getWorkspaceId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListWorkspaceAccessDetailsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ResolveGroupResponse resolveGroup(ResolveGroupRequest request) { String path = @@ -83,4 +320,74 @@ public ResolveUserResponse resolveUser(ResolveUserRequest request) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public Group updateGroup(UpdateGroupRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/groups/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getGroup())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal updateServicePrincipal(UpdateServicePrincipalRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/servicePrincipals/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getServicePrincipal())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User updateUser(UpdateUserRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/users/%s", + apiClient.configuredAccountID(), request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getUser())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public WorkspaceAccessDetail updateWorkspaceAccessDetail( + UpdateWorkspaceAccessDetailRequest request) { + String path = + String.format( + "/api/2.0/identity/accounts/%s/workspaces/%s/workspaceAccessDetails/%s", + apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId()); + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getWorkspaceAccessDetail())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceAccessDetail.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + 
e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java index 2a56ad630..aad7cd81e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/AccountIamV2Service.java @@ -13,6 +13,42 @@ */ @Generated public interface AccountIamV2Service { + /** TODO: Write description later when this method is implemented */ + Group createGroup(CreateGroupRequest createGroupRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal createServicePrincipal( + CreateServicePrincipalRequest createServicePrincipalRequest); + + /** TODO: Write description later when this method is implemented */ + User createUser(CreateUserRequest createUserRequest); + + /** TODO: Write description later when this method is implemented */ + WorkspaceAccessDetail createWorkspaceAccessDetail( + CreateWorkspaceAccessDetailRequest createWorkspaceAccessDetailRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteGroup(DeleteGroupRequest deleteGroupRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteServicePrincipal(DeleteServicePrincipalRequest deleteServicePrincipalRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteUser(DeleteUserRequest deleteUserRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteWorkspaceAccessDetail( + DeleteWorkspaceAccessDetailRequest deleteWorkspaceAccessDetailRequest); + + /** TODO: Write description later when this method is implemented */ + Group getGroup(GetGroupRequest getGroupRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal getServicePrincipal(GetServicePrincipalRequest getServicePrincipalRequest); + + /** TODO: Write description later when this method is implemented */ + User getUser(GetUserRequest getUserRequest); + /** * Returns the access details for a principal in a workspace. Allows for checking access details * for any provisioned principal (user, service principal, or group) in a workspace. * Provisioned @@ -23,6 +59,20 @@ public interface AccountIamV2Service { WorkspaceAccessDetail getWorkspaceAccessDetail( GetWorkspaceAccessDetailRequest getWorkspaceAccessDetailRequest); + /** TODO: Write description later when this method is implemented */ + ListGroupsResponse listGroups(ListGroupsRequest listGroupsRequest); + + /** TODO: Write description later when this method is implemented */ + ListServicePrincipalsResponse listServicePrincipals( + ListServicePrincipalsRequest listServicePrincipalsRequest); + + /** TODO: Write description later when this method is implemented */ + ListUsersResponse listUsers(ListUsersRequest listUsersRequest); + + /** TODO: Write description later when this method is implemented */ + ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetails( + ListWorkspaceAccessDetailsRequest listWorkspaceAccessDetailsRequest); + /** * Resolves a group with the given external ID from the customer's IdP. If the group does not * exist, it will be created in the account. If the customer is not onboarded onto Automatic @@ -44,4 +94,18 @@ ResolveServicePrincipalResponse resolveServicePrincipal( * this will return an error. 
*/ ResolveUserResponse resolveUser(ResolveUserRequest resolveUserRequest); + + /** TODO: Write description later when this method is implemented */ + Group updateGroup(UpdateGroupRequest updateGroupRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal updateServicePrincipal( + UpdateServicePrincipalRequest updateServicePrincipalRequest); + + /** TODO: Write description later when this method is implemented */ + User updateUser(UpdateUserRequest updateUserRequest); + + /** TODO: Write description later when this method is implemented */ + WorkspaceAccessDetail updateWorkspaceAccessDetail( + UpdateWorkspaceAccessDetailRequest updateWorkspaceAccessDetailRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java new file mode 100755 index 000000000..3b3b114de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupProxyRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateGroupProxyRequest { + /** Required. Group to be created in */ + @JsonProperty("group") + private Group group; + + public CreateGroupProxyRequest setGroup(Group group) { + this.group = group; + return this; + } + + public Group getGroup() { + return group; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateGroupProxyRequest that = (CreateGroupProxyRequest) o; + return Objects.equals(group, that.group); + } + + @Override + public int hashCode() { + return Objects.hash(group); + } + + @Override + public String toString() { + return new ToStringer(CreateGroupProxyRequest.class).add("group", group).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java new file mode 100755 index 000000000..62c97720d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateGroupRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateGroupRequest { + /** Required. 
Group to be created in */ + @JsonProperty("group") + private Group group; + + public CreateGroupRequest setGroup(Group group) { + this.group = group; + return this; + } + + public Group getGroup() { + return group; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateGroupRequest that = (CreateGroupRequest) o; + return Objects.equals(group, that.group); + } + + @Override + public int hashCode() { + return Objects.hash(group); + } + + @Override + public String toString() { + return new ToStringer(CreateGroupRequest.class).add("group", group).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java new file mode 100755 index 000000000..43953b199 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalProxyRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateServicePrincipalProxyRequest { + /** Required. Service principal to be created in */ + @JsonProperty("service_principal") + private ServicePrincipal servicePrincipal; + + public CreateServicePrincipalProxyRequest setServicePrincipal(ServicePrincipal servicePrincipal) { + this.servicePrincipal = servicePrincipal; + return this; + } + + public ServicePrincipal getServicePrincipal() { + return servicePrincipal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateServicePrincipalProxyRequest that = (CreateServicePrincipalProxyRequest) o; + return Objects.equals(servicePrincipal, that.servicePrincipal); + } + + @Override + public int hashCode() { + return Objects.hash(servicePrincipal); + } + + @Override + public String toString() { + return new ToStringer(CreateServicePrincipalProxyRequest.class) + .add("servicePrincipal", servicePrincipal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java new file mode 100755 index 000000000..fadf9a546 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateServicePrincipalRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateServicePrincipalRequest { + /** Required. 
Service principal to be created in */ + @JsonProperty("service_principal") + private ServicePrincipal servicePrincipal; + + public CreateServicePrincipalRequest setServicePrincipal(ServicePrincipal servicePrincipal) { + this.servicePrincipal = servicePrincipal; + return this; + } + + public ServicePrincipal getServicePrincipal() { + return servicePrincipal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateServicePrincipalRequest that = (CreateServicePrincipalRequest) o; + return Objects.equals(servicePrincipal, that.servicePrincipal); + } + + @Override + public int hashCode() { + return Objects.hash(servicePrincipal); + } + + @Override + public String toString() { + return new ToStringer(CreateServicePrincipalRequest.class) + .add("servicePrincipal", servicePrincipal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java new file mode 100755 index 000000000..b35fec791 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserProxyRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateUserProxyRequest { + /** Required. User to be created in */ + @JsonProperty("user") + private User user; + + public CreateUserProxyRequest setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateUserProxyRequest that = (CreateUserProxyRequest) o; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } + + @Override + public String toString() { + return new ToStringer(CreateUserProxyRequest.class).add("user", user).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java new file mode 100755 index 000000000..0e85e104b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateUserRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateUserRequest { + /** Required. 
User to be created in */ + @JsonProperty("user") + private User user; + + public CreateUserRequest setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateUserRequest that = (CreateUserRequest) o; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } + + @Override + public String toString() { + return new ToStringer(CreateUserRequest.class).add("user", user).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java new file mode 100755 index 000000000..4b00b8dd9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailLocalRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateWorkspaceAccessDetailLocalRequest { + /** Required. Workspace access detail to be created in . */ + @JsonProperty("workspace_access_detail") + private WorkspaceAccessDetail workspaceAccessDetail; + + public CreateWorkspaceAccessDetailLocalRequest setWorkspaceAccessDetail( + WorkspaceAccessDetail workspaceAccessDetail) { + this.workspaceAccessDetail = workspaceAccessDetail; + return this; + } + + public WorkspaceAccessDetail getWorkspaceAccessDetail() { + return workspaceAccessDetail; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWorkspaceAccessDetailLocalRequest that = (CreateWorkspaceAccessDetailLocalRequest) o; + return Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceAccessDetail); + } + + @Override + public String toString() { + return new ToStringer(CreateWorkspaceAccessDetailLocalRequest.class) + .add("workspaceAccessDetail", workspaceAccessDetail) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java new file mode 100755 index 000000000..985550754 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/CreateWorkspaceAccessDetailRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateWorkspaceAccessDetailRequest { + /** Required. The parent path for workspace access detail. */ + @JsonIgnore private String parent; + + /** Required. Workspace access detail to be created in . 
*/ + @JsonProperty("workspace_access_detail") + private WorkspaceAccessDetail workspaceAccessDetail; + + public CreateWorkspaceAccessDetailRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public CreateWorkspaceAccessDetailRequest setWorkspaceAccessDetail( + WorkspaceAccessDetail workspaceAccessDetail) { + this.workspaceAccessDetail = workspaceAccessDetail; + return this; + } + + public WorkspaceAccessDetail getWorkspaceAccessDetail() { + return workspaceAccessDetail; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWorkspaceAccessDetailRequest that = (CreateWorkspaceAccessDetailRequest) o; + return Objects.equals(parent, that.parent) + && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail); + } + + @Override + public int hashCode() { + return Objects.hash(parent, workspaceAccessDetail); + } + + @Override + public String toString() { + return new ToStringer(CreateWorkspaceAccessDetailRequest.class) + .add("parent", parent) + .add("workspaceAccessDetail", workspaceAccessDetail) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java new file mode 100755 index 000000000..ec0d9d788 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupProxyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteGroupProxyRequest { + /** Required. Internal ID of the group in Databricks. */ + @JsonIgnore private Long internalId; + + public DeleteGroupProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteGroupProxyRequest that = (DeleteGroupProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteGroupProxyRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java new file mode 100755 index 000000000..ab0f0db5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteGroupRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteGroupRequest { + /** Required. Internal ID of the group in Databricks. 
*/ + @JsonIgnore private Long internalId; + + public DeleteGroupRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteGroupRequest that = (DeleteGroupRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteGroupRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java new file mode 100755 index 000000000..1698be7de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalProxyRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteServicePrincipalProxyRequest { + /** Required. Internal ID of the service principal in Databricks. */ + @JsonIgnore private Long internalId; + + public DeleteServicePrincipalProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServicePrincipalProxyRequest that = (DeleteServicePrincipalProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteServicePrincipalProxyRequest.class) + .add("internalId", internalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java new file mode 100755 index 000000000..be9589b99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteServicePrincipalRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteServicePrincipalRequest { + /** Required. Internal ID of the service principal in Databricks. 
*/ + @JsonIgnore private Long internalId; + + public DeleteServicePrincipalRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteServicePrincipalRequest that = (DeleteServicePrincipalRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteServicePrincipalRequest.class) + .add("internalId", internalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java new file mode 100755 index 000000000..f0e6a092b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserProxyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteUserProxyRequest { + /** Required. Internal ID of the user in Databricks. */ + @JsonIgnore private Long internalId; + + public DeleteUserProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteUserProxyRequest that = (DeleteUserProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteUserProxyRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java new file mode 100755 index 000000000..efe69331b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteUserRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteUserRequest { + /** Required. Internal ID of the user in Databricks. 
*/ + @JsonIgnore private Long internalId; + + public DeleteUserRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteUserRequest that = (DeleteUserRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteUserRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java new file mode 100755 index 000000000..d0f737f5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailLocalRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteWorkspaceAccessDetailLocalRequest { + /** Required. ID of the principal in Databricks. */ + @JsonIgnore private Long principalId; + + public DeleteWorkspaceAccessDetailLocalRequest setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWorkspaceAccessDetailLocalRequest that = (DeleteWorkspaceAccessDetailLocalRequest) o; + return Objects.equals(principalId, that.principalId); + } + + @Override + public int hashCode() { + return Objects.hash(principalId); + } + + @Override + public String toString() { + return new ToStringer(DeleteWorkspaceAccessDetailLocalRequest.class) + .add("principalId", principalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java new file mode 100755 index 000000000..2ff0e7ba2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/DeleteWorkspaceAccessDetailRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteWorkspaceAccessDetailRequest { + /** Required. ID of the principal in Databricks to delete workspace access for. */ + @JsonIgnore private Long principalId; + + /** The workspace ID where the principal has access. 
*/ + @JsonIgnore private Long workspaceId; + + public DeleteWorkspaceAccessDetailRequest setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public DeleteWorkspaceAccessDetailRequest setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWorkspaceAccessDetailRequest that = (DeleteWorkspaceAccessDetailRequest) o; + return Objects.equals(principalId, that.principalId) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(principalId, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(DeleteWorkspaceAccessDetailRequest.class) + .add("principalId", principalId) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java new file mode 100755 index 000000000..a634eb259 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupProxyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetGroupProxyRequest { + /** Required. Internal ID of the group in Databricks. */ + @JsonIgnore private Long internalId; + + public GetGroupProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetGroupProxyRequest that = (GetGroupProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetGroupProxyRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java new file mode 100755 index 000000000..ba4ceffb6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetGroupRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetGroupRequest { + /** Required. Internal ID of the group in Databricks. 
*/ + @JsonIgnore private Long internalId; + + public GetGroupRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetGroupRequest that = (GetGroupRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetGroupRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java new file mode 100755 index 000000000..675796d4e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalProxyRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetServicePrincipalProxyRequest { + /** Required. Internal ID of the service principal in Databricks. */ + @JsonIgnore private Long internalId; + + public GetServicePrincipalProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServicePrincipalProxyRequest that = (GetServicePrincipalProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetServicePrincipalProxyRequest.class) + .add("internalId", internalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java new file mode 100755 index 000000000..e51607786 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetServicePrincipalRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetServicePrincipalRequest { + /** Required. Internal ID of the service principal in Databricks. 
*/ + @JsonIgnore private Long internalId; + + public GetServicePrincipalRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetServicePrincipalRequest that = (GetServicePrincipalRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetServicePrincipalRequest.class) + .add("internalId", internalId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java new file mode 100755 index 000000000..681dc1b3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserProxyRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetUserProxyRequest { + /** Required. Internal ID of the user in Databricks. */ + @JsonIgnore private Long internalId; + + public GetUserProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetUserProxyRequest that = (GetUserProxyRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetUserProxyRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java new file mode 100755 index 000000000..8cfc0a484 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/GetUserRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetUserRequest { + /** Required. Internal ID of the user in Databricks. 
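Each of the Get*/Delete* request types above carries a single internal ID, so a fetch-then-delete round trip through the workspace-level proxy API is short. A minimal sketch, not part of the generated diff, assuming a WorkspaceIamV2API handle named api has already been constructed (its wiring is outside this hunk) and using a made-up internal ID:

    import com.databricks.sdk.service.iamv2.*;

    static void fetchThenDelete(WorkspaceIamV2API api) {
      // 10001L is a placeholder internal ID, not a real principal.
      User user = api.getUserProxy(new GetUserProxyRequest().setInternalId(10001L));
      System.out.println(user); // ToStringer-backed toString(), e.g. for logging
      api.deleteUserProxy(new DeleteUserProxyRequest().setInternalId(10001L));
    }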
*/ + @JsonIgnore private Long internalId; + + public GetUserRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetUserRequest that = (GetUserRequest) o; + return Objects.equals(internalId, that.internalId); + } + + @Override + public int hashCode() { + return Objects.hash(internalId); + } + + @Override + public String toString() { + return new ToStringer(GetUserRequest.class).add("internalId", internalId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java new file mode 100755 index 000000000..53b964703 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsProxyRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListGroupsProxyRequest { + /** The maximum number of groups to return. The service may return fewer than this value. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + * page. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListGroupsProxyRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListGroupsProxyRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGroupsProxyRequest that = (ListGroupsProxyRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListGroupsProxyRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java new file mode 100755 index 000000000..6d64f2102 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListGroupsRequest { + /** The maximum number of groups to return. 
The service may return fewer than this value. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + * page. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListGroupsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListGroupsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGroupsRequest that = (ListGroupsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListGroupsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java new file mode 100755 index 000000000..c2ca63976 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListGroupsResponse.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** TODO: Write description later when this method is implemented */ +@Generated +public class ListGroupsResponse { + /** */ + @JsonProperty("groups") + private Collection<Group> groups; + + /** + * A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + * there are no subsequent pages.
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListGroupsResponse setGroups(Collection<Group> groups) { + this.groups = groups; + return this; + } + + public Collection<Group> getGroups() { + return groups; + } + + public ListGroupsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListGroupsResponse that = (ListGroupsResponse) o; + return Objects.equals(groups, that.groups) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(groups, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListGroupsResponse.class) + .add("groups", groups) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java new file mode 100755 index 000000000..df9b6119e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsProxyRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListServicePrincipalsProxyRequest { + /** The maximum number of SPs to return. The service may return fewer than this value. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + * subsequent page.
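ListGroupsResponse above pairs page_token with next_page_token in the usual cursor style: send no token on the first call, then echo back next_page_token until the service omits it. A sketch of draining all pages, again assuming an already-constructed WorkspaceIamV2API named api (nothing here is part of the generated code):

    import com.databricks.sdk.service.iamv2.*;
    import java.util.ArrayList;
    import java.util.List;

    static List<Group> listAllGroups(WorkspaceIamV2API api) {
      List<Group> all = new ArrayList<>();
      String token = null;
      do {
        ListGroupsResponse page =
            api.listGroupsProxy(
                new ListGroupsProxyRequest().setPageSize(100L).setPageToken(token));
        if (page.getGroups() != null) {
          all.addAll(page.getGroups());
        }
        token = page.getNextPageToken(); // omitted (null) once the last page is reached
      } while (token != null && !token.isEmpty());
      return all;
    }

The same loop shape applies to the ListServicePrincipals*, ListUsers*, and ListWorkspaceAccessDetails* request/response pairs that follow.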
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListServicePrincipalsProxyRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListServicePrincipalsProxyRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalsProxyRequest that = (ListServicePrincipalsProxyRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalsProxyRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java new file mode 100755 index 000000000..ac839e4f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsRequest.java @@ -0,0 +1,67 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListServicePrincipalsRequest { + /** + * The maximum number of service principals to return. The service may return fewer than this + * value. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + * subsequent page. 
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListServicePrincipalsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListServicePrincipalsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalsRequest that = (ListServicePrincipalsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java new file mode 100755 index 000000000..81c7f957f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListServicePrincipalsResponse.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** TODO: Write description later when this method is implemented */ +@Generated +public class ListServicePrincipalsResponse { + /** + * A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + * there are no subsequent pages. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("service_principals") + private Collection<ServicePrincipal> servicePrincipals; + + public ListServicePrincipalsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListServicePrincipalsResponse setServicePrincipals( + Collection<ServicePrincipal> servicePrincipals) { + this.servicePrincipals = servicePrincipals; + return this; + } + + public Collection<ServicePrincipal> getServicePrincipals() { + return servicePrincipals; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListServicePrincipalsResponse that = (ListServicePrincipalsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(servicePrincipals, that.servicePrincipals); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, servicePrincipals); + } + + @Override + public String toString() { + return new ToStringer(ListServicePrincipalsResponse.class) + .add("nextPageToken", nextPageToken) + .add("servicePrincipals", servicePrincipals) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java new file mode 100755 index 000000000..4f0ba814a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersProxyRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListUsersProxyRequest { + /** The maximum number of users to return. The service may return fewer than this value. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent + * page.
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListUsersProxyRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListUsersProxyRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsersProxyRequest that = (ListUsersProxyRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListUsersProxyRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java new file mode 100755 index 000000000..05ab3394f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListUsersRequest { + /** The maximum number of users to return. The service may return fewer than this value. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent + * page. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListUsersRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListUsersRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsersRequest that = (ListUsersRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListUsersRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java new file mode 100755 index 000000000..6cd3c5e21 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListUsersResponse.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** TODO: Write description later when this method is implemented */ +@Generated +public class ListUsersResponse { + /** + * A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + * there are no subsequent pages. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("users") + private Collection<User> users; + + public ListUsersResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListUsersResponse setUsers(Collection<User> users) { + this.users = users; + return this; + } + + public Collection<User> getUsers() { + return users; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListUsersResponse that = (ListUsersResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(users, that.users); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, users); + } + + @Override + public String toString() { + return new ToStringer(ListUsersResponse.class) + .add("nextPageToken", nextPageToken) + .add("users", users) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java new file mode 100755 index 000000000..8a32d473c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsLocalRequest.java @@ -0,0 +1,67 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListWorkspaceAccessDetailsLocalRequest { + /** + * The maximum number of workspace access details to return. The service may return fewer than + * this value. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to + * retrieve the subsequent page.
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListWorkspaceAccessDetailsLocalRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListWorkspaceAccessDetailsLocalRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceAccessDetailsLocalRequest that = (ListWorkspaceAccessDetailsLocalRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceAccessDetailsLocalRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java new file mode 100755 index 000000000..108638b0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsRequest.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListWorkspaceAccessDetailsRequest { + /** + * The maximum number of workspace access details to return. The service may return fewer than + * this value. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to + * retrieve the subsequent page. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The workspace ID for which the workspace access details are being fetched. 
*/ + @JsonIgnore private Long workspaceId; + + public ListWorkspaceAccessDetailsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListWorkspaceAccessDetailsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListWorkspaceAccessDetailsRequest setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceAccessDetailsRequest that = (ListWorkspaceAccessDetailsRequest) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceAccessDetailsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java new file mode 100755 index 000000000..8853de896 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/ListWorkspaceAccessDetailsResponse.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** TODO: Write description later when this method is implemented */ +@Generated +public class ListWorkspaceAccessDetailsResponse { + /** + * A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + * there are no subsequent pages. 
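Note the split between the two list-request shapes here: ListWorkspaceAccessDetailsLocalRequest has no workspace field because the target workspace is implied by the endpoint, while ListWorkspaceAccessDetailsRequest adds an explicit workspaceId for account-level calls. A hedged sketch of the workspace-local variant (api is the same assumed WorkspaceIamV2API handle as in the earlier sketches):

    ListWorkspaceAccessDetailsResponse page =
        api.listWorkspaceAccessDetailsLocal(
            new ListWorkspaceAccessDetailsLocalRequest().setPageSize(50L));
    if (page.getWorkspaceAccessDetails() != null) {
      page.getWorkspaceAccessDetails().forEach(System.out::println);
    }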
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("workspace_access_details") + private Collection<WorkspaceAccessDetail> workspaceAccessDetails; + + public ListWorkspaceAccessDetailsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListWorkspaceAccessDetailsResponse setWorkspaceAccessDetails( + Collection<WorkspaceAccessDetail> workspaceAccessDetails) { + this.workspaceAccessDetails = workspaceAccessDetails; + return this; + } + + public Collection<WorkspaceAccessDetail> getWorkspaceAccessDetails() { + return workspaceAccessDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceAccessDetailsResponse that = (ListWorkspaceAccessDetailsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(workspaceAccessDetails, that.workspaceAccessDetails); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, workspaceAccessDetails); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceAccessDetailsResponse.class) + .add("nextPageToken", nextPageToken) + .add("workspaceAccessDetails", workspaceAccessDetails) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java new file mode 100755 index 000000000..e580f65ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupProxyRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateGroupProxyRequest { + /** Required. Group to be updated in */ + @JsonProperty("group") + private Group group; + + /** Required. Internal ID of the group in Databricks. */ + @JsonIgnore private Long internalId; + + /** Optional. The list of fields to update.
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateGroupProxyRequest setGroup(Group group) { + this.group = group; + return this; + } + + public Group getGroup() { + return group; + } + + public UpdateGroupProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateGroupProxyRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateGroupProxyRequest that = (UpdateGroupProxyRequest) o; + return Objects.equals(group, that.group) + && Objects.equals(internalId, that.internalId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(group, internalId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateGroupProxyRequest.class) + .add("group", group) + .add("internalId", internalId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java new file mode 100755 index 000000000..b8b0c5850 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateGroupRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateGroupRequest { + /** Required. Group to be updated in */ + @JsonProperty("group") + private Group group; + + /** Required. Internal ID of the group in Databricks. */ + @JsonIgnore private Long internalId; + + /** Optional. The list of fields to update. 
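The Update* requests above follow the partial-update convention: the embedded message carries the new values, and the update_mask query parameter names which fields the service should actually apply. A sketch of renaming a group in place (api as before, an assumed WorkspaceIamV2API handle); the field path "display_name" and the Group.setDisplayName setter are illustrative assumptions, since the Group model is not part of this hunk:

    Group patch = new Group().setDisplayName("data-engineers"); // setter assumed, not shown in this diff
    Group updated =
        api.updateGroupProxy(
            new UpdateGroupProxyRequest()
                .setInternalId(4321L) // placeholder internal ID
                .setGroup(patch)
                .setUpdateMask("display_name")); // assumed field path; the service defines valid paths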
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateGroupRequest setGroup(Group group) { + this.group = group; + return this; + } + + public Group getGroup() { + return group; + } + + public UpdateGroupRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateGroupRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateGroupRequest that = (UpdateGroupRequest) o; + return Objects.equals(group, that.group) + && Objects.equals(internalId, that.internalId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(group, internalId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateGroupRequest.class) + .add("group", group) + .add("internalId", internalId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java new file mode 100755 index 000000000..a09930fb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalProxyRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateServicePrincipalProxyRequest { + /** Required. Internal ID of the service principal in Databricks. */ + @JsonIgnore private Long internalId; + + /** Required. Service principal to be updated in */ + @JsonProperty("service_principal") + private ServicePrincipal servicePrincipal; + + /** Optional. The list of fields to update. 
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateServicePrincipalProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateServicePrincipalProxyRequest setServicePrincipal(ServicePrincipal servicePrincipal) { + this.servicePrincipal = servicePrincipal; + return this; + } + + public ServicePrincipal getServicePrincipal() { + return servicePrincipal; + } + + public UpdateServicePrincipalProxyRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateServicePrincipalProxyRequest that = (UpdateServicePrincipalProxyRequest) o; + return Objects.equals(internalId, that.internalId) + && Objects.equals(servicePrincipal, that.servicePrincipal) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(internalId, servicePrincipal, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateServicePrincipalProxyRequest.class) + .add("internalId", internalId) + .add("servicePrincipal", servicePrincipal) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java new file mode 100755 index 000000000..362ac7daa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateServicePrincipalRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateServicePrincipalRequest { + /** Required. Internal ID of the service principal in Databricks. */ + @JsonIgnore private Long internalId; + + /** Required. Service Principal to be updated in */ + @JsonProperty("service_principal") + private ServicePrincipal servicePrincipal; + + /** Optional. The list of fields to update. 
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateServicePrincipalRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateServicePrincipalRequest setServicePrincipal(ServicePrincipal servicePrincipal) { + this.servicePrincipal = servicePrincipal; + return this; + } + + public ServicePrincipal getServicePrincipal() { + return servicePrincipal; + } + + public UpdateServicePrincipalRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateServicePrincipalRequest that = (UpdateServicePrincipalRequest) o; + return Objects.equals(internalId, that.internalId) + && Objects.equals(servicePrincipal, that.servicePrincipal) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(internalId, servicePrincipal, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateServicePrincipalRequest.class) + .add("internalId", internalId) + .add("servicePrincipal", servicePrincipal) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java new file mode 100755 index 000000000..5526d8149 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserProxyRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateUserProxyRequest { + /** Required. Internal ID of the user in Databricks. */ + @JsonIgnore private Long internalId; + + /** Optional. The list of fields to update. */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + /** Required. 
User to be updated in */ + @JsonProperty("user") + private User user; + + public UpdateUserProxyRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateUserProxyRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + public UpdateUserProxyRequest setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateUserProxyRequest that = (UpdateUserProxyRequest) o; + return Objects.equals(internalId, that.internalId) + && Objects.equals(updateMask, that.updateMask) + && Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(internalId, updateMask, user); + } + + @Override + public String toString() { + return new ToStringer(UpdateUserProxyRequest.class) + .add("internalId", internalId) + .add("updateMask", updateMask) + .add("user", user) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java new file mode 100755 index 000000000..4a75d8d0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateUserRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateUserRequest { + /** Required. Internal ID of the user in Databricks. */ + @JsonIgnore private Long internalId; + + /** Optional. The list of fields to update. */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + /** Required. 
User to be updated in */ + @JsonProperty("user") + private User user; + + public UpdateUserRequest setInternalId(Long internalId) { + this.internalId = internalId; + return this; + } + + public Long getInternalId() { + return internalId; + } + + public UpdateUserRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + public UpdateUserRequest setUser(User user) { + this.user = user; + return this; + } + + public User getUser() { + return user; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateUserRequest that = (UpdateUserRequest) o; + return Objects.equals(internalId, that.internalId) + && Objects.equals(updateMask, that.updateMask) + && Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(internalId, updateMask, user); + } + + @Override + public String toString() { + return new ToStringer(UpdateUserRequest.class) + .add("internalId", internalId) + .add("updateMask", updateMask) + .add("user", user) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java new file mode 100755 index 000000000..3d88bd1f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailLocalRequest.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateWorkspaceAccessDetailLocalRequest { + /** Required. ID of the principal in Databricks. */ + @JsonIgnore private Long principalId; + + /** Optional. The list of fields to update. */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + /** Required. 
WorkspaceAccessDetail to be updated in */ + @JsonProperty("workspace_access_detail") + private WorkspaceAccessDetail workspaceAccessDetail; + + public UpdateWorkspaceAccessDetailLocalRequest setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public UpdateWorkspaceAccessDetailLocalRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + public UpdateWorkspaceAccessDetailLocalRequest setWorkspaceAccessDetail( + WorkspaceAccessDetail workspaceAccessDetail) { + this.workspaceAccessDetail = workspaceAccessDetail; + return this; + } + + public WorkspaceAccessDetail getWorkspaceAccessDetail() { + return workspaceAccessDetail; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceAccessDetailLocalRequest that = (UpdateWorkspaceAccessDetailLocalRequest) o; + return Objects.equals(principalId, that.principalId) + && Objects.equals(updateMask, that.updateMask) + && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail); + } + + @Override + public int hashCode() { + return Objects.hash(principalId, updateMask, workspaceAccessDetail); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceAccessDetailLocalRequest.class) + .add("principalId", principalId) + .add("updateMask", updateMask) + .add("workspaceAccessDetail", workspaceAccessDetail) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java new file mode 100755 index 000000000..d8f77804b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/UpdateWorkspaceAccessDetailRequest.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iamv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateWorkspaceAccessDetailRequest { + /** Required. ID of the principal in Databricks. */ + @JsonIgnore private Long principalId; + + /** Optional. The list of fields to update. */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + /** Required. Workspace access detail to be updated in */ + @JsonProperty("workspace_access_detail") + private WorkspaceAccessDetail workspaceAccessDetail; + + /** Required. The workspace ID for which the workspace access detail is being updated. 
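The workspace-access-detail updates follow the same mask pattern, with the Local variant again inferring the workspace from the endpoint. A rough sketch against updateWorkspaceAccessDetailLocal; the no-arg WorkspaceAccessDetail constructor follows the generator's usual model shape, and "permissions" is an assumed field path, both unconfirmed by this hunk:

    WorkspaceAccessDetail patch = new WorkspaceAccessDetail(); // fields to change would be set here
    WorkspaceAccessDetail result =
        api.updateWorkspaceAccessDetailLocal(
            new UpdateWorkspaceAccessDetailLocalRequest()
                .setPrincipalId(10001L) // placeholder principal ID
                .setWorkspaceAccessDetail(patch)
                .setUpdateMask("permissions")); // assumed field path, for illustration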
*/ + @JsonIgnore private Long workspaceId; + + public UpdateWorkspaceAccessDetailRequest setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + + public UpdateWorkspaceAccessDetailRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + public UpdateWorkspaceAccessDetailRequest setWorkspaceAccessDetail( + WorkspaceAccessDetail workspaceAccessDetail) { + this.workspaceAccessDetail = workspaceAccessDetail; + return this; + } + + public WorkspaceAccessDetail getWorkspaceAccessDetail() { + return workspaceAccessDetail; + } + + public UpdateWorkspaceAccessDetailRequest setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceAccessDetailRequest that = (UpdateWorkspaceAccessDetailRequest) o; + return Objects.equals(principalId, that.principalId) + && Objects.equals(updateMask, that.updateMask) + && Objects.equals(workspaceAccessDetail, that.workspaceAccessDetail) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(principalId, updateMask, workspaceAccessDetail, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceAccessDetailRequest.class) + .add("principalId", principalId) + .add("updateMask", updateMask) + .add("workspaceAccessDetail", workspaceAccessDetail) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java index a9c53bcdf..d5c8f351e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2API.java @@ -26,6 +26,92 @@ public WorkspaceIamV2API(WorkspaceIamV2Service mock) { impl = mock; } + /** TODO: Write description later when this method is implemented */ + public Group createGroupProxy(CreateGroupProxyRequest request) { + return impl.createGroupProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal createServicePrincipalProxy(CreateServicePrincipalProxyRequest request) { + return impl.createServicePrincipalProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public User createUserProxy(CreateUserProxyRequest request) { + return impl.createUserProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public WorkspaceAccessDetail createWorkspaceAccessDetailLocal( + CreateWorkspaceAccessDetailLocalRequest request) { + return impl.createWorkspaceAccessDetailLocal(request); + } + + public void deleteGroupProxy(long internalId) { + deleteGroupProxy(new DeleteGroupProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteGroupProxy(DeleteGroupProxyRequest request) { + impl.deleteGroupProxy(request); + } + + public void deleteServicePrincipalProxy(long internalId) { + deleteServicePrincipalProxy(new 
DeleteServicePrincipalProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteServicePrincipalProxy(DeleteServicePrincipalProxyRequest request) { + impl.deleteServicePrincipalProxy(request); + } + + public void deleteUserProxy(long internalId) { + deleteUserProxy(new DeleteUserProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteUserProxy(DeleteUserProxyRequest request) { + impl.deleteUserProxy(request); + } + + public void deleteWorkspaceAccessDetailLocal(long principalId) { + deleteWorkspaceAccessDetailLocal( + new DeleteWorkspaceAccessDetailLocalRequest().setPrincipalId(principalId)); + } + + /** TODO: Write description later when this method is implemented */ + public void deleteWorkspaceAccessDetailLocal(DeleteWorkspaceAccessDetailLocalRequest request) { + impl.deleteWorkspaceAccessDetailLocal(request); + } + + public Group getGroupProxy(long internalId) { + return getGroupProxy(new GetGroupProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public Group getGroupProxy(GetGroupProxyRequest request) { + return impl.getGroupProxy(request); + } + + public ServicePrincipal getServicePrincipalProxy(long internalId) { + return getServicePrincipalProxy( + new GetServicePrincipalProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal getServicePrincipalProxy(GetServicePrincipalProxyRequest request) { + return impl.getServicePrincipalProxy(request); + } + + public User getUserProxy(long internalId) { + return getUserProxy(new GetUserProxyRequest().setInternalId(internalId)); + } + + /** TODO: Write description later when this method is implemented */ + public User getUserProxy(GetUserProxyRequest request) { + return impl.getUserProxy(request); + } + public WorkspaceAccessDetail getWorkspaceAccessDetailLocal(long principalId) { return getWorkspaceAccessDetailLocal( new GetWorkspaceAccessDetailLocalRequest().setPrincipalId(principalId)); @@ -43,6 +129,28 @@ public WorkspaceAccessDetail getWorkspaceAccessDetailLocal( return impl.getWorkspaceAccessDetailLocal(request); } + /** TODO: Write description later when this method is implemented */ + public ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest request) { + return impl.listGroupsProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public ListServicePrincipalsResponse listServicePrincipalsProxy( + ListServicePrincipalsProxyRequest request) { + return impl.listServicePrincipalsProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public ListUsersResponse listUsersProxy(ListUsersProxyRequest request) { + return impl.listUsersProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal( + ListWorkspaceAccessDetailsLocalRequest request) { + return impl.listWorkspaceAccessDetailsLocal(request); + } + /** * Resolves a group with the given external ID from the customer's IdP. If the group does not * exist, it will be created in the account. 
If the customer is not onboarded onto Automatic @@ -71,6 +179,27 @@ public ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest request) { return impl.resolveUserProxy(request); } + /** TODO: Write description later when this method is implemented */ + public Group updateGroupProxy(UpdateGroupProxyRequest request) { + return impl.updateGroupProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public ServicePrincipal updateServicePrincipalProxy(UpdateServicePrincipalProxyRequest request) { + return impl.updateServicePrincipalProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public User updateUserProxy(UpdateUserProxyRequest request) { + return impl.updateUserProxy(request); + } + + /** TODO: Write description later when this method is implemented */ + public WorkspaceAccessDetail updateWorkspaceAccessDetailLocal( + UpdateWorkspaceAccessDetailLocalRequest request) { + return impl.updateWorkspaceAccessDetailLocal(request); + } + public WorkspaceIamV2Service impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java index 91f88bc33..1903e2849 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Impl.java @@ -16,6 +16,156 @@ public WorkspaceIamV2Impl(ApiClient apiClient) { this.apiClient = apiClient; } + @Override + public Group createGroupProxy(CreateGroupProxyRequest request) { + String path = "/api/2.0/identity/groups"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getGroup())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal createServicePrincipalProxy(CreateServicePrincipalProxyRequest request) { + String path = "/api/2.0/identity/servicePrincipals"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getServicePrincipal())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User createUserProxy(CreateUserProxyRequest request) { + String path = "/api/2.0/identity/users"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getUser())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public WorkspaceAccessDetail createWorkspaceAccessDetailLocal( + CreateWorkspaceAccessDetailLocalRequest request) { + String path = "/api/2.0/identity/workspaceAccessDetails"; + try { + Request req = + new Request("POST", path, apiClient.serialize(request.getWorkspaceAccessDetail())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", 
"application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceAccessDetail.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteGroupProxy(DeleteGroupProxyRequest request) { + String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteServicePrincipalProxy(DeleteServicePrincipalProxyRequest request) { + String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteUserProxy(DeleteUserProxyRequest request) { + String path = String.format("/api/2.0/identity/users/%s", request.getInternalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteWorkspaceAccessDetailLocal(DeleteWorkspaceAccessDetailLocalRequest request) { + String path = + String.format("/api/2.0/identity/workspaceAccessDetails/%s", request.getPrincipalId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Group getGroupProxy(GetGroupProxyRequest request) { + String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal getServicePrincipalProxy(GetServicePrincipalProxyRequest request) { + String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User getUserProxy(GetUserProxyRequest request) { + String path = String.format("/api/2.0/identity/users/%s", request.getInternalId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public WorkspaceAccessDetail 
getWorkspaceAccessDetailLocal( GetWorkspaceAccessDetailLocalRequest request) { @@ -31,6 +181,60 @@ public WorkspaceAccessDetail getWorkspaceAccessDetailLocal( } } + @Override + public ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest request) { + String path = "/api/2.0/identity/groups"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListGroupsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListServicePrincipalsResponse listServicePrincipalsProxy( + ListServicePrincipalsProxyRequest request) { + String path = "/api/2.0/identity/servicePrincipals"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListServicePrincipalsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListUsersResponse listUsersProxy(ListUsersProxyRequest request) { + String path = "/api/2.0/identity/users"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListUsersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal( + ListWorkspaceAccessDetailsLocalRequest request) { + String path = "/api/2.0/identity/workspaceAccessDetails"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListWorkspaceAccessDetailsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ResolveGroupResponse resolveGroupProxy(ResolveGroupProxyRequest request) { String path = "/api/2.0/identity/groups/resolveByExternalId"; @@ -73,4 +277,63 @@ public ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest request) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public Group updateGroupProxy(UpdateGroupProxyRequest request) { + String path = String.format("/api/2.0/identity/groups/%s", request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getGroup())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ServicePrincipal updateServicePrincipalProxy(UpdateServicePrincipalProxyRequest request) { + String path = String.format("/api/2.0/identity/servicePrincipals/%s", request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getServicePrincipal())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public User 
updateUserProxy(UpdateUserProxyRequest request) { + String path = String.format("/api/2.0/identity/users/%s", request.getInternalId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getUser())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public WorkspaceAccessDetail updateWorkspaceAccessDetailLocal( + UpdateWorkspaceAccessDetailLocalRequest request) { + String path = + String.format("/api/2.0/identity/workspaceAccessDetails/%s", request.getPrincipalId()); + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getWorkspaceAccessDetail())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceAccessDetail.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java index f20471423..8614cc8d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iamv2/WorkspaceIamV2Service.java @@ -13,6 +13,44 @@ */ @Generated public interface WorkspaceIamV2Service { + /** TODO: Write description later when this method is implemented */ + Group createGroupProxy(CreateGroupProxyRequest createGroupProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal createServicePrincipalProxy( + CreateServicePrincipalProxyRequest createServicePrincipalProxyRequest); + + /** TODO: Write description later when this method is implemented */ + User createUserProxy(CreateUserProxyRequest createUserProxyRequest); + + /** TODO: Write description later when this method is implemented */ + WorkspaceAccessDetail createWorkspaceAccessDetailLocal( + CreateWorkspaceAccessDetailLocalRequest createWorkspaceAccessDetailLocalRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteGroupProxy(DeleteGroupProxyRequest deleteGroupProxyRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteServicePrincipalProxy( + DeleteServicePrincipalProxyRequest deleteServicePrincipalProxyRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteUserProxy(DeleteUserProxyRequest deleteUserProxyRequest); + + /** TODO: Write description later when this method is implemented */ + void deleteWorkspaceAccessDetailLocal( + DeleteWorkspaceAccessDetailLocalRequest deleteWorkspaceAccessDetailLocalRequest); + + /** TODO: Write description later when this method is implemented */ + Group getGroupProxy(GetGroupProxyRequest getGroupProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal getServicePrincipalProxy( + GetServicePrincipalProxyRequest getServicePrincipalProxyRequest); + + /** TODO: Write description later when this method is implemented */ + User getUserProxy(GetUserProxyRequest getUserProxyRequest); + /** * Returns the access details for a 
principal in the current workspace. Allows for checking access * details for any provisioned principal (user, service principal, or group) in the current @@ -23,6 +61,20 @@ public interface WorkspaceIamV2Service { WorkspaceAccessDetail getWorkspaceAccessDetailLocal( GetWorkspaceAccessDetailLocalRequest getWorkspaceAccessDetailLocalRequest); + /** TODO: Write description later when this method is implemented */ + ListGroupsResponse listGroupsProxy(ListGroupsProxyRequest listGroupsProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ListServicePrincipalsResponse listServicePrincipalsProxy( + ListServicePrincipalsProxyRequest listServicePrincipalsProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ListUsersResponse listUsersProxy(ListUsersProxyRequest listUsersProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ListWorkspaceAccessDetailsResponse listWorkspaceAccessDetailsLocal( + ListWorkspaceAccessDetailsLocalRequest listWorkspaceAccessDetailsLocalRequest); + /** * Resolves a group with the given external ID from the customer's IdP. If the group does not * exist, it will be created in the account. If the customer is not onboarded onto Automatic @@ -44,4 +96,18 @@ ResolveServicePrincipalResponse resolveServicePrincipalProxy( * this will return an error. */ ResolveUserResponse resolveUserProxy(ResolveUserProxyRequest resolveUserProxyRequest); + + /** TODO: Write description later when this method is implemented */ + Group updateGroupProxy(UpdateGroupProxyRequest updateGroupProxyRequest); + + /** TODO: Write description later when this method is implemented */ + ServicePrincipal updateServicePrincipalProxy( + UpdateServicePrincipalProxyRequest updateServicePrincipalProxyRequest); + + /** TODO: Write description later when this method is implemented */ + User updateUserProxy(UpdateUserProxyRequest updateUserProxyRequest); + + /** TODO: Write description later when this method is implemented */ + WorkspaceAccessDetail updateWorkspaceAccessDetailLocal( + UpdateWorkspaceAccessDetailLocalRequest updateWorkspaceAccessDetailLocalRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index a78f27377..08b5cf207 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -47,6 +47,14 @@ public class BaseJob { @JsonProperty("job_id") private Long jobId; + /** + * Path of the job object in workspace file tree, including file extension. If absent, the job + * doesn't have a workspace object. Example: + * /Workspace/user@example.com/my_project/my_job.job.json + */ + @JsonProperty("path") + private String path; + /** * Settings for this job and all of its runs. These settings can be updated using the `resetJob` * method. 
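The new WorkspaceIamV2 proxy and local methods above all follow the SDK's usual request-builder pattern. Below is a minimal sketch of a masked update through the local workspace-access-details endpoint; the w.workspaceIamV2() accessor name, the principal ID, and the "permissions" mask entry are illustrative assumptions, not part of this diff.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.iamv2.UpdateWorkspaceAccessDetailLocalRequest;
import com.databricks.sdk.service.iamv2.WorkspaceAccessDetail;

public class AccessDetailUpdateSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Read the current access detail for a principal in this workspace
    // (GET /api/2.0/identity/workspaceAccessDetails/{principalId}).
    WorkspaceAccessDetail current = w.workspaceIamV2().getWorkspaceAccessDetailLocal(12345L);
    // PATCH the detail back; per the impl above, only fields named in
    // update_mask are applied. "permissions" is a hypothetical mask field.
    WorkspaceAccessDetail updated =
        w.workspaceIamV2()
            .updateWorkspaceAccessDetailLocal(
                new UpdateWorkspaceAccessDetailLocalRequest()
                    .setPrincipalId(12345L)
                    .setUpdateMask("permissions")
                    .setWorkspaceAccessDetail(current));
    System.out.println(updated);
  }
}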
@@ -112,6 +120,15 @@ public Long getJobId() { return jobId; } + public BaseJob setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + public BaseJob setSettings(JobSettings settings) { this.settings = settings; return this; @@ -141,6 +158,7 @@ public boolean equals(Object o) { && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobId, that.jobId) + && Objects.equals(path, that.path) && Objects.equals(settings, that.settings) && Objects.equals(triggerState, that.triggerState); } @@ -154,6 +172,7 @@ public int hashCode() { effectiveUsagePolicyId, hasMore, jobId, + path, settings, triggerState); } @@ -167,6 +186,7 @@ public String toString() { .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("hasMore", hasMore) .add("jobId", jobId) + .add("path", path) .add("settings", settings) .add("triggerState", triggerState) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index c38a239bd..adf3d7ab6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -127,6 +127,13 @@ public class CreateJob { @JsonProperty("parameters") private Collection parameters; + /** + * Path of the job parent folder in workspace file tree. If absent, the job doesn't have a + * workspace object. + */ + @JsonProperty("parent_path") + private String parentPath; + /** * The performance mode on a serverless job. This field determines the level of compute * performance or cost-efficiency for the run. @@ -343,6 +350,15 @@ public Collection getParameters() { return parameters; } + public CreateJob setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + public CreateJob setPerformanceTarget(PerformanceTarget performanceTarget) { this.performanceTarget = performanceTarget; return this; @@ -454,6 +470,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(parameters, that.parameters) + && Objects.equals(parentPath, that.parentPath) && Objects.equals(performanceTarget, that.performanceTarget) && Objects.equals(queue, that.queue) && Objects.equals(runAs, that.runAs) @@ -485,6 +502,7 @@ public int hashCode() { name, notificationSettings, parameters, + parentPath, performanceTarget, queue, runAs, @@ -516,6 +534,7 @@ public String toString() { .add("name", name) .add("notificationSettings", notificationSettings) .add("parameters", parameters) + .add("parentPath", parentPath) .add("performanceTarget", performanceTarget) .add("queue", queue) .add("runAs", runAs) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index 3b2a3d50b..f270de7e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -52,6 +52,14 @@ public class Job { @JsonProperty("next_page_token") private String nextPageToken; + /** + * Path of the job object in workspace file tree, including file extension. 
If absent, the job + * doesn't have a workspace object. Example: + * /Workspace/user@example.com/my_project/my_job.job.json + */ + @JsonProperty("path") + private String path; + /** * The email of an active workspace user or the application ID of a service principal that the job * runs as. This value can be changed by setting the `run_as` field when creating or updating a @@ -138,6 +146,15 @@ public String getNextPageToken() { return nextPageToken; } + public Job setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + public Job setRunAsUserName(String runAsUserName) { this.runAsUserName = runAsUserName; return this; @@ -177,6 +194,7 @@ public boolean equals(Object o) { && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobId, that.jobId) && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(path, that.path) && Objects.equals(runAsUserName, that.runAsUserName) && Objects.equals(settings, that.settings) && Objects.equals(triggerState, that.triggerState); @@ -192,6 +210,7 @@ public int hashCode() { hasMore, jobId, nextPageToken, + path, runAsUserName, settings, triggerState); @@ -207,6 +226,7 @@ public String toString() { .add("hasMore", hasMore) .add("jobId", jobId) .add("nextPageToken", nextPageToken) + .add("path", path) .add("runAsUserName", runAsUserName) .add("settings", settings) .add("triggerState", triggerState) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index d0d2ad7ee..91d8b5de6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -123,6 +123,13 @@ public class JobSettings { @JsonProperty("parameters") private Collection parameters; + /** + * Path of the job parent folder in workspace file tree. If absent, the job doesn't have a + * workspace object. + */ + @JsonProperty("parent_path") + private String parentPath; + /** * The performance mode on a serverless job. This field determines the level of compute * performance or cost-efficiency for the run. 
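The path and parent_path additions on BaseJob, Job, CreateJob, and JobSettings round-trip as a pair: the caller picks the parent folder at create time and reads the resolved object path back. A hedged sketch, assuming the SDK's existing w.jobs() accessor and an invented workspace folder:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.Job;

public class JobPathSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // parent_path places the job object in the workspace file tree.
    long jobId =
        w.jobs()
            .create(
                new CreateJob()
                    .setName("nightly-etl")
                    .setParentPath("/Workspace/Users/user@example.com/my_project"))
            .getJobId();
    // The read side surfaces the full object path, including file extension,
    // e.g. /Workspace/Users/user@example.com/my_project/nightly-etl.job.json
    Job job = w.jobs().get(jobId);
    System.out.println(job.getPath());
  }
}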
@@ -330,6 +337,15 @@ public Collection getParameters() { return parameters; } + public JobSettings setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + public JobSettings setPerformanceTarget(PerformanceTarget performanceTarget) { this.performanceTarget = performanceTarget; return this; @@ -440,6 +456,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(parameters, that.parameters) + && Objects.equals(parentPath, that.parentPath) && Objects.equals(performanceTarget, that.performanceTarget) && Objects.equals(queue, that.queue) && Objects.equals(runAs, that.runAs) @@ -470,6 +487,7 @@ public int hashCode() { name, notificationSettings, parameters, + parentPath, performanceTarget, queue, runAs, @@ -500,6 +518,7 @@ public String toString() { .add("name", name) .add("notificationSettings", notificationSettings) .add("parameters", parameters) + .add("parentPath", parentPath) .add("performanceTarget", performanceTarget) .add("queue", queue) .add("runAs", runAs) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java new file mode 100755 index 000000000..36452e42e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java @@ -0,0 +1,125 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ModelTriggerConfiguration { + /** + * Aliases of the model versions to monitor. Can only be used in conjunction with condition + * MODEL_ALIAS_SET. + */ + @JsonProperty("aliases") + private Collection aliases; + + /** The condition based on which to trigger a job run. */ + @JsonProperty("condition") + private ModelTriggerConfigurationCondition condition; + + /** + * If set, the trigger starts a run only after the specified amount of time has passed since the + * last time the trigger fired. The minimum allowed value is 60 seconds. + */ + @JsonProperty("min_time_between_triggers_seconds") + private Long minTimeBetweenTriggersSeconds; + + /** + * Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level + * triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of + * metastore-level triggers. + */ + @JsonProperty("securable_name") + private String securableName; + + /** + * If set, the trigger starts a run only after no model updates have occurred for the specified + * time and can be used to wait for a series of model updates before triggering a run. The minimum + * allowed value is 60 seconds. 
+ */ + @JsonProperty("wait_after_last_change_seconds") + private Long waitAfterLastChangeSeconds; + + public ModelTriggerConfiguration setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public ModelTriggerConfiguration setCondition(ModelTriggerConfigurationCondition condition) { + this.condition = condition; + return this; + } + + public ModelTriggerConfigurationCondition getCondition() { + return condition; + } + + public ModelTriggerConfiguration setMinTimeBetweenTriggersSeconds( + Long minTimeBetweenTriggersSeconds) { + this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds; + return this; + } + + public Long getMinTimeBetweenTriggersSeconds() { + return minTimeBetweenTriggersSeconds; + } + + public ModelTriggerConfiguration setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public ModelTriggerConfiguration setWaitAfterLastChangeSeconds(Long waitAfterLastChangeSeconds) { + this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds; + return this; + } + + public Long getWaitAfterLastChangeSeconds() { + return waitAfterLastChangeSeconds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelTriggerConfiguration that = (ModelTriggerConfiguration) o; + return Objects.equals(aliases, that.aliases) + && Objects.equals(condition, that.condition) + && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds) + && Objects.equals(securableName, that.securableName) + && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds); + } + + @Override + public int hashCode() { + return Objects.hash( + aliases, + condition, + minTimeBetweenTriggersSeconds, + securableName, + waitAfterLastChangeSeconds); + } + + @Override + public String toString() { + return new ToStringer(ModelTriggerConfiguration.class) + .add("aliases", aliases) + .add("condition", condition) + .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds) + .add("securableName", securableName) + .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java new file mode 100755 index 000000000..263b649fa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ModelTriggerConfigurationCondition { + MODEL_ALIAS_SET, + MODEL_CREATED, + MODEL_VERSION_READY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index c68229720..66e16b5c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -84,6 +84,13 @@ public class RunTask { @JsonProperty("description") private String description; + /** + * An optional flag to disable the task. If set to true, the task will not run even if it is part + * of a job. + */ + @JsonProperty("disabled") + private Boolean disabled; + /** * The actual performance target used by the serverless run during execution. This can differ from * the client-set performance target on the request depending on whether the performance mode is @@ -403,6 +410,15 @@ public String getDescription() { return description; } + public RunTask setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + public RunTask setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) { this.effectivePerformanceTarget = effectivePerformanceTarget; return this; @@ -734,6 +750,7 @@ public boolean equals(Object o) { && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) + && Objects.equals(disabled, that.disabled) && Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(endTime, that.endTime) @@ -785,6 +802,7 @@ public int hashCode() { dbtTask, dependsOn, description, + disabled, effectivePerformanceTarget, emailNotifications, endTime, @@ -836,6 +854,7 @@ public String toString() { .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) + .add("disabled", disabled) .add("effectivePerformanceTarget", effectivePerformanceTarget) .add("emailNotifications", emailNotifications) .add("endTime", endTime) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 2ca6b1107..d5510335a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -57,6 +57,13 @@ public class SubmitTask { @JsonProperty("description") private String description; + /** + * An optional flag to disable the task. If set to true, the task will not run even if it is part + * of a job. + */ + @JsonProperty("disabled") + private Boolean disabled; + /** * An optional set of email addresses notified when the task run begins or completes. The default * behavior is to not send any emails. 
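ModelTriggerConfiguration above is a plain builder-style payload; it attaches to a job through the new TriggerSettings.setModel(...) shown further below in this diff. A small construction sketch, with illustrative catalog, schema, and model names:

import com.databricks.sdk.service.jobs.ModelTriggerConfiguration;
import com.databricks.sdk.service.jobs.ModelTriggerConfigurationCondition;
import java.util.Arrays;

public class ModelTriggerSketch {
  public static void main(String[] args) {
    ModelTriggerConfiguration trigger =
        new ModelTriggerConfiguration()
            // Fire when the "champion" alias is set on a version of this model;
            // aliases may only be used with the MODEL_ALIAS_SET condition.
            .setCondition(ModelTriggerConfigurationCondition.MODEL_ALIAS_SET)
            .setAliases(Arrays.asList("champion"))
            .setSecurableName("mycatalog.myschema.mymodel")
            // Debounce: at most one run per hour, and wait out update bursts.
            // Both minimums are 60 seconds per the field docs above.
            .setMinTimeBetweenTriggersSeconds(3600L)
            .setWaitAfterLastChangeSeconds(120L);
    System.out.println(trigger);
  }
}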
@@ -260,6 +267,15 @@ public String getDescription() { return description; } + public SubmitTask setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + public SubmitTask setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -471,6 +487,7 @@ public boolean equals(Object o) { && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) + && Objects.equals(disabled, that.disabled) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(environmentKey, that.environmentKey) && Objects.equals(existingClusterId, that.existingClusterId) @@ -506,6 +523,7 @@ public int hashCode() { dbtTask, dependsOn, description, + disabled, emailNotifications, environmentKey, existingClusterId, @@ -541,6 +559,7 @@ public String toString() { .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) + .add("disabled", disabled) .add("emailNotifications", emailNotifications) .add("environmentKey", environmentKey) .add("existingClusterId", existingClusterId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java index db8989c2e..999c1128d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java @@ -22,7 +22,7 @@ public class TableUpdateTriggerConfiguration { private Long minTimeBetweenTriggersSeconds; /** - * A list of Delta tables to monitor for changes. The table name must be in the format + * A list of tables to monitor for changes. The table name must be in the format * `catalog_name.schema_name.table_name`. */ @JsonProperty("table_names") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java index 7ee1fe4b1..0c5217593 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java @@ -13,6 +13,10 @@ public class TriggerSettings { @JsonProperty("file_arrival") private FileArrivalTriggerConfiguration fileArrival; + /** */ + @JsonProperty("model") + private ModelTriggerConfiguration model; + /** Whether this trigger is paused or not. 
*/ @JsonProperty("pause_status") private PauseStatus pauseStatus; @@ -38,6 +42,15 @@ public FileArrivalTriggerConfiguration getFileArrival() { return fileArrival; } + public TriggerSettings setModel(ModelTriggerConfiguration model) { + this.model = model; + return this; + } + + public ModelTriggerConfiguration getModel() { + return model; + } + public TriggerSettings setPauseStatus(PauseStatus pauseStatus) { this.pauseStatus = pauseStatus; return this; @@ -80,6 +93,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; TriggerSettings that = (TriggerSettings) o; return Objects.equals(fileArrival, that.fileArrival) + && Objects.equals(model, that.model) && Objects.equals(pauseStatus, that.pauseStatus) && Objects.equals(periodic, that.periodic) && Objects.equals(table, that.table) @@ -88,13 +102,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(fileArrival, pauseStatus, periodic, table, tableUpdate); + return Objects.hash(fileArrival, model, pauseStatus, periodic, table, tableUpdate); } @Override public String toString() { return new ToStringer(TriggerSettings.class) .add("fileArrival", fileArrival) + .add("model", model) .add("pauseStatus", pauseStatus) .add("periodic", periodic) .add("table", table) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java index cc7583362..6e734b77e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java @@ -9,6 +9,7 @@ public enum AssetType { ASSET_TYPE_APP, ASSET_TYPE_DATA_TABLE, ASSET_TYPE_GIT_REPO, + ASSET_TYPE_MCP, ASSET_TYPE_MEDIA, ASSET_TYPE_MODEL, ASSET_TYPE_NOTEBOOK, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index a3080894f..09bbcbe2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -164,6 +164,11 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) { return impl.getLoggedModel(request); } + /** Batch endpoint for getting logged models from a list of model IDs */ + public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) { + return impl.getLoggedModels(request); + } + public GetExperimentPermissionLevelsResponse getPermissionLevels(String experimentId) { return getPermissionLevels( new GetExperimentPermissionLevelsRequest().setExperimentId(experimentId)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index 0d39a660b..d3b844e34 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -208,6 +208,19 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) { } } + @Override + public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) { + String path = "/api/2.0/mlflow/logged-models:batchGet"; + try { + Request req = new Request("GET", path); + 
ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetLoggedModelsRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GetExperimentPermissionLevelsResponse getPermissionLevels( GetExperimentPermissionLevelsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 486c97d62..a2eba9962 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -89,6 +89,9 @@ FinalizeLoggedModelResponse finalizeLoggedModel( /** Get a logged model. */ GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest getLoggedModelRequest); + /** Batch endpoint for getting logged models from a list of model IDs */ + GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest getLoggedModelsRequest); + /** Gets the permission levels that a user can have on an object. */ GetExperimentPermissionLevelsResponse getPermissionLevels( GetExperimentPermissionLevelsRequest getExperimentPermissionLevelsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java new file mode 100755 index 000000000..c71186bd0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetLoggedModelsRequest { + /** The IDs of the logged models to retrieve. Max threshold is 100. */ + @JsonIgnore + @QueryParam("model_ids") + private Collection modelIds; + + public GetLoggedModelsRequest setModelIds(Collection modelIds) { + this.modelIds = modelIds; + return this; + } + + public Collection getModelIds() { + return modelIds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelsRequest that = (GetLoggedModelsRequest) o; + return Objects.equals(modelIds, that.modelIds); + } + + @Override + public int hashCode() { + return Objects.hash(modelIds); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelsRequest.class).add("modelIds", modelIds).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java new file mode 100755 index 000000000..a5469473e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetLoggedModelsRequestResponse { + /** The retrieved logged models. */ + @JsonProperty("models") + private Collection models; + + public GetLoggedModelsRequestResponse setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelsRequestResponse that = (GetLoggedModelsRequestResponse) o; + return Objects.equals(models, that.models); + } + + @Override + public int hashCode() { + return Objects.hash(models); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelsRequestResponse.class).add("models", models).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java index ac3b6fd36..74495054d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java @@ -9,14 +9,32 @@ @Generated public class TokenAccessPolicy { + /** absolute OAuth session TTL in minutes when single-use refresh tokens are enabled */ + @JsonProperty("absolute_session_lifetime_in_minutes") + private Long absoluteSessionLifetimeInMinutes; + /** access token time to live in minutes */ @JsonProperty("access_token_ttl_in_minutes") private Long accessTokenTtlInMinutes; + /** whether to enable single-use refresh tokens */ + @JsonProperty("enable_single_use_refresh_tokens") + private Boolean enableSingleUseRefreshTokens; + /** refresh token time to live in minutes */ @JsonProperty("refresh_token_ttl_in_minutes") private Long refreshTokenTtlInMinutes; + public TokenAccessPolicy setAbsoluteSessionLifetimeInMinutes( + Long absoluteSessionLifetimeInMinutes) { + this.absoluteSessionLifetimeInMinutes = absoluteSessionLifetimeInMinutes; + return this; + } + + public Long getAbsoluteSessionLifetimeInMinutes() { + return absoluteSessionLifetimeInMinutes; + } + public TokenAccessPolicy setAccessTokenTtlInMinutes(Long accessTokenTtlInMinutes) { this.accessTokenTtlInMinutes = accessTokenTtlInMinutes; return this; @@ -26,6 +44,15 @@ public Long getAccessTokenTtlInMinutes() { return accessTokenTtlInMinutes; } + public TokenAccessPolicy setEnableSingleUseRefreshTokens(Boolean enableSingleUseRefreshTokens) { + this.enableSingleUseRefreshTokens = enableSingleUseRefreshTokens; + return this; + } + + public Boolean getEnableSingleUseRefreshTokens() { + return enableSingleUseRefreshTokens; + } + public TokenAccessPolicy setRefreshTokenTtlInMinutes(Long refreshTokenTtlInMinutes) { this.refreshTokenTtlInMinutes = refreshTokenTtlInMinutes; return this; @@ -40,19 +67,27 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TokenAccessPolicy that = (TokenAccessPolicy) o; - return Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes) + return Objects.equals(absoluteSessionLifetimeInMinutes, that.absoluteSessionLifetimeInMinutes) + && 
Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes) + && Objects.equals(enableSingleUseRefreshTokens, that.enableSingleUseRefreshTokens) && Objects.equals(refreshTokenTtlInMinutes, that.refreshTokenTtlInMinutes); } @Override public int hashCode() { - return Objects.hash(accessTokenTtlInMinutes, refreshTokenTtlInMinutes); + return Objects.hash( + absoluteSessionLifetimeInMinutes, + accessTokenTtlInMinutes, + enableSingleUseRefreshTokens, + refreshTokenTtlInMinutes); } @Override public String toString() { return new ToStringer(TokenAccessPolicy.class) + .add("absoluteSessionLifetimeInMinutes", absoluteSessionLifetimeInMinutes) .add("accessTokenTtlInMinutes", accessTokenTtlInMinutes) + .add("enableSingleUseRefreshTokens", enableSingleUseRefreshTokens) .add("refreshTokenTtlInMinutes", refreshTokenTtlInMinutes) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java new file mode 100755 index 000000000..00ce73ecb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ApplyEnvironmentRequest { + /** */ + @JsonIgnore private String pipelineId; + + public ApplyEnvironmentRequest setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApplyEnvironmentRequest that = (ApplyEnvironmentRequest) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(ApplyEnvironmentRequest.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java new file mode 100755 index 000000000..9dd9d8c06 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class ApplyEnvironmentRequestResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ApplyEnvironmentRequestResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java new file mode 100755 index 000000000..729e23fae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ConnectionParameters { + /** + * Source catalog for initial connection. This is necessary for schema exploration in some + * database systems like Oracle, and optional but nice-to-have in some other database systems like + * Postgres. For Oracle databases, this maps to a service name. + */ + @JsonProperty("source_catalog") + private String sourceCatalog; + + public ConnectionParameters setSourceCatalog(String sourceCatalog) { + this.sourceCatalog = sourceCatalog; + return this; + } + + public String getSourceCatalog() { + return sourceCatalog; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectionParameters that = (ConnectionParameters) o; + return Objects.equals(sourceCatalog, that.sourceCatalog); + } + + @Override + public int hashCode() { + return Objects.hash(sourceCatalog); + } + + @Override + public String toString() { + return new ToStringer(ConnectionParameters.class) + .add("sourceCatalog", sourceCatalog) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 167282b32..9d6fdfd31 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -151,6 +151,10 @@ public class CreatePipeline { @JsonProperty("trigger") private PipelineTrigger trigger; + /** Usage policy of this pipeline. 
*/ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + public CreatePipeline setAllowDuplicateNames(Boolean allowDuplicateNames) { this.allowDuplicateNames = allowDuplicateNames; return this; @@ -421,6 +425,15 @@ public PipelineTrigger getTrigger() { return trigger; } + public CreatePipeline setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -455,7 +468,8 @@ public boolean equals(Object o) { && Objects.equals(storage, that.storage) && Objects.equals(tags, that.tags) && Objects.equals(target, that.target) - && Objects.equals(trigger, that.trigger); + && Objects.equals(trigger, that.trigger) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override @@ -490,7 +504,8 @@ public int hashCode() { storage, tags, target, - trigger); + trigger, + usagePolicyId); } @Override @@ -526,6 +541,7 @@ public String toString() { .add("tags", tags) .add("target", target) .add("trigger", trigger) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 5b8e3c4f2..6cc967ec2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -160,6 +160,10 @@ public class EditPipeline { @JsonProperty("trigger") private PipelineTrigger trigger; + /** Usage policy of this pipeline. */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + public EditPipeline setAllowDuplicateNames(Boolean allowDuplicateNames) { this.allowDuplicateNames = allowDuplicateNames; return this; @@ -439,6 +443,15 @@ public PipelineTrigger getTrigger() { return trigger; } + public EditPipeline setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -474,7 +487,8 @@ public boolean equals(Object o) { && Objects.equals(storage, that.storage) && Objects.equals(tags, that.tags) && Objects.equals(target, that.target) - && Objects.equals(trigger, that.trigger); + && Objects.equals(trigger, that.trigger) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override @@ -510,7 +524,8 @@ public int hashCode() { storage, tags, target, - trigger); + trigger, + usagePolicyId); } @Override @@ -547,6 +562,7 @@ public String toString() { .add("tags", tags) .add("target", target) .add("trigger", trigger) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java index ff158fa65..1c77c1b2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java @@ -26,6 +26,10 @@ public class GetPipelineResponse { @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** Serverless usage policy ID of the pipeline. 
*/ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** The health of a pipeline. */ @JsonProperty("health") private GetPipelineResponseHealth health; @@ -102,6 +106,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public GetPipelineResponse setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public GetPipelineResponse setHealth(GetPipelineResponseHealth health) { this.health = health; return this; @@ -192,6 +205,7 @@ public boolean equals(Object o) { && Objects.equals(clusterId, that.clusterId) && Objects.equals(creatorUserName, that.creatorUserName) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(health, that.health) && Objects.equals(lastModified, that.lastModified) && Objects.equals(latestUpdates, that.latestUpdates) @@ -210,6 +224,7 @@ public int hashCode() { clusterId, creatorUserName, effectiveBudgetPolicyId, + effectiveUsagePolicyId, health, lastModified, latestUpdates, @@ -228,6 +243,7 @@ public String toString() { .add("clusterId", clusterId) .add("creatorUserName", creatorUserName) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("health", health) .add("lastModified", lastModified) .add("latestUpdates", latestUpdates) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java index 58142fafd..4361c4480 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java @@ -23,6 +23,10 @@ public class IngestionGatewayPipelineDefinition { @JsonProperty("connection_name") private String connectionName; + /** Optional, Internal. Parameters required to establish an initial connection with the source. */ + @JsonProperty("connection_parameters") + private ConnectionParameters connectionParameters; + /** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. 
*/ @JsonProperty("gateway_storage_catalog") private String gatewayStorageCatalog; @@ -57,6 +61,16 @@ public String getConnectionName() { return connectionName; } + public IngestionGatewayPipelineDefinition setConnectionParameters( + ConnectionParameters connectionParameters) { + this.connectionParameters = connectionParameters; + return this; + } + + public ConnectionParameters getConnectionParameters() { + return connectionParameters; + } + public IngestionGatewayPipelineDefinition setGatewayStorageCatalog(String gatewayStorageCatalog) { this.gatewayStorageCatalog = gatewayStorageCatalog; return this; @@ -91,6 +105,7 @@ public boolean equals(Object o) { IngestionGatewayPipelineDefinition that = (IngestionGatewayPipelineDefinition) o; return Objects.equals(connectionId, that.connectionId) && Objects.equals(connectionName, that.connectionName) + && Objects.equals(connectionParameters, that.connectionParameters) && Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog) && Objects.equals(gatewayStorageName, that.gatewayStorageName) && Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema); @@ -101,6 +116,7 @@ public int hashCode() { return Objects.hash( connectionId, connectionName, + connectionParameters, gatewayStorageCatalog, gatewayStorageName, gatewayStorageSchema); @@ -111,6 +127,7 @@ public String toString() { return new ToStringer(IngestionGatewayPipelineDefinition.class) .add("connectionId", connectionId) .add("connectionName", connectionName) + .add("connectionParameters", connectionParameters) .add("gatewayStorageCatalog", gatewayStorageCatalog) .add("gatewayStorageName", gatewayStorageName) .add("gatewayStorageSchema", gatewayStorageSchema) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 6c311c809..1e96af349 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -17,6 +17,14 @@ public class IngestionPipelineDefinition { @JsonProperty("connection_name") private String connectionName; + /** + * Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs + * directly without the need to specify a UC connection or ingestion gateway. The `source_catalog` + * fields in objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from. + */ + @JsonProperty("ingest_from_uc_foreign_catalog") + private Boolean ingestFromUcForeignCatalog; + /** * Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate * with the source database. This is used with connectors to databases like SQL Server. 
@@ -65,6 +73,16 @@ public String getConnectionName() { return connectionName; } + public IngestionPipelineDefinition setIngestFromUcForeignCatalog( + Boolean ingestFromUcForeignCatalog) { + this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog; + return this; + } + + public Boolean getIngestFromUcForeignCatalog() { + return ingestFromUcForeignCatalog; + } + public IngestionPipelineDefinition setIngestionGatewayId(String ingestionGatewayId) { this.ingestionGatewayId = ingestionGatewayId; return this; @@ -126,6 +144,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; IngestionPipelineDefinition that = (IngestionPipelineDefinition) o; return Objects.equals(connectionName, that.connectionName) + && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog) && Objects.equals(ingestionGatewayId, that.ingestionGatewayId) && Objects.equals(netsuiteJarPath, that.netsuiteJarPath) && Objects.equals(objects, that.objects) @@ -138,6 +157,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( connectionName, + ingestFromUcForeignCatalog, ingestionGatewayId, netsuiteJarPath, objects, @@ -150,6 +170,7 @@ public int hashCode() { public String toString() { return new ToStringer(IngestionPipelineDefinition.class) .add("connectionName", connectionName) + .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog) .add("ingestionGatewayId", ingestionGatewayId) .add("netsuiteJarPath", netsuiteJarPath) .add("objects", objects) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java index 9e36878d7..88923522d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java @@ -36,6 +36,10 @@ public class Origin { @JsonProperty("flow_name") private String flowName; + /** The UUID of the graph associated with this event, corresponding to a GRAPH_UPDATED event. 
*/ + @JsonProperty("graph_id") + private String graphId; + /** The optional host name where the event was triggered */ @JsonProperty("host") private String host; @@ -134,6 +138,15 @@ public String getFlowName() { return flowName; } + public Origin setGraphId(String graphId) { + this.graphId = graphId; + return this; + } + + public String getGraphId() { + return graphId; + } + public Origin setHost(String host) { this.host = host; return this; @@ -244,6 +257,7 @@ public boolean equals(Object o) { && Objects.equals(datasetName, that.datasetName) && Objects.equals(flowId, that.flowId) && Objects.equals(flowName, that.flowName) + && Objects.equals(graphId, that.graphId) && Objects.equals(host, that.host) && Objects.equals(maintenanceId, that.maintenanceId) && Objects.equals(materializationName, that.materializationName) @@ -266,6 +280,7 @@ public int hashCode() { datasetName, flowId, flowName, + graphId, host, maintenanceId, materializationName, @@ -288,6 +303,7 @@ public String toString() { .add("datasetName", datasetName) .add("flowId", flowId) .add("flowName", flowName) + .add("graphId", graphId) .add("host", host) .add("maintenanceId", maintenanceId) .add("materializationName", materializationName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index ff14ee59b..78977dc16 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -139,6 +139,10 @@ public class PipelineSpec { @JsonProperty("trigger") private PipelineTrigger trigger; + /** Usage policy of this pipeline. 
*/ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + public PipelineSpec setBudgetPolicyId(String budgetPolicyId) { this.budgetPolicyId = budgetPolicyId; return this; @@ -382,6 +386,15 @@ public PipelineTrigger getTrigger() { return trigger; } + public PipelineSpec setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -413,7 +426,8 @@ public boolean equals(Object o) { && Objects.equals(storage, that.storage) && Objects.equals(tags, that.tags) && Objects.equals(target, that.target) - && Objects.equals(trigger, that.trigger); + && Objects.equals(trigger, that.trigger) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override @@ -445,7 +459,8 @@ public int hashCode() { storage, tags, target, - trigger); + trigger, + usagePolicyId); } @Override @@ -478,6 +493,7 @@ public String toString() { .add("tags", tags) .add("target", target) .add("trigger", trigger) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 0a270aad3..4504eb9d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -89,6 +89,14 @@ public GetPipelineResponse waitGetPipelineIdle( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } + /** + * * Applies the current pipeline environment onto the pipeline compute. The environment applied + * can be used by subsequent dev-mode updates. + */ + public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) { + return impl.applyEnvironment(request); + } + /** * Creates a new data processing pipeline based on the requested configuration. If successful, * this method returns the ID of the new pipeline. @@ -191,6 +199,14 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) { return impl.listUpdates(request); } + /** + * * Restores a pipeline that was previously deleted, if within the restoration window. All tables + * deleted at pipeline deletion will be undropped as well. + */ + public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) { + return impl.restorePipeline(request); + } + /** * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct * permissions if none are specified. Objects can inherit permissions from their root object. 
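Note on the PipelinesAPI additions above: both new endpoints are plain POSTs that return empty response bodies. A minimal usage sketch follows (illustrative only, not part of the generated diff; it assumes ApplyEnvironmentRequest exposes setPipelineId the same way RestorePipelineRequest further below does, since that request class is generated outside this hunk, and the pipeline ID is a placeholder).

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.ApplyEnvironmentRequest;
import com.databricks.sdk.service.pipelines.RestorePipelineRequest;

public class PipelineLifecycleSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String pipelineId = "1234-567890-abcdef"; // placeholder pipeline ID

    // POST /api/2.0/pipelines/{pipeline_id}/environment/apply: applies the
    // current pipeline environment onto the pipeline compute so that
    // subsequent dev-mode updates can reuse it.
    w.pipelines().applyEnvironment(new ApplyEnvironmentRequest().setPipelineId(pipelineId));

    // POST /api/2.0/pipelines/{pipeline_id}/restore: undeletes the pipeline
    // (and undrops its tables) if still within the restoration window.
    w.pipelines().restorePipeline(new RestorePipelineRequest().setPipelineId(pipelineId));
  }
}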
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java index 0fdad690a..9d044602a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java @@ -16,6 +16,19 @@ public PipelinesImpl(ApiClient apiClient) { this.apiClient = apiClient; } + @Override + public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) { + String path = String.format("/api/2.0/pipelines/%s/environment/apply", request.getPipelineId()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ApplyEnvironmentRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public CreatePipelineResponse create(CreatePipeline request) { String path = "/api/2.0/pipelines"; @@ -139,6 +152,19 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) { } } + @Override + public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) { + String path = String.format("/api/2.0/pipelines/%s/restore", request.getPipelineId()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RestorePipelineRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public PipelinePermissions setPermissions(PipelinePermissionsRequest request) { String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index d2d0a81c4..d0fa81a7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -24,6 +24,12 @@ */ @Generated public interface PipelinesService { + /** + * * Applies the current pipeline environment onto the pipeline compute. The environment applied + * can be used by subsequent dev-mode updates. + */ + ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest applyEnvironmentRequest); + /** * Creates a new data processing pipeline based on the requested configuration. If successful, * this method returns the ID of the new pipeline. @@ -61,6 +67,12 @@ ListPipelineEventsResponse listPipelineEvents( /** List updates for an active pipeline. */ ListUpdatesResponse listUpdates(ListUpdatesRequest listUpdatesRequest); + /** + * * Restores a pipeline that was previously deleted, if within the restoration window. All tables + * deleted at pipeline deletion will be undropped as well. + */ + RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest restorePipelineRequest); + /** * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct * permissions if none are specified. Objects can inherit permissions from their root object. 
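A second hedged sketch, covering the new request fields threaded through this diff (usage_policy_id, connection_parameters.source_catalog, and row_filter). All values are placeholders; setName is a pre-existing CreatePipeline setter not shown in this hunk, and the built gateway/table objects would normally be embedded in a full pipeline spec rather than used standalone.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.ConnectionParameters;
import com.databricks.sdk.service.pipelines.CreatePipeline;
import com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition;
import com.databricks.sdk.service.pipelines.TableSpecificConfig;

public class NewPipelineFieldsSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // usage_policy_id: attach a serverless usage policy at create time; the
    // value actually applied is surfaced as effective_usage_policy_id on get().
    w.pipelines()
        .create(new CreatePipeline().setName("demo-pipeline").setUsagePolicyId("policy-123"));

    // connection_parameters.source_catalog: initial catalog for schema
    // exploration (an Oracle service name, or a Postgres database name).
    IngestionGatewayPipelineDefinition gateway =
        new IngestionGatewayPipelineDefinition()
            .setConnectionName("oracle-conn")
            .setConnectionParameters(new ConnectionParameters().setSourceCatalog("ORCLPDB1"));

    // row_filter: a bare DBSQL condition, with no WHERE keyword.
    TableSpecificConfig tableConfig = new TableSpecificConfig().setRowFilter("region = 'EMEA'");
  }
}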
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java new file mode 100755 index 000000000..c9b91f8ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class RestorePipelineRequest { + /** The ID of the pipeline to restore */ + @JsonIgnore private String pipelineId; + + public RestorePipelineRequest setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestorePipelineRequest that = (RestorePipelineRequest) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(RestorePipelineRequest.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java new file mode 100755 index 000000000..293d32256 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class RestorePipelineRequestResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RestorePipelineRequestResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java index 7626f06c3..75a00df90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -37,6 +37,13 @@ public class TableSpecificConfig { private IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig queryBasedConnectorConfig; + /** + * (Optional, Immutable) The row filter condition to be applied to the table. It must not contain + * the WHERE keyword, only the actual filter condition. It must be in DBSQL format. + */ + @JsonProperty("row_filter") + private String rowFilter; + /** * If true, formula fields defined in the table are included in the ingestion. 
This setting is * only valid for the Salesforce connector @@ -98,6 +105,15 @@ public TableSpecificConfig setQueryBasedConnectorConfig( return queryBasedConnectorConfig; } + public TableSpecificConfig setRowFilter(String rowFilter) { + this.rowFilter = rowFilter; + return this; + } + + public String getRowFilter() { + return rowFilter; + } + public TableSpecificConfig setSalesforceIncludeFormulaFields( Boolean salesforceIncludeFormulaFields) { this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields; @@ -145,6 +161,7 @@ public boolean equals(Object o) { && Objects.equals(includeColumns, that.includeColumns) && Objects.equals(primaryKeys, that.primaryKeys) && Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig) + && Objects.equals(rowFilter, that.rowFilter) && Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields) && Objects.equals(scdType, that.scdType) && Objects.equals(sequenceBy, that.sequenceBy) @@ -158,6 +175,7 @@ public int hashCode() { includeColumns, primaryKeys, queryBasedConnectorConfig, + rowFilter, salesforceIncludeFormulaFields, scdType, sequenceBy, @@ -171,6 +189,7 @@ public String toString() { .add("includeColumns", includeColumns) .add("primaryKeys", primaryKeys) .add("queryBasedConnectorConfig", queryBasedConnectorConfig) + .add("rowFilter", rowFilter) .add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields) .add("scdType", scdType) .add("sequenceBy", sequenceBy) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java index 59982f4d8..823232d6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java @@ -41,6 +41,13 @@ public class UpdateInfo { @JsonProperty("full_refresh_selection") private Collection fullRefreshSelection; + /** + * Indicates whether the update is either part of a continuous job run, or running in legacy + * continuous pipeline mode. + */ + @JsonProperty("mode") + private UpdateMode mode; + /** The ID of the pipeline. 
*/ @JsonProperty("pipeline_id") private String pipelineId; @@ -122,6 +129,15 @@ public Collection getFullRefreshSelection() { return fullRefreshSelection; } + public UpdateInfo setMode(UpdateMode mode) { + this.mode = mode; + return this; + } + + public UpdateMode getMode() { + return mode; + } + public UpdateInfo setPipelineId(String pipelineId) { this.pipelineId = pipelineId; return this; @@ -178,6 +194,7 @@ public boolean equals(Object o) { && Objects.equals(creationTime, that.creationTime) && Objects.equals(fullRefresh, that.fullRefresh) && Objects.equals(fullRefreshSelection, that.fullRefreshSelection) + && Objects.equals(mode, that.mode) && Objects.equals(pipelineId, that.pipelineId) && Objects.equals(refreshSelection, that.refreshSelection) && Objects.equals(state, that.state) @@ -194,6 +211,7 @@ public int hashCode() { creationTime, fullRefresh, fullRefreshSelection, + mode, pipelineId, refreshSelection, state, @@ -210,6 +228,7 @@ public String toString() { .add("creationTime", creationTime) .add("fullRefresh", fullRefresh) .add("fullRefreshSelection", fullRefreshSelection) + .add("mode", mode) .add("pipelineId", pipelineId) .add("refreshSelection", refreshSelection) .add("state", state) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java new file mode 100755 index 000000000..752421818 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateMode.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum UpdateMode { + CONTINUOUS, + DEFAULT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java index aeaa56801..9cd72cb1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java @@ -22,9 +22,9 @@ public class AwsKeyInfo { private String keyRegion; /** - * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to - * `true` or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to - * use this key for encrypting EBS volumes, set to `false`. + * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true + * or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this + * key for encrypting EBS volumes, set to false. */ @JsonProperty("reuse_key_for_cluster_volumes") private Boolean reuseKeyForClusterVolumes; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java new file mode 100755 index 000000000..69ababff2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java @@ -0,0 +1,126 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AzureKeyInfo { + /** + * The Disk Encryption Set id that is used to represent the key info used for Managed Disk BYOK + * use case + */ + @JsonProperty("disk_encryption_set_id") + private String diskEncryptionSetId; + + /** + * The structure to store key access credential This is set if the Managed Identity is being used + * to access the Azure Key Vault key. + */ + @JsonProperty("key_access_configuration") + private KeyAccessConfiguration keyAccessConfiguration; + + /** The name of the key in KeyVault. */ + @JsonProperty("key_name") + private String keyName; + + /** The base URI of the KeyVault. */ + @JsonProperty("key_vault_uri") + private String keyVaultUri; + + /** The tenant id where the KeyVault lives. */ + @JsonProperty("tenant_id") + private String tenantId; + + /** The current key version. */ + @JsonProperty("version") + private String version; + + public AzureKeyInfo setDiskEncryptionSetId(String diskEncryptionSetId) { + this.diskEncryptionSetId = diskEncryptionSetId; + return this; + } + + public String getDiskEncryptionSetId() { + return diskEncryptionSetId; + } + + public AzureKeyInfo setKeyAccessConfiguration(KeyAccessConfiguration keyAccessConfiguration) { + this.keyAccessConfiguration = keyAccessConfiguration; + return this; + } + + public KeyAccessConfiguration getKeyAccessConfiguration() { + return keyAccessConfiguration; + } + + public AzureKeyInfo setKeyName(String keyName) { + this.keyName = keyName; + return this; + } + + public String getKeyName() { + return keyName; + } + + public AzureKeyInfo setKeyVaultUri(String keyVaultUri) { + this.keyVaultUri = keyVaultUri; + return this; + } + + public String getKeyVaultUri() { + return keyVaultUri; + } + + public AzureKeyInfo setTenantId(String tenantId) { + this.tenantId = tenantId; + return this; + } + + public String getTenantId() { + return tenantId; + } + + public AzureKeyInfo setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureKeyInfo that = (AzureKeyInfo) o; + return Objects.equals(diskEncryptionSetId, that.diskEncryptionSetId) + && Objects.equals(keyAccessConfiguration, that.keyAccessConfiguration) + && Objects.equals(keyName, that.keyName) + && Objects.equals(keyVaultUri, that.keyVaultUri) + && Objects.equals(tenantId, that.tenantId) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash( + diskEncryptionSetId, keyAccessConfiguration, keyName, keyVaultUri, tenantId, version); + } + + @Override + public String toString() { + return new ToStringer(AzureKeyInfo.class) + .add("diskEncryptionSetId", diskEncryptionSetId) + .add("keyAccessConfiguration", keyAccessConfiguration) + .add("keyName", keyName) + .add("keyVaultUri", keyVaultUri) + .add("tenantId", tenantId) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java index ca8290c0b..721620a58 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java @@ -7,7 +7,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** The general workspace configurations that are specific to cloud providers. */ @Generated public class CloudResourceContainer { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java index ae5ac50f5..6478ac476 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateAwsKeyInfo.java @@ -13,17 +13,18 @@ public class CreateAwsKeyInfo { @JsonProperty("key_alias") private String keyAlias; - /** - * The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from - * the ARN. - */ + /** The AWS KMS key's Amazon Resource Name (ARN). */ @JsonProperty("key_arn") private String keyArn; + /** The AWS KMS key region. */ + @JsonProperty("key_region") + private String keyRegion; + /** - * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to - * `true` or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key - * also for encrypting EBS volumes, set this to `false`. + * This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true + * or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this + * key for encrypting EBS volumes, set to false. 
*/ @JsonProperty("reuse_key_for_cluster_volumes") private Boolean reuseKeyForClusterVolumes; @@ -46,6 +47,15 @@ public String getKeyArn() { return keyArn; } + public CreateAwsKeyInfo setKeyRegion(String keyRegion) { + this.keyRegion = keyRegion; + return this; + } + + public String getKeyRegion() { + return keyRegion; + } + public CreateAwsKeyInfo setReuseKeyForClusterVolumes(Boolean reuseKeyForClusterVolumes) { this.reuseKeyForClusterVolumes = reuseKeyForClusterVolumes; return this; @@ -62,12 +72,13 @@ public boolean equals(Object o) { CreateAwsKeyInfo that = (CreateAwsKeyInfo) o; return Objects.equals(keyAlias, that.keyAlias) && Objects.equals(keyArn, that.keyArn) + && Objects.equals(keyRegion, that.keyRegion) && Objects.equals(reuseKeyForClusterVolumes, that.reuseKeyForClusterVolumes); } @Override public int hashCode() { - return Objects.hash(keyAlias, keyArn, reuseKeyForClusterVolumes); + return Objects.hash(keyAlias, keyArn, keyRegion, reuseKeyForClusterVolumes); } @Override @@ -75,6 +86,7 @@ public String toString() { return new ToStringer(CreateAwsKeyInfo.class) .add("keyAlias", keyAlias) .add("keyArn", keyArn) + .add("keyRegion", keyRegion) .add("reuseKeyForClusterVolumes", reuseKeyForClusterVolumes) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java index 461005798..c31784e77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateCredentialStsRole.java @@ -9,7 +9,7 @@ @Generated public class CreateCredentialStsRole { - /** The Amazon Resource Name (ARN) of the cross account role. */ + /** The Amazon Resource Name (ARN) of the cross account IAM role. */ @JsonProperty("role_arn") private String roleArn; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java index 2d62f0470..f070c7a31 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java @@ -9,7 +9,10 @@ @Generated public class CreateGcpKeyInfo { - /** The GCP KMS key's resource name */ + /** + * Globally unique kms key resource id of the form + * projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4 + */ @JsonProperty("kms_key_id") private String kmsKeyId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java index 9dff47e10..65b9799c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java @@ -37,8 +37,8 @@ public class CreateNetworkRequest { private NetworkVpcEndpoints vpcEndpoints; /** - * The ID of the VPC associated with this network. VPC IDs can be used in multiple network - * configurations. + * The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple + * networks. 
*/ @JsonProperty("vpc_id") private String vpcId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java index aed606c1f..bf8d4bd70 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreatePrivateAccessSettingsRequest.java @@ -11,23 +11,24 @@ @Generated public class CreatePrivateAccessSettingsRequest { /** - * An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when - * registering the VPC endpoint configuration in your Databricks account. This is not the ID of - * the VPC endpoint in AWS. - * - *
<p>
Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC - * endpoints that in your account that can connect to your workspace over AWS PrivateLink. - * - *
<p>
If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, - * this control only works for PrivateLink connections. To control how your workspace is accessed - * via public internet, see [IP access lists]. - * - *
<p>
[IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html + * An array of Databricks VPC endpoint IDs. This is the Databricks ID returned when registering + * the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC + * endpoint in AWS. Only used when private_access_level is set to ENDPOINT. This is an allow list + * of VPC endpoints registered in your Databricks account that can connect to your workspace over + * AWS PrivateLink. Note: If hybrid access to your workspace is enabled by setting + * public_access_enabled to true, this control only works for PrivateLink connections. To control + * how your workspace is accessed via public internet, see IP access lists. */ @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; - /** */ + /** + * The private access level controls which VPC endpoints can connect to the UI or API of any + * workspace that attaches this private access settings object. `ACCOUNT` level access (the + * default) allows only VPC endpoints that are registered in your Databricks account connect to + * your workspace. `ENDPOINT` level access allows only specified VPC endpoints connect to your + * workspace. For details, see allowed_vpc_endpoint_ids. + */ @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; @@ -37,14 +38,13 @@ public class CreatePrivateAccessSettingsRequest { /** * Determines if the workspace can be accessed over public internet. For fully private workspaces, - * you can optionally specify `false`, but only if you implement both the front-end and the - * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is - * enabled. + * you can optionally specify false, but only if you implement both the front-end and the back-end + * PrivateLink connections. Otherwise, specify true, which means that public access is enabled. */ @JsonProperty("public_access_enabled") private Boolean publicAccessEnabled; - /** The cloud region for workspaces associated with this private access settings object. */ + /** The AWS region for workspaces attached to this private access settings object. */ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java index 17bbcebc8..5c1e21eeb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java @@ -9,7 +9,17 @@ @Generated public class CreateStorageConfigurationRequest { - /** */ + /** + * Optional IAM role that is used to access the workspace catalog which is created during + * workspace creation for UC by Default. If a storage configuration with this field populated is + * used to create a workspace, then a workspace catalog is created together with the workspace. + * The workspace catalog shares the root bucket with internal workspace storage (including DBFS + * root) but uses a dedicated bucket path prefix. + */ + @JsonProperty("role_arn") + private String roleArn; + + /** Root S3 bucket information. 
*/ @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; @@ -17,6 +27,15 @@ public class CreateStorageConfigurationRequest { @JsonProperty("storage_configuration_name") private String storageConfigurationName; + public CreateStorageConfigurationRequest setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + public CreateStorageConfigurationRequest setRootBucketInfo(RootBucketInfo rootBucketInfo) { this.rootBucketInfo = rootBucketInfo; return this; @@ -41,18 +60,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateStorageConfigurationRequest that = (CreateStorageConfigurationRequest) o; - return Objects.equals(rootBucketInfo, that.rootBucketInfo) + return Objects.equals(roleArn, that.roleArn) + && Objects.equals(rootBucketInfo, that.rootBucketInfo) && Objects.equals(storageConfigurationName, that.storageConfigurationName); } @Override public int hashCode() { - return Objects.hash(rootBucketInfo, storageConfigurationName); + return Objects.hash(roleArn, rootBucketInfo, storageConfigurationName); } @Override public String toString() { return new ToStringer(CreateStorageConfigurationRequest.class) + .add("roleArn", roleArn) .add("rootBucketInfo", rootBucketInfo) .add("storageConfigurationName", storageConfigurationName) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java index 00ed46856..4c13c8d3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java @@ -13,11 +13,11 @@ public class CreateVpcEndpointRequest { @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; - /** */ + /** The cloud info of this vpc endpoint. */ @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; - /** The AWS region in which this VPC endpoint object exists. */ + /** The region in which this VPC endpoint object exists. */ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java index 31d107a91..b478d7c6f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java @@ -10,14 +10,11 @@ @Generated public class CreateWorkspaceRequest { - /** The AWS region of the workspace's data plane. */ + /** */ @JsonProperty("aws_region") private String awsRegion; - /** - * The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field - * to `gcp`. - */ + /** The cloud name. This field always has the value `gcp`. 
*/ @JsonProperty("cloud") private String cloud; @@ -25,6 +22,16 @@ public class CreateWorkspaceRequest { @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; + /** + * If the compute mode is `SERVERLESS`, a serverless workspace is created that comes + * pre-configured with serverless compute and default storage, providing a fully-managed, + * enterprise-ready SaaS experience. This means you don't need to provide any resources managed by + * you, such as credentials, storage, or network. If the compute mode is `HYBRID` (which is the + * default option), a classic workspace is created that uses customer-managed resources. + */ + @JsonProperty("compute_mode") + private CustomerFacingComputeMode computeMode; + /** ID of the workspace's credential configuration object. */ @JsonProperty("credentials_id") private String credentialsId; @@ -39,28 +46,21 @@ public class CreateWorkspaceRequest { /** * The deployment name defines part of the subdomain for the workspace. The workspace URL for the - * web application and REST APIs is `.cloud.databricks.com`. For - * example, if the deployment name is `abcsales`, your workspace URL will be - * `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the - * set of characters that are allowed in a subdomain. - * - *
<p>
To set this value, you must have a deployment name prefix. Contact your Databricks account - * team to add an account deployment name prefix to your account. - * - *
<p>
Workspace deployment names follow the account prefix and a hyphen. For example, if your - * account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the - * JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL - * would be `acme-workspace-1.cloud.databricks.com`. - * - *
<p>
You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the - * deployment name to only include the deployment prefix. For example, if your account's - * deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` - * becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. - * - *
<p>
This value must be unique across all non-deleted deployments across all AWS regions. - * - *
<p>
If a new workspace omits this property, the server generates a unique deployment name for - * you with the pattern `dbc-xxxxxxxx-xxxx`. + * web application and REST APIs is .cloud.databricks.com. For example, + * if the deployment name is abcsales, your workspace URL will be + * https://abcsales.cloud.databricks.com. Hyphens are allowed. This property supports only the set + * of characters that are allowed in a subdomain. To set this value, you must have a deployment + * name prefix. Contact your Databricks account team to add an account deployment name prefix to + * your account. Workspace deployment names follow the account prefix and a hyphen. For example, + * if your account's deployment prefix is acme and the workspace deployment name is workspace-1, + * the JSON response for the deployment_name field becomes acme-workspace-1. The workspace URL + * would be acme-workspace-1.cloud.databricks.com. You can also set the deployment_name to the + * reserved keyword EMPTY if you want the deployment name to only include the deployment prefix. + * For example, if your account's deployment prefix is acme and the workspace deployment name is + * EMPTY, the deployment_name becomes acme only and the workspace URL is + * acme.cloud.databricks.com. This value must be unique across all non-deleted deployments across + * all AWS regions. If a new workspace omits this property, the server generates a unique + * deployment name for you with the pattern dbc-xxxxxxxx-xxxx. */ @JsonProperty("deployment_name") private String deploymentName; @@ -73,13 +73,9 @@ public class CreateWorkspaceRequest { @JsonProperty("gke_config") private GkeConfig gkeConfig; - /** Whether no public IP is enabled for the workspace. */ - @JsonProperty("is_no_public_ip_enabled") - private Boolean isNoPublicIpEnabled; - /** - * The Google Cloud region of the workspace data plane in your Google account. For example, - * `us-east4`. + * The Google Cloud region of the workspace data plane in your Google account (for example, + * `us-east4`). */ @JsonProperty("location") private String location; @@ -87,13 +83,16 @@ public class CreateWorkspaceRequest { /** * The ID of the workspace's managed services encryption key configuration object. This is used to * help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, - * and query history. The provided key configuration object property `use_cases` must contain - * `MANAGED_SERVICES`. + * and query history. The provided key configuration object property use_cases must contain + * MANAGED_SERVICES. */ @JsonProperty("managed_services_customer_managed_key_id") private String managedServicesCustomerManagedKeyId; - /** */ + /** + * The ID of the workspace's network configuration object. To use AWS PrivateLink, this field is + * required. + */ @JsonProperty("network_id") private String networkId; @@ -102,11 +101,10 @@ public class CreateWorkspaceRequest { private PricingTier pricingTier; /** - * ID of the workspace's private access settings object. Only used for PrivateLink. This ID must - * be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace + * ID of the workspace's private access settings object. Only used for PrivateLink. You must + * specify this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace * connection), back-end (data plane to control plane connection), or both connection types. - * - *
<p>
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", + * Before configuring PrivateLink, read the [Databricks article about PrivateLink].", * *
<p>
[AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about * PrivateLink]: @@ -115,19 +113,19 @@ public class CreateWorkspaceRequest { @JsonProperty("private_access_settings_id") private String privateAccessSettingsId; - /** The ID of the workspace's storage configuration object. */ + /** ID of the workspace's storage configuration object. */ @JsonProperty("storage_configuration_id") private String storageConfigurationId; /** * The ID of the workspace's storage encryption key configuration object. This is used to encrypt * the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS - * volumes. The provided key configuration object property `use_cases` must contain `STORAGE`. + * volumes. The provided key configuration object property use_cases must contain STORAGE. */ @JsonProperty("storage_customer_managed_key_id") private String storageCustomerManagedKeyId; - /** The workspace's human-readable name. */ + /** The human-readable name of the workspace. */ @JsonProperty("workspace_name") private String workspaceName; @@ -159,6 +157,15 @@ public CloudResourceContainer getCloudResourceContainer() { return cloudResourceContainer; } + public CreateWorkspaceRequest setComputeMode(CustomerFacingComputeMode computeMode) { + this.computeMode = computeMode; + return this; + } + + public CustomerFacingComputeMode getComputeMode() { + return computeMode; + } + public CreateWorkspaceRequest setCredentialsId(String credentialsId) { this.credentialsId = credentialsId; return this; @@ -205,15 +212,6 @@ public GkeConfig getGkeConfig() { return gkeConfig; } - public CreateWorkspaceRequest setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { - this.isNoPublicIpEnabled = isNoPublicIpEnabled; - return this; - } - - public Boolean getIsNoPublicIpEnabled() { - return isNoPublicIpEnabled; - } - public CreateWorkspaceRequest setLocation(String location) { this.location = location; return this; @@ -295,12 +293,12 @@ public boolean equals(Object o) { return Objects.equals(awsRegion, that.awsRegion) && Objects.equals(cloud, that.cloud) && Objects.equals(cloudResourceContainer, that.cloudResourceContainer) + && Objects.equals(computeMode, that.computeMode) && Objects.equals(credentialsId, that.credentialsId) && Objects.equals(customTags, that.customTags) && Objects.equals(deploymentName, that.deploymentName) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) - && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) @@ -318,12 +316,12 @@ public int hashCode() { awsRegion, cloud, cloudResourceContainer, + computeMode, credentialsId, customTags, deploymentName, gcpManagedNetworkConfig, gkeConfig, - isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, networkId, @@ -340,12 +338,12 @@ public String toString() { .add("awsRegion", awsRegion) .add("cloud", cloud) .add("cloudResourceContainer", cloudResourceContainer) + .add("computeMode", computeMode) .add("credentialsId", credentialsId) .add("customTags", customTags) .add("deploymentName", deploymentName) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) .add("gkeConfig", gkeConfig) - .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkId", networkId) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java index 40ca82451..035ea5659 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsAPI.java @@ -48,16 +48,16 @@ public Credential create(CreateCredentialRequest request) { return impl.create(request); } - public void delete(String credentialsId) { - delete(new DeleteCredentialRequest().setCredentialsId(credentialsId)); + public Credential delete(String credentialsId) { + return delete(new DeleteCredentialRequest().setCredentialsId(credentialsId)); } /** * Deletes a Databricks credential configuration object for an account, both specified by ID. You * cannot delete a credential that is associated with any workspace. */ - public void delete(DeleteCredentialRequest request) { - impl.delete(request); + public Credential delete(DeleteCredentialRequest request) { + return impl.delete(request); } public Credential get(String credentialsId) { @@ -69,7 +69,7 @@ public Credential get(GetCredentialRequest request) { return impl.get(request); } - /** Gets all Databricks credential configurations associated with an account specified by ID. */ + /** List Databricks credential configuration objects for an account, specified by ID. */ public Iterable list() { return impl.list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java index 4aca2d8bb..029b5e209 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java @@ -33,7 +33,7 @@ public Credential create(CreateCredentialRequest request) { } @Override - public void delete(DeleteCredentialRequest request) { + public Credential delete(DeleteCredentialRequest request) { String path = String.format( "/api/2.0/accounts/%s/credentials/%s", @@ -42,7 +42,7 @@ public void delete(DeleteCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, Credential.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java index 620a274b6..31a55161c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsService.java @@ -38,11 +38,11 @@ public interface CredentialsService { * Deletes a Databricks credential configuration object for an account, both specified by ID. You * cannot delete a credential that is associated with any workspace. */ - void delete(DeleteCredentialRequest deleteCredentialRequest); + Credential delete(DeleteCredentialRequest deleteCredentialRequest); /** Gets a Databricks credential configuration object for an account, both specified by ID. 
*/ Credential get(GetCredentialRequest getCredentialRequest); - /** Gets all Databricks credential configurations associated with an account specified by ID. */ + /** List Databricks credential configuration objects for an account, specified by ID. */ Collection list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingComputeMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingComputeMode.java new file mode 100755 index 000000000..e6b581f25 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingComputeMode.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; + +/** + * Corresponds to compute mode defined here: + * https://src.dev.databricks.com/databricks/universe@9076536b18479afd639d1c1f9dd5a59f72215e69/-/blob/central/api/common.proto?L872 + */ +@Generated +public enum CustomerFacingComputeMode { + HYBRID, + SERVERLESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java index 5781e69d3..cf3d76e0c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingGcpCloudResourceContainer.java @@ -7,13 +7,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** The general workspace configurations that are specific to Google Cloud. */ @Generated public class CustomerFacingGcpCloudResourceContainer { - /** - * The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your - * workspace. - */ + /** */ @JsonProperty("project_id") private String projectId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingStorageMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingStorageMode.java new file mode 100755 index 000000000..db8196275 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerFacingStorageMode.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum CustomerFacingStorageMode { + CUSTOMER_HOSTED, + DEFAULT_STORAGE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java index 40da65d77..737b2a075 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CustomerManagedKey.java @@ -18,6 +18,10 @@ public class CustomerManagedKey { @JsonProperty("aws_key_info") private AwsKeyInfo awsKeyInfo; + /** */ + @JsonProperty("azure_key_info") + private AzureKeyInfo azureKeyInfo; + /** Time in epoch milliseconds when the customer key was created. 
*/ @JsonProperty("creation_time") private Long creationTime; @@ -52,6 +56,15 @@ public AwsKeyInfo getAwsKeyInfo() { return awsKeyInfo; } + public CustomerManagedKey setAzureKeyInfo(AzureKeyInfo azureKeyInfo) { + this.azureKeyInfo = azureKeyInfo; + return this; + } + + public AzureKeyInfo getAzureKeyInfo() { + return azureKeyInfo; + } + public CustomerManagedKey setCreationTime(Long creationTime) { this.creationTime = creationTime; return this; @@ -95,6 +108,7 @@ public boolean equals(Object o) { CustomerManagedKey that = (CustomerManagedKey) o; return Objects.equals(accountId, that.accountId) && Objects.equals(awsKeyInfo, that.awsKeyInfo) + && Objects.equals(azureKeyInfo, that.azureKeyInfo) && Objects.equals(creationTime, that.creationTime) && Objects.equals(customerManagedKeyId, that.customerManagedKeyId) && Objects.equals(gcpKeyInfo, that.gcpKeyInfo) @@ -104,7 +118,13 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - accountId, awsKeyInfo, creationTime, customerManagedKeyId, gcpKeyInfo, useCases); + accountId, + awsKeyInfo, + azureKeyInfo, + creationTime, + customerManagedKeyId, + gcpKeyInfo, + useCases); } @Override @@ -112,6 +132,7 @@ public String toString() { return new ToStringer(CustomerManagedKey.class) .add("accountId", accountId) .add("awsKeyInfo", awsKeyInfo) + .add("azureKeyInfo", azureKeyInfo) .add("creationTime", creationTime) .add("customerManagedKeyId", customerManagedKeyId) .add("gcpKeyInfo", gcpKeyInfo) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java index d58b68aa2..98d6f09f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java @@ -9,7 +9,7 @@ @Generated public class DeletePrivateAccesRequest { - /** Databricks Account API private access settings ID. */ + /** */ @JsonIgnore private String privateAccessSettingsId; public DeletePrivateAccesRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java index 3ff26d6da..9c7f773f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java @@ -9,7 +9,7 @@ @Generated public class DeleteStorageRequest { - /** Databricks Account API storage configuration ID. 
*/ + /** */ @JsonIgnore private String storageConfigurationId; public DeleteStorageRequest setStorageConfigurationId(String storageConfigurationId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java index 7a3ebaf99..7c61b9be2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java @@ -9,7 +9,7 @@ @Generated public class DeleteVpcEndpointRequest { - /** Databricks VPC endpoint ID. */ + /** */ @JsonIgnore private String vpcEndpointId; public DeleteVpcEndpointRequest setVpcEndpointId(String vpcEndpointId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java index 3ee6f1ee3..f5ede1c3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java @@ -9,7 +9,7 @@ @Generated public class DeleteWorkspaceRequest { - /** Workspace ID. */ + /** */ @JsonIgnore private Long workspaceId; public DeleteWorkspaceRequest setWorkspaceId(Long workspaceId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java index cc89ea513..a9c6d4ad7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java @@ -56,16 +56,16 @@ public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) { return impl.create(request); } - public void delete(String customerManagedKeyId) { - delete(new DeleteEncryptionKeyRequest().setCustomerManagedKeyId(customerManagedKeyId)); + public CustomerManagedKey delete(String customerManagedKeyId) { + return delete(new DeleteEncryptionKeyRequest().setCustomerManagedKeyId(customerManagedKeyId)); } /** * Deletes a customer-managed key configuration object for an account. You cannot delete a * configuration that is associated with a running workspace. */ - public void delete(DeleteEncryptionKeyRequest request) { - impl.delete(request); + public CustomerManagedKey delete(DeleteEncryptionKeyRequest request) { + return impl.delete(request); } public CustomerManagedKey get(String customerManagedKeyId) { @@ -90,19 +90,7 @@ public CustomerManagedKey get(GetEncryptionKeyRequest request) { return impl.get(request); } - /** - * Gets all customer-managed key configuration objects for an account. If the key is specified as - * a workspace's managed services customer-managed key, Databricks uses the key to encrypt the - * workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries - * and query history. If the key is specified as a workspace's storage customer-managed key, the - * key is used to encrypt the workspace's root S3 bucket and optionally can encrypt cluster EBS - * volumes data in the data plane. - * - *
**Important**: Customer-managed keys are supported only for some deployment types, - * subscription types, and AWS regions. - * - *
This operation is available only if your account is on the E2 version of the platform. - */ + /** Lists Databricks customer-managed key configurations for an account. */ public Iterable list() { return impl.list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java index 20424aec0..22cf9aa46 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java @@ -34,7 +34,7 @@ public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) { } @Override - public void delete(DeleteEncryptionKeyRequest request) { + public CustomerManagedKey delete(DeleteEncryptionKeyRequest request) { String path = String.format( "/api/2.0/accounts/%s/customer-managed-keys/%s", @@ -43,7 +43,7 @@ public void delete(DeleteEncryptionKeyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, CustomerManagedKey.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java index 083d083ae..e9741ccb3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java @@ -46,7 +46,7 @@ public interface EncryptionKeysService { * Deletes a customer-managed key configuration object for an account. You cannot delete a * configuration that is associated with a running workspace. */ - void delete(DeleteEncryptionKeyRequest deleteEncryptionKeyRequest); + CustomerManagedKey delete(DeleteEncryptionKeyRequest deleteEncryptionKeyRequest); /** * Gets a customer-managed key configuration object for an account, specified by ID. This @@ -64,18 +64,6 @@ public interface EncryptionKeysService { */ CustomerManagedKey get(GetEncryptionKeyRequest getEncryptionKeyRequest); - /** - * Gets all customer-managed key configuration objects for an account. If the key is specified as - * a workspace's managed services customer-managed key, Databricks uses the key to encrypt the - * workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries - * and query history. If the key is specified as a workspace's storage customer-managed key, the - * key is used to encrypt the workspace's root S3 bucket and optionally can encrypt cluster EBS - * volumes data in the data plane. - * - *
**Important**: Customer-managed keys are supported only for some deployment types, - * subscription types, and AWS regions. - * - *
This operation is available only if your account is on the E2 version of the platform. - */ + /** Lists Databricks customer-managed key configurations for an account. */ Collection list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java index 8c3f268ba..db8f30d1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java @@ -4,12 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * This enumeration represents the type of Databricks VPC [endpoint service] that was used when - * creating this VPC endpoint. - * - *
[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html - */ @Generated public enum EndpointUseCase { DATAPLANE_RELAY_ACCESS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java index 38b0ddcdb..5ccb0ab61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java @@ -6,8 +6,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; /** - * The AWS resource associated with this error: credentials, VPC, subnet, security group, or network - * ACL. + * ErrorType and WarningType are used to represent the type of error or warning by NetworkHealth and + * NetworkWarning defined in central/api/accounts/accounts.proto */ @Generated public enum ErrorType { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java deleted file mode 100755 index 7654c68e7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java +++ /dev/null @@ -1,74 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.provisioning; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class ExternalCustomerInfo { - /** Email of the authoritative user. */ - @JsonProperty("authoritative_user_email") - private String authoritativeUserEmail; - - /** The authoritative user full name. 
*/ - @JsonProperty("authoritative_user_full_name") - private String authoritativeUserFullName; - - /** The legal entity name for the external workspace */ - @JsonProperty("customer_name") - private String customerName; - - public ExternalCustomerInfo setAuthoritativeUserEmail(String authoritativeUserEmail) { - this.authoritativeUserEmail = authoritativeUserEmail; - return this; - } - - public String getAuthoritativeUserEmail() { - return authoritativeUserEmail; - } - - public ExternalCustomerInfo setAuthoritativeUserFullName(String authoritativeUserFullName) { - this.authoritativeUserFullName = authoritativeUserFullName; - return this; - } - - public String getAuthoritativeUserFullName() { - return authoritativeUserFullName; - } - - public ExternalCustomerInfo setCustomerName(String customerName) { - this.customerName = customerName; - return this; - } - - public String getCustomerName() { - return customerName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ExternalCustomerInfo that = (ExternalCustomerInfo) o; - return Objects.equals(authoritativeUserEmail, that.authoritativeUserEmail) - && Objects.equals(authoritativeUserFullName, that.authoritativeUserFullName) - && Objects.equals(customerName, that.customerName); - } - - @Override - public int hashCode() { - return Objects.hash(authoritativeUserEmail, authoritativeUserFullName, customerName); - } - - @Override - public String toString() { - return new ToStringer(ExternalCustomerInfo.class) - .add("authoritativeUserEmail", authoritativeUserEmail) - .add("authoritativeUserFullName", authoritativeUserFullName) - .add("customerName", customerName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpCommonNetworkConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpCommonNetworkConfig.java new file mode 100755 index 000000000..21b86acd9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpCommonNetworkConfig.java @@ -0,0 +1,67 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The shared network config for GCP workspace. This object has common network configurations that + * are network attributions of a workspace. DEPRECATED. Use GkeConfig instead. + */ +@Generated +public class GcpCommonNetworkConfig { + /** + * The IP range that will be used to allocate GKE cluster master resources from. This field must + * not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER. + */ + @JsonProperty("gke_cluster_master_ip_range") + private String gkeClusterMasterIpRange; + + /** The type of network connectivity of the GKE cluster. 
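Since GcpCommonNetworkConfig is documented as deprecated in favor of GkeConfig, a sketch of expressing the same two settings on GkeConfig; the setter names follow the generated set-and-return pattern used throughout this package:

import com.databricks.sdk.service.provisioning.GkeConfig;
import com.databricks.sdk.service.provisioning.GkeConfigConnectivityType;

public class GkeConfigExample {
  public static void main(String[] args) {
    // Private GKE nodes with a public master; per the field docs, the master IP
    // range must not be set for PUBLIC_NODE_PUBLIC_MASTER clusters.
    GkeConfig gke =
        new GkeConfig()
            .setConnectivityType(GkeConfigConnectivityType.PRIVATE_NODE_PUBLIC_MASTER)
            .setMasterIpRange("10.3.0.0/28");
    System.out.println(gke);
  }
}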
*/ + @JsonProperty("gke_connectivity_type") + private GkeConfigConnectivityType gkeConnectivityType; + + public GcpCommonNetworkConfig setGkeClusterMasterIpRange(String gkeClusterMasterIpRange) { + this.gkeClusterMasterIpRange = gkeClusterMasterIpRange; + return this; + } + + public String getGkeClusterMasterIpRange() { + return gkeClusterMasterIpRange; + } + + public GcpCommonNetworkConfig setGkeConnectivityType( + GkeConfigConnectivityType gkeConnectivityType) { + this.gkeConnectivityType = gkeConnectivityType; + return this; + } + + public GkeConfigConnectivityType getGkeConnectivityType() { + return gkeConnectivityType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpCommonNetworkConfig that = (GcpCommonNetworkConfig) o; + return Objects.equals(gkeClusterMasterIpRange, that.gkeClusterMasterIpRange) + && Objects.equals(gkeConnectivityType, that.gkeConnectivityType); + } + + @Override + public int hashCode() { + return Objects.hash(gkeClusterMasterIpRange, gkeConnectivityType); + } + + @Override + public String toString() { + return new ToStringer(GcpCommonNetworkConfig.class) + .add("gkeClusterMasterIpRange", gkeClusterMasterIpRange) + .add("gkeConnectivityType", gkeConnectivityType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java index be5eba082..2a6b12355 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java @@ -9,7 +9,10 @@ @Generated public class GcpKeyInfo { - /** The GCP KMS key's resource name */ + /** + * Globally unique kms key resource id of the form + * projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4 + */ @JsonProperty("kms_key_id") private String kmsKeyId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java index e27c531f4..d6e9e9efd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java @@ -7,46 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** - * The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - * It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range - * configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks - * detects an IP range overlap. - * - *
Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and - * all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - * `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - * - *
The sizes of these IP ranges affect the maximum number of nodes for the workspace. - * - *
**Important**: Confirm the IP ranges used by your Databricks workspace before creating the - * workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - * your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - * determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - * Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - * - *
[calculate subnet sizes for a new workspace]: - * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - */ +/** The network configuration for the workspace. */ @Generated public class GcpManagedNetworkConfig { - /** - * The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than - * `/21`. - */ + /** The IP range that will be used to allocate GKE cluster Pods from. */ @JsonProperty("gke_cluster_pod_ip_range") private String gkeClusterPodIpRange; - /** - * The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller - * than `/27`. - */ + /** The IP range that will be used to allocate GKE cluster Services from. */ @JsonProperty("gke_cluster_service_ip_range") private String gkeClusterServiceIpRange; /** - * The IP range from which to allocate GKE cluster nodes. No bigger than `/9` and no smaller than - * `/29`. + * The IP range which will be used to allocate GKE cluster nodes from. Note: Pods, services and + * master IP range must be mutually exclusive. */ @JsonProperty("subnet_cidr") private String subnetCidr; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java index 08f497789..ee4f4e301 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java @@ -7,42 +7,35 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** - * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - * secondary IP ranges). - */ @Generated public class GcpNetworkInfo { - /** The Google Cloud project ID of the VPC network. */ + /** The GCP project ID for network resources. This project is where the VPC and subnet resides. */ @JsonProperty("network_project_id") private String networkProjectId; /** - * The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP - * range for its pods. This secondary IP range can be used by only one workspace. + * Name of the secondary range within the subnet that will be used by GKE as Pod IP range. This is + * BYO VPC specific. DB VPC uses network.getGcpManagedNetworkConfig.getGkeClusterPodIpRange */ @JsonProperty("pod_ip_range_name") private String podIpRangeName; - /** - * The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP - * range for its services. This secondary IP range can be used by only one workspace. - */ + /** Name of the secondary range within the subnet that will be used by GKE as Service IP range. */ @JsonProperty("service_ip_range_name") private String serviceIpRangeName; - /** The ID of the subnet associated with this network. */ + /** + * The customer-provided Subnet ID that will be available to Clusters in Workspaces using this + * Network. + */ @JsonProperty("subnet_id") private String subnetId; - /** The Google Cloud region of the workspace data plane (for example, `us-east4`). */ + /** */ @JsonProperty("subnet_region") private String subnetRegion; - /** - * The ID of the VPC associated with this network. VPC IDs can be used in multiple network - * configurations. - */ + /** The customer-provided VPC ID. 
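A sketch of a customer-provided (BYO VPC) GcpNetworkInfo built with the generated setter chain; every ID and range name below is a placeholder:

import com.databricks.sdk.service.provisioning.GcpNetworkInfo;

public class GcpNetworkInfoExample {
  public static void main(String[] args) {
    GcpNetworkInfo info =
        new GcpNetworkInfo()
            .setNetworkProjectId("<gcp-project-id>")
            .setVpcId("<vpc-id>")
            .setSubnetId("<subnet-id>")
            .setSubnetRegion("us-east4")
            // Secondary ranges are BYO VPC specific, per the field docs above.
            .setPodIpRangeName("<pods-secondary-range-name>")
            .setServiceIpRangeName("<services-secondary-range-name>");
    System.out.println(info);
  }
}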
*/ @JsonProperty("vpc_id") private String vpcId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java index 3298b72bf..2cbbbbec5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpVpcEndpointInfo.java @@ -7,26 +7,25 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** The Google Cloud specific information for this Private Service Connect endpoint. */ @Generated public class GcpVpcEndpointInfo { - /** Region of the PSC endpoint. */ + /** */ @JsonProperty("endpoint_region") private String endpointRegion; - /** The Google Cloud project ID of the VPC network where the PSC connection resides. */ + /** */ @JsonProperty("project_id") private String projectId; - /** The unique ID of this PSC connection. */ + /** */ @JsonProperty("psc_connection_id") private String pscConnectionId; - /** The name of the PSC endpoint in the Google Cloud project. */ + /** */ @JsonProperty("psc_endpoint_name") private String pscEndpointName; - /** The service attachment this PSC connection connects to. */ + /** */ @JsonProperty("service_attachment_id") private String serviceAttachmentId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java index f4b3add8a..0d95a3d55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetCredentialRequest.java @@ -9,7 +9,7 @@ @Generated public class GetCredentialRequest { - /** Databricks Account API credential configuration ID */ + /** Credential configuration ID */ @JsonIgnore private String credentialsId; public GetCredentialRequest setCredentialsId(String credentialsId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java index 563d2ea88..afe93b072 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetPrivateAccesRequest.java @@ -9,7 +9,7 @@ @Generated public class GetPrivateAccesRequest { - /** Databricks Account API private access settings ID. */ + /** */ @JsonIgnore private String privateAccessSettingsId; public GetPrivateAccesRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java index f9a99b30a..15bae7e12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetStorageRequest.java @@ -9,7 +9,7 @@ @Generated public class GetStorageRequest { - /** Databricks Account API storage configuration ID. 
*/ + /** */ @JsonIgnore private String storageConfigurationId; public GetStorageRequest setStorageConfigurationId(String storageConfigurationId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java index 7b8b7bb44..30ece2cca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GetWorkspaceRequest.java @@ -9,7 +9,7 @@ @Generated public class GetWorkspaceRequest { - /** Workspace ID. */ + /** */ @JsonIgnore private Long workspaceId; public GetWorkspaceRequest setWorkspaceId(Long workspaceId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java index 064319e4f..4435e9994 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GkeConfig.java @@ -7,26 +7,16 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** The configurations for the GKE cluster of a Databricks workspace. */ +/** The configurations of the GKE cluster used by the GCP workspace. */ @Generated public class GkeConfig { - /** - * Specifies the network connectivity types for the GKE nodes and the GKE master network. - * - *
Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE - * nodes will not have public IPs. - * - *
Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE - * cluster have public IP addresses. - */ + /** The type of network connectivity of the GKE cluster. */ @JsonProperty("connectivity_type") private GkeConfigConnectivityType connectivityType; /** - * The IP range from which to allocate GKE cluster master resources. This field will be ignored if - * GKE private cluster is not enabled. - * - *
It must be exactly as big as `/28`. + * The IP range that will be used to allocate GKE cluster master resources from. This field must + * not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER. */ @JsonProperty("master_ip_range") private String masterIpRange; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyAccessConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyAccessConfiguration.java new file mode 100755 index 000000000..14dc163c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyAccessConfiguration.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The credential ID that is used to access the key vault. */ +@Generated +public class KeyAccessConfiguration { + /** */ + @JsonProperty("credential_id") + private String credentialId; + + public KeyAccessConfiguration setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + KeyAccessConfiguration that = (KeyAccessConfiguration) o; + return Objects.equals(credentialId, that.credentialId); + } + + @Override + public int hashCode() { + return Objects.hash(credentialId); + } + + @Override + public String toString() { + return new ToStringer(KeyAccessConfiguration.class) + .add("credentialId", credentialId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java index 1c019242d..308014cf0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/KeyUseCase.java @@ -4,15 +4,8 @@ import com.databricks.sdk.support.Generated; -/** - * Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control plane - * * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, optionally, - * cluster EBS volumes. - */ @Generated public enum KeyUseCase { - MANAGED_SERVICES, // Encrypts notebook and secret data in the control plane - STORAGE, // Encrypts the workspace's root S3 bucket (root DBFS and system data) and, - // optionally, cluster EBS volumes. - + MANAGED_SERVICES, + STORAGE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java index b149a4d48..39be0950e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java @@ -34,11 +34,17 @@ public class Network { @JsonProperty("network_name") private String networkName; - /** */ + /** + * IDs of one to five security groups associated with this network. Security group IDs **cannot** + * be used in multiple network configurations. 
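The cardinality rules on a network's security groups and subnets (one to five security groups, at least two subnets, neither reusable across configurations) matter when registering a network. A hedged sketch, assuming CreateNetworkRequest mirrors these fields as in the published SDK, with placeholder IDs; the NetworksAPI change below makes delete(...) return the deleted Network:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.provisioning.CreateNetworkRequest;
import com.databricks.sdk.service.provisioning.Network;
import java.util.Arrays;

public class NetworkExample {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();
    Network network =
        account.networks()
            .create(
                new CreateNetworkRequest()
                    .setNetworkName("my-byovpc-network")
                    .setVpcId("<vpc-id>")
                    // At least two subnets; subnet IDs cannot be reused across configs.
                    .setSubnetIds(Arrays.asList("<subnet-1>", "<subnet-2>"))
                    // One to five security groups.
                    .setSecurityGroupIds(Arrays.asList("<security-group-1>")));
    // delete(...) now returns the deleted configuration instead of void.
    Network deleted = account.networks().delete(network.getNetworkId());
    System.out.println("Deleted network: " + deleted.getNetworkName());
  }
}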
+ */ @JsonProperty("security_group_ids") private Collection securityGroupIds; - /** */ + /** + * IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in + * multiple network configurations. + */ @JsonProperty("subnet_ids") private Collection subnetIds; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java index 962a872a2..e227d5bfb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkVpcEndpoints.java @@ -8,12 +8,6 @@ import java.util.Collection; import java.util.Objects; -/** - * If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - * [AWS PrivateLink]. - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink/ - */ @Generated public class NetworkVpcEndpoints { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java index 02cc7a527..628802833 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksAPI.java @@ -34,8 +34,8 @@ public Network create(CreateNetworkRequest request) { return impl.create(request); } - public void delete(String networkId) { - delete(new DeleteNetworkRequest().setNetworkId(networkId)); + public Network delete(String networkId) { + return delete(new DeleteNetworkRequest().setNetworkId(networkId)); } /** @@ -44,8 +44,8 @@ public void delete(String networkId) { * *
This operation is available only if your account is on the E2 version of the platform. */ - public void delete(DeleteNetworkRequest request) { - impl.delete(request); + public Network delete(DeleteNetworkRequest request) { + return impl.delete(request); } public Network get(String networkId) { @@ -57,11 +57,7 @@ public Network get(GetNetworkRequest request) { return impl.get(request); } - /** - * Gets a list of all Databricks network configurations for an account, specified by ID. - * - *
This operation is available only if your account is on the E2 version of the platform. - */ + /** Lists Databricks network configurations for an account. */ public Iterable list() { return impl.list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java index 5a6c8d710..524e596ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java @@ -32,7 +32,7 @@ public Network create(CreateNetworkRequest request) { } @Override - public void delete(DeleteNetworkRequest request) { + public Network delete(DeleteNetworkRequest request) { String path = String.format( "/api/2.0/accounts/%s/networks/%s", @@ -41,7 +41,7 @@ public void delete(DeleteNetworkRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, Network.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksService.java index db13e6997..092c7b5bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksService.java @@ -26,15 +26,11 @@ public interface NetworksService { * *
This operation is available only if your account is on the E2 version of the platform. */ - void delete(DeleteNetworkRequest deleteNetworkRequest); + Network delete(DeleteNetworkRequest deleteNetworkRequest); /** Gets a Databricks network configuration, which represents a cloud VPC and its resources. */ Network get(GetNetworkRequest getNetworkRequest); - /** - * Gets a list of all Databricks network configurations for an account, specified by ID. - * - *
This operation is available only if your account is on the E2 version of the platform. - */ + /** Lists Databricks network configurations for an account. */ Collection list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PricingTier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PricingTier.java index cdbdc6e5a..b3879eb41 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PricingTier.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PricingTier.java @@ -4,11 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - * - *
[AWS Pricing]: https://databricks.com/product/aws-pricing - */ @Generated public enum PricingTier { COMMUNITY_EDITION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java index 82b70e7a6..644d702be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessAPI.java @@ -24,87 +24,53 @@ public PrivateAccessAPI(PrivateAccessService mock) { } /** - * Creates a private access settings object, which specifies how your workspace is accessed over - * [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings - * object referenced by ID in the workspace's `private_access_settings_id` property. - * - *
You can share one private access settings with multiple workspaces in a single account. - * However, private access settings are specific to AWS regions, so only workspaces in the same - * AWS region can use a given private access settings object. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * Creates a private access settings configuration, which represents network access restrictions + * for workspace resources. Private access settings configure whether workspaces can be accessed + * from the public internet or only from private endpoints. */ public PrivateAccessSettings create(CreatePrivateAccessSettingsRequest request) { return impl.create(request); } - public void delete(String privateAccessSettingsId) { - delete(new DeletePrivateAccesRequest().setPrivateAccessSettingsId(privateAccessSettingsId)); + public PrivateAccessSettings delete(String privateAccessSettingsId) { + return delete( + new DeletePrivateAccesRequest().setPrivateAccessSettingsId(privateAccessSettingsId)); } - /** - * Deletes a private access settings object, which determines how your workspace is accessed over - * [AWS PrivateLink]. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ - public void delete(DeletePrivateAccesRequest request) { - impl.delete(request); + /** Deletes a Databricks private access settings configuration, both specified by ID. */ + public PrivateAccessSettings delete(DeletePrivateAccesRequest request) { + return impl.delete(request); } public PrivateAccessSettings get(String privateAccessSettingsId) { return get(new GetPrivateAccesRequest().setPrivateAccessSettingsId(privateAccessSettingsId)); } - /** - * Gets a private access settings object, which specifies how your workspace is accessed over [AWS - * PrivateLink]. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ + /** Gets a Databricks private access settings configuration, both specified by ID. */ public PrivateAccessSettings get(GetPrivateAccesRequest request) { return impl.get(request); } - /** Gets a list of all private access settings objects for an account, specified by ID. */ + /** Lists Databricks private access settings for an account. */ public Iterable list() { return impl.list(); } /** * Updates an existing private access settings object, which specifies how your workspace is - * accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access - * settings object referenced by ID in the workspace's `private_access_settings_id` property. - * - *
This operation completely overwrites your existing private access settings object attached - * to your workspaces. All workspaces attached to the private access settings are affected by any - * change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are - * updated, effects of these changes might take several minutes to propagate to the workspace API. - * - *
You can share one private access settings object with multiple workspaces in a single - * account. However, private access settings are specific to AWS regions, so only workspaces in - * the same AWS region can use a given private access settings object. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * accessed over AWS PrivateLink. To use AWS PrivateLink, a workspace must have a private access + * settings object referenced by ID in the workspace's private_access_settings_id property. This + * operation completely overwrites your existing private access settings object attached to your + * workspaces. All workspaces attached to the private access settings are affected by any change. + * If public_access_enabled, private_access_level, or allowed_vpc_endpoint_ids are updated, + * effects of these changes might take several minutes to propagate to the workspace API. You can + * share one private access settings object with multiple workspaces in a single account. However, + * private access settings are specific to AWS regions, so only workspaces in the same AWS region + * can use a given private access settings object. Before configuring PrivateLink, read the + * Databricks article about PrivateLink. */ - public void replace(ReplacePrivateAccessSettingsRequest request) { - impl.replace(request); + public PrivateAccessSettings replace(ReplacePrivateAccessSettingsRequest request) { + return impl.replace(request); } public PrivateAccessService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java index dcca1eeba..1ed77901e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java @@ -34,7 +34,7 @@ public PrivateAccessSettings create(CreatePrivateAccessSettingsRequest request) } @Override - public void delete(DeletePrivateAccesRequest request) { + public PrivateAccessSettings delete(DeletePrivateAccesRequest request) { String path = String.format( "/api/2.0/accounts/%s/private-access-settings/%s", @@ -43,7 +43,7 @@ public void delete(DeletePrivateAccesRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, PrivateAccessSettings.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -76,17 +76,19 @@ public Collection list() { } @Override - public void replace(ReplacePrivateAccessSettingsRequest request) { + public PrivateAccessSettings replace(ReplacePrivateAccessSettingsRequest request) { String path = String.format( "/api/2.0/accounts/%s/private-access-settings/%s", apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); try { - Request req = new Request("PUT", path, apiClient.serialize(request)); + Request req = + new Request( + "PUT", path, apiClient.serialize(request.getCustomerFacingPrivateAccessSettings())); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, PrivateAccessSettings.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java index eb3dba693..73b5dd3f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java @@ -4,13 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * The private access level controls which VPC endpoints can connect to the UI or API of any - * workspace that attaches this private access settings object. * `ACCOUNT` level access (the - * default) allows only VPC endpoints that are registered in your Databricks account connect to your - * workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - * workspace. For details, see `allowed_vpc_endpoint_ids`. - */ @Generated public enum PrivateAccessLevel { ACCOUNT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java index 1b3d54252..66137804a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java @@ -14,69 +14,35 @@ @Generated public interface PrivateAccessService { /** - * Creates a private access settings object, which specifies how your workspace is accessed over - * [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings - * object referenced by ID in the workspace's `private_access_settings_id` property. - * - *
You can share one private access settings with multiple workspaces in a single account. - * However, private access settings are specific to AWS regions, so only workspaces in the same - * AWS region can use a given private access settings object. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * Creates a private access settings configuration, which represents network access restrictions + * for workspace resources. Private access settings configure whether workspaces can be accessed + * from the public internet or only from private endpoints. */ PrivateAccessSettings create( CreatePrivateAccessSettingsRequest createPrivateAccessSettingsRequest); - /** - * Deletes a private access settings object, which determines how your workspace is accessed over - * [AWS PrivateLink]. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ - void delete(DeletePrivateAccesRequest deletePrivateAccesRequest); + /** Deletes a Databricks private access settings configuration, both specified by ID. */ + PrivateAccessSettings delete(DeletePrivateAccesRequest deletePrivateAccesRequest); - /** - * Gets a private access settings object, which specifies how your workspace is accessed over [AWS - * PrivateLink]. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ + /** Gets a Databricks private access settings configuration, both specified by ID. */ PrivateAccessSettings get(GetPrivateAccesRequest getPrivateAccesRequest); - /** Gets a list of all private access settings objects for an account, specified by ID. */ + /** Lists Databricks private access settings for an account. */ Collection list(); /** * Updates an existing private access settings object, which specifies how your workspace is - * accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access - * settings object referenced by ID in the workspace's `private_access_settings_id` property. - * - *
This operation completely overwrites your existing private access settings object attached - * to your workspaces. All workspaces attached to the private access settings are affected by any - * change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are - * updated, effects of these changes might take several minutes to propagate to the workspace API. - * - *
You can share one private access settings object with multiple workspaces in a single - * account. However, private access settings are specific to AWS regions, so only workspaces in - * the same AWS region can use a given private access settings object. - * - *
Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *
[AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * accessed over AWS PrivateLink. To use AWS PrivateLink, a workspace must have a private access + * settings object referenced by ID in the workspace's private_access_settings_id property. This + * operation completely overwrites your existing private access settings object attached to your + * workspaces. All workspaces attached to the private access settings are affected by any change. + * If public_access_enabled, private_access_level, or allowed_vpc_endpoint_ids are updated, + * effects of these changes might take several minutes to propagate to the workspace API. You can + * share one private access settings object with multiple workspaces in a single account. However, + * private access settings are specific to AWS regions, so only workspaces in the same AWS region + * can use a given private access settings object. Before configuring PrivateLink, read the + * Databricks article about PrivateLink. */ - void replace(ReplacePrivateAccessSettingsRequest replacePrivateAccessSettingsRequest); + PrivateAccessSettings replace( + ReplacePrivateAccessSettingsRequest replacePrivateAccessSettingsRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java index 03c466d78..a36f8b90b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java @@ -8,17 +8,32 @@ import java.util.Collection; import java.util.Objects; +/** * */ @Generated public class PrivateAccessSettings { - /** The Databricks account ID that hosts the credential. */ + /** The Databricks account ID that hosts the private access settings. */ @JsonProperty("account_id") private String accountId; - /** An array of Databricks VPC endpoint IDs. */ + /** + * An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when + * registering the VPC endpoint configuration in your Databricks account. This is not the ID of + * the VPC endpoint in AWS. Only used when private_access_level is set to ENDPOINT. This is an + * allow list of VPC endpoints that in your account that can connect to your workspace over AWS + * PrivateLink. If hybrid access to your workspace is enabled by setting public_access_enabled to + * true, this control only works for PrivateLink connections. To control how your workspace is + * accessed via public internet, see IP access lists. + */ @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; - /** */ + /** + * The private access level controls which VPC endpoints can connect to the UI or API of any + * workspace that attaches this private access settings object. `ACCOUNT` level access (the + * default) allows only VPC endpoints that are registered in your Databricks account connect to + * your workspace. `ENDPOINT` level access allows only specified VPC endpoints connect to your + * workspace. For details, see allowed_vpc_endpoint_ids. + */ @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; @@ -32,14 +47,13 @@ public class PrivateAccessSettings { /** * Determines if the workspace can be accessed over public internet. 
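Taken together with the PrivateAccessImpl change above and the request restructuring below, replace() now ships the settings inside a nested customer_facing_private_access_settings object and returns the updated PrivateAccessSettings. A hedged sketch with placeholder values:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.provisioning.PrivateAccessLevel;
import com.databricks.sdk.service.provisioning.PrivateAccessSettings;
import com.databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest;

public class ReplacePrivateAccessExample {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();
    PrivateAccessSettings updated =
        account.privateAccess()
            .replace(
                new ReplacePrivateAccessSettingsRequest()
                    .setPrivateAccessSettingsId("<private-access-settings-id>")
                    // Only this nested payload is serialized into the PUT body,
                    // per the PrivateAccessImpl change above.
                    .setCustomerFacingPrivateAccessSettings(
                        new PrivateAccessSettings()
                            .setRegion("us-west-2")
                            .setPrivateAccessLevel(PrivateAccessLevel.ACCOUNT)
                            .setPublicAccessEnabled(false)));
    System.out.println(updated);
  }
}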
For fully private workspaces, - * you can optionally specify `false`, but only if you implement both the front-end and the - * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is - * enabled. + * you can optionally specify false, but only if you implement both the front-end and the back-end + * PrivateLink connections. Otherwise, specify true, which means that public access is enabled. */ @JsonProperty("public_access_enabled") private Boolean publicAccessEnabled; - /** The cloud region for workspaces attached to this private access settings object. */ + /** The AWS region for workspaces attached to this private access settings object. */ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplacePrivateAccessSettingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplacePrivateAccessSettingsRequest.java index 4d62abff4..643a96014 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplacePrivateAccessSettingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplacePrivateAccessSettingsRequest.java @@ -6,70 +6,25 @@ import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; @Generated public class ReplacePrivateAccessSettingsRequest { - /** - * An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when - * registering the VPC endpoint configuration in your Databricks account. This is not the ID of - * the VPC endpoint in AWS. - * - *
Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC - * endpoints that in your account that can connect to your workspace over AWS PrivateLink. - * - *
If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, - * this control only works for PrivateLink connections. To control how your workspace is accessed - * via public internet, see [IP access lists]. - * - *
[IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html - */ - @JsonProperty("allowed_vpc_endpoint_ids") - private Collection allowedVpcEndpointIds; + /** Properties of the new private access settings object. */ + @JsonProperty("customer_facing_private_access_settings") + private PrivateAccessSettings customerFacingPrivateAccessSettings; - /** */ - @JsonProperty("private_access_level") - private PrivateAccessLevel privateAccessLevel; - - /** Databricks Account API private access settings ID. */ + /** Databricks private access settings ID. */ @JsonIgnore private String privateAccessSettingsId; - /** The human-readable name of the private access settings object. */ - @JsonProperty("private_access_settings_name") - private String privateAccessSettingsName; - - /** - * Determines if the workspace can be accessed over public internet. For fully private workspaces, - * you can optionally specify `false`, but only if you implement both the front-end and the - * back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is - * enabled. - */ - @JsonProperty("public_access_enabled") - private Boolean publicAccessEnabled; - - /** The cloud region for workspaces associated with this private access settings object. */ - @JsonProperty("region") - private String region; - - public ReplacePrivateAccessSettingsRequest setAllowedVpcEndpointIds( - Collection allowedVpcEndpointIds) { - this.allowedVpcEndpointIds = allowedVpcEndpointIds; - return this; - } - - public Collection getAllowedVpcEndpointIds() { - return allowedVpcEndpointIds; - } - - public ReplacePrivateAccessSettingsRequest setPrivateAccessLevel( - PrivateAccessLevel privateAccessLevel) { - this.privateAccessLevel = privateAccessLevel; + public ReplacePrivateAccessSettingsRequest setCustomerFacingPrivateAccessSettings( + PrivateAccessSettings customerFacingPrivateAccessSettings) { + this.customerFacingPrivateAccessSettings = customerFacingPrivateAccessSettings; return this; } - public PrivateAccessLevel getPrivateAccessLevel() { - return privateAccessLevel; + public PrivateAccessSettings getCustomerFacingPrivateAccessSettings() { + return customerFacingPrivateAccessSettings; } public ReplacePrivateAccessSettingsRequest setPrivateAccessSettingsId( @@ -82,67 +37,26 @@ public String getPrivateAccessSettingsId() { return privateAccessSettingsId; } - public ReplacePrivateAccessSettingsRequest setPrivateAccessSettingsName( - String privateAccessSettingsName) { - this.privateAccessSettingsName = privateAccessSettingsName; - return this; - } - - public String getPrivateAccessSettingsName() { - return privateAccessSettingsName; - } - - public ReplacePrivateAccessSettingsRequest setPublicAccessEnabled(Boolean publicAccessEnabled) { - this.publicAccessEnabled = publicAccessEnabled; - return this; - } - - public Boolean getPublicAccessEnabled() { - return publicAccessEnabled; - } - - public ReplacePrivateAccessSettingsRequest setRegion(String region) { - this.region = region; - return this; - } - - public String getRegion() { - return region; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ReplacePrivateAccessSettingsRequest that = (ReplacePrivateAccessSettingsRequest) o; - return Objects.equals(allowedVpcEndpointIds, that.allowedVpcEndpointIds) - && Objects.equals(privateAccessLevel, that.privateAccessLevel) - && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) - && 
Objects.equals(privateAccessSettingsName, that.privateAccessSettingsName) - && Objects.equals(publicAccessEnabled, that.publicAccessEnabled) - && Objects.equals(region, that.region); + return Objects.equals( + customerFacingPrivateAccessSettings, that.customerFacingPrivateAccessSettings) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId); } @Override public int hashCode() { - return Objects.hash( - allowedVpcEndpointIds, - privateAccessLevel, - privateAccessSettingsId, - privateAccessSettingsName, - publicAccessEnabled, - region); + return Objects.hash(customerFacingPrivateAccessSettings, privateAccessSettingsId); } @Override public String toString() { return new ToStringer(ReplacePrivateAccessSettingsRequest.class) - .add("allowedVpcEndpointIds", allowedVpcEndpointIds) - .add("privateAccessLevel", privateAccessLevel) + .add("customerFacingPrivateAccessSettings", customerFacingPrivateAccessSettings) .add("privateAccessSettingsId", privateAccessSettingsId) - .add("privateAccessSettingsName", privateAccessSettingsName) - .add("publicAccessEnabled", publicAccessEnabled) - .add("region", region) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java index 4faf62f43..410637961 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java @@ -7,10 +7,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Root S3 bucket information. */ @Generated public class RootBucketInfo { - /** The name of the S3 bucket. */ + /** Name of the S3 bucket */ @JsonProperty("bucket_name") private String bucketName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java index 5dc37db4d..b8a3cb710 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java @@ -29,32 +29,21 @@ public StorageAPI(StorageService mock) { impl = mock; } - /** - * Creates new storage configuration for an account, specified by ID. Uploads a storage - * configuration object that represents the root AWS S3 bucket in your account. Databricks stores - * related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, - * you need to configure the required bucket policy. - * - *

For information about how to create a new workspace with this API, see [Create a new - * workspace using the Account API] - * - *

[Create a new workspace using the Account API]: - * http://docs.databricks.com/administration-guide/account-api/new-workspace.html - */ + /** Creates a Databricks storage configuration for an account. */ public StorageConfiguration create(CreateStorageConfigurationRequest request) { return impl.create(request); } - public void delete(String storageConfigurationId) { - delete(new DeleteStorageRequest().setStorageConfigurationId(storageConfigurationId)); + public StorageConfiguration delete(String storageConfigurationId) { + return delete(new DeleteStorageRequest().setStorageConfigurationId(storageConfigurationId)); } /** * Deletes a Databricks storage configuration. You cannot delete a storage configuration that is * associated with any workspace. */ - public void delete(DeleteStorageRequest request) { - impl.delete(request); + public StorageConfiguration delete(DeleteStorageRequest request) { + return impl.delete(request); } public StorageConfiguration get(String storageConfigurationId) { @@ -66,7 +55,7 @@ public StorageConfiguration get(GetStorageRequest request) { return impl.get(request); } - /** Gets a list of all Databricks storage configurations for your account, specified by ID. */ + /** Lists Databricks storage configurations for an account, specified by ID. */ public Iterable list() { return impl.list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java index a20cf9f3b..e8af04fa9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java @@ -9,7 +9,7 @@ @Generated public class StorageConfiguration { - /** The Databricks account ID that hosts the credential. */ + /** The Databricks account ID associated with this storage configuration. */ @JsonProperty("account_id") private String accountId; @@ -17,7 +17,17 @@ public class StorageConfiguration { @JsonProperty("creation_time") private Long creationTime; - /** */ + /** + * Optional IAM role that is used to access the workspace catalog which is created during + * workspace creation for UC by Default. If a storage configuration with this field populated is + * used to create a workspace, then a workspace catalog is created together with the workspace. + * The workspace catalog shares the root bucket with internal workspace storage (including DBFS + * root) but uses a dedicated bucket path prefix. + */ + @JsonProperty("role_arn") + private String roleArn; + + /** The root bucket information for the storage configuration. 
*/ @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; @@ -47,6 +57,15 @@ public Long getCreationTime() { return creationTime; } + public StorageConfiguration setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + public StorageConfiguration setRootBucketInfo(RootBucketInfo rootBucketInfo) { this.rootBucketInfo = rootBucketInfo; return this; @@ -81,6 +100,7 @@ public boolean equals(Object o) { StorageConfiguration that = (StorageConfiguration) o; return Objects.equals(accountId, that.accountId) && Objects.equals(creationTime, that.creationTime) + && Objects.equals(roleArn, that.roleArn) && Objects.equals(rootBucketInfo, that.rootBucketInfo) && Objects.equals(storageConfigurationId, that.storageConfigurationId) && Objects.equals(storageConfigurationName, that.storageConfigurationName); @@ -89,7 +109,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - accountId, creationTime, rootBucketInfo, storageConfigurationId, storageConfigurationName); + accountId, + creationTime, + roleArn, + rootBucketInfo, + storageConfigurationId, + storageConfigurationName); } @Override @@ -97,6 +122,7 @@ public String toString() { return new ToStringer(StorageConfiguration.class) .add("accountId", accountId) .add("creationTime", creationTime) + .add("roleArn", roleArn) .add("rootBucketInfo", rootBucketInfo) .add("storageConfigurationId", storageConfigurationId) .add("storageConfigurationName", storageConfigurationName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java index 6be79f6d7..e81d1652d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java @@ -34,7 +34,7 @@ public StorageConfiguration create(CreateStorageConfigurationRequest request) { } @Override - public void delete(DeleteStorageRequest request) { + public StorageConfiguration delete(DeleteStorageRequest request) { String path = String.format( "/api/2.0/accounts/%s/storage-configurations/%s", @@ -43,7 +43,7 @@ public void delete(DeleteStorageRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, StorageConfiguration.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java index 8324e8267..ba398e5aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java @@ -17,29 +17,18 @@ */ @Generated public interface StorageService { - /** - * Creates new storage configuration for an account, specified by ID. Uploads a storage - * configuration object that represents the root AWS S3 bucket in your account. Databricks stores - * related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, - * you need to configure the required bucket policy. - * - *

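The `role_arn` field added just above carries the optional UC-by-default IAM role on a storage configuration. A sketch of creating a configuration and reading the field back; the `storage()` accessor is assumed from the wider SDK, and the bucket and configuration names are placeholders:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.provisioning.CreateStorageConfigurationRequest;
    import com.databricks.sdk.service.provisioning.RootBucketInfo;
    import com.databricks.sdk.service.provisioning.StorageConfiguration;

    public class StorageConfigurationExample {
      public static void main(String[] args) {
        AccountClient account = new AccountClient();
        StorageConfiguration created =
            account.storage().create(
                new CreateStorageConfigurationRequest()
                    .setStorageConfigurationName("main-storage")
                    .setRootBucketInfo(new RootBucketInfo().setBucketName("my-root-bucket")));
        // role_arn stays null unless the configuration carries the workspace-catalog role.
        System.out.println(created.getRoleArn());
      }
    }
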
For information about how to create a new workspace with this API, see [Create a new - * workspace using the Account API] - * - *

[Create a new workspace using the Account API]: - * http://docs.databricks.com/administration-guide/account-api/new-workspace.html - */ + /** Creates a Databricks storage configuration for an account. */ StorageConfiguration create(CreateStorageConfigurationRequest createStorageConfigurationRequest); /** * Deletes a Databricks storage configuration. You cannot delete a storage configuration that is * associated with any workspace. */ - void delete(DeleteStorageRequest deleteStorageRequest); + StorageConfiguration delete(DeleteStorageRequest deleteStorageRequest); /** Gets a Databricks storage configuration for an account, both specified by ID. */ StorageConfiguration get(GetStorageRequest getStorageRequest); - /** Gets a list of all Databricks storage configurations for your account, specified by ID. */ + /** Lists Databricks storage configurations for an account, specified by ID. */ Collection list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java index ee42e7691..39c3d6d28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java @@ -9,26 +9,10 @@ @Generated public class StsRole { - /** - * The external ID that needs to be trusted by the cross-account role. This is always your - * Databricks account ID. - */ - @JsonProperty("external_id") - private String externalId; - - /** The Amazon Resource Name (ARN) of the cross account role. */ + /** The Amazon Resource Name (ARN) of the cross account IAM role. */ @JsonProperty("role_arn") private String roleArn; - public StsRole setExternalId(String externalId) { - this.externalId = externalId; - return this; - } - - public String getExternalId() { - return externalId; - } - public StsRole setRoleArn(String roleArn) { this.roleArn = roleArn; return this; @@ -43,19 +27,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; StsRole that = (StsRole) o; - return Objects.equals(externalId, that.externalId) && Objects.equals(roleArn, that.roleArn); + return Objects.equals(roleArn, that.roleArn); } @Override public int hashCode() { - return Objects.hash(externalId, roleArn); + return Objects.hash(roleArn); } @Override public String toString() { - return new ToStringer(StsRole.class) - .add("externalId", externalId) - .add("roleArn", roleArn) - .toString(); + return new ToStringer(StsRole.class).add("roleArn", roleArn).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java index a690adac9..242a2a8ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java @@ -3,159 +3,52 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Map; import java.util.Objects; @Generated public class UpdateWorkspaceRequest { - /** - * The 
AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is - * available only for updating failed workspaces. - */ - @JsonProperty("aws_region") - private String awsRegion; - - /** - * ID of the workspace's credential configuration object. This parameter is available for updating - * both failed and running workspaces. - */ - @JsonProperty("credentials_id") - private String credentialsId; - - /** - * The custom tags key-value pairing that is attached to this workspace. The key-value pair is a - * string of utf-8 characters. The value can be an empty string, with maximum length of 255 - * characters. The key can be of maximum length of 127 characters, and cannot be empty. - */ - @JsonProperty("custom_tags") - private Map customTags; - - /** - * The ID of the workspace's managed services encryption key configuration object. This parameter - * is available only for updating failed workspaces. - */ - @JsonProperty("managed_services_customer_managed_key_id") - private String managedServicesCustomerManagedKeyId; - /** */ - @JsonProperty("network_connectivity_config_id") - private String networkConnectivityConfigId; - - /** - * The ID of the workspace's network configuration object. Used only if you already use a - * customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC - * to a customer-managed VPC by updating the workspace to add a network configuration ID. - */ - @JsonProperty("network_id") - private String networkId; - - /** - * The ID of the workspace's private access settings configuration object. This parameter is - * available only for updating failed workspaces. - */ - @JsonProperty("private_access_settings_id") - private String privateAccessSettingsId; - - /** - * The ID of the workspace's storage configuration object. This parameter is available only for - * updating failed workspaces. - */ - @JsonProperty("storage_configuration_id") - private String storageConfigurationId; + @JsonProperty("customer_facing_workspace") + private Workspace customerFacingWorkspace; /** - * The ID of the key configuration object for workspace storage. This parameter is available for - * updating both failed and running workspaces. + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

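A minimal sketch of the mask semantics described here: only the fields named in `update_mask` are replaced on the workspace. The `workspaces()` accessor is assumed from the wider SDK, the update entry point is the one shown later in this diff, and the workspace ID, tag values, and `custom_tags` path are placeholders:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.provisioning.UpdateWorkspaceRequest;
    import com.databricks.sdk.service.provisioning.Workspace;
    import java.util.Collections;

    public class UpdateWorkspaceTagsExample {
      public static void main(String[] args) {
        AccountClient account = new AccountClient();
        // Only custom_tags is listed in the mask, so every other workspace field is left untouched.
        Workspace patch =
            new Workspace().setCustomTags(Collections.singletonMap("cost-center", "eng"));
        account.workspaces().update(
            new UpdateWorkspaceRequest()
                .setWorkspaceId(123456789L)
                .setUpdateMask("custom_tags")
                .setCustomerFacingWorkspace(patch));
      }
    }
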
A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. */ - @JsonProperty("storage_customer_managed_key_id") - private String storageCustomerManagedKeyId; + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; - /** Workspace ID. */ + /** A unique integer ID for the workspace */ @JsonIgnore private Long workspaceId; - public UpdateWorkspaceRequest setAwsRegion(String awsRegion) { - this.awsRegion = awsRegion; - return this; - } - - public String getAwsRegion() { - return awsRegion; - } - - public UpdateWorkspaceRequest setCredentialsId(String credentialsId) { - this.credentialsId = credentialsId; - return this; - } - - public String getCredentialsId() { - return credentialsId; - } - - public UpdateWorkspaceRequest setCustomTags(Map customTags) { - this.customTags = customTags; - return this; - } - - public Map getCustomTags() { - return customTags; - } - - public UpdateWorkspaceRequest setManagedServicesCustomerManagedKeyId( - String managedServicesCustomerManagedKeyId) { - this.managedServicesCustomerManagedKeyId = managedServicesCustomerManagedKeyId; - return this; - } - - public String getManagedServicesCustomerManagedKeyId() { - return managedServicesCustomerManagedKeyId; - } - - public UpdateWorkspaceRequest setNetworkConnectivityConfigId(String networkConnectivityConfigId) { - this.networkConnectivityConfigId = networkConnectivityConfigId; - return this; - } - - public String getNetworkConnectivityConfigId() { - return networkConnectivityConfigId; - } - - public UpdateWorkspaceRequest setNetworkId(String networkId) { - this.networkId = networkId; - return this; - } - - public String getNetworkId() { - return networkId; - } - - public UpdateWorkspaceRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { - this.privateAccessSettingsId = privateAccessSettingsId; - return this; - } - - public String getPrivateAccessSettingsId() { - return privateAccessSettingsId; - } - - public UpdateWorkspaceRequest setStorageConfigurationId(String storageConfigurationId) { - this.storageConfigurationId = storageConfigurationId; + public UpdateWorkspaceRequest setCustomerFacingWorkspace(Workspace customerFacingWorkspace) { + this.customerFacingWorkspace = customerFacingWorkspace; return this; } - public String getStorageConfigurationId() { - return storageConfigurationId; + public Workspace getCustomerFacingWorkspace() { + return customerFacingWorkspace; } - public UpdateWorkspaceRequest setStorageCustomerManagedKeyId(String storageCustomerManagedKeyId) { - this.storageCustomerManagedKeyId = storageCustomerManagedKeyId; + public UpdateWorkspaceRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; return this; } - public String getStorageCustomerManagedKeyId() { - return storageCustomerManagedKeyId; + public String getUpdateMask() { + return updateMask; } public UpdateWorkspaceRequest setWorkspaceId(Long workspaceId) { @@ -172,46 +65,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateWorkspaceRequest that = (UpdateWorkspaceRequest) o; - return Objects.equals(awsRegion, that.awsRegion) - && Objects.equals(credentialsId, that.credentialsId) - && Objects.equals(customTags, that.customTags) - && Objects.equals( - managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) - && 
Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) - && Objects.equals(networkId, that.networkId) - && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) - && Objects.equals(storageConfigurationId, that.storageConfigurationId) - && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) + return Objects.equals(customerFacingWorkspace, that.customerFacingWorkspace) + && Objects.equals(updateMask, that.updateMask) && Objects.equals(workspaceId, that.workspaceId); } @Override public int hashCode() { - return Objects.hash( - awsRegion, - credentialsId, - customTags, - managedServicesCustomerManagedKeyId, - networkConnectivityConfigId, - networkId, - privateAccessSettingsId, - storageConfigurationId, - storageCustomerManagedKeyId, - workspaceId); + return Objects.hash(customerFacingWorkspace, updateMask, workspaceId); } @Override public String toString() { return new ToStringer(UpdateWorkspaceRequest.class) - .add("awsRegion", awsRegion) - .add("credentialsId", credentialsId) - .add("customTags", customTags) - .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) - .add("networkConnectivityConfigId", networkConnectivityConfigId) - .add("networkId", networkId) - .add("privateAccessSettingsId", privateAccessSettingsId) - .add("storageConfigurationId", storageConfigurationId) - .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) + .add("customerFacingWorkspace", customerFacingWorkspace) + .add("updateMask", updateMask) .add("workspaceId", workspaceId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java index 82fe9b177..ea8b27b53 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java @@ -7,9 +7,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** * */ @Generated public class VpcEndpoint { - /** The Databricks account ID that hosts the VPC endpoint configuration. */ + /** + * The Databricks account ID that hosts the VPC endpoint configuration. TODO - This may signal an + * OpenAPI diff; it does not show up in the generated spec + */ @JsonProperty("account_id") private String accountId; @@ -33,7 +37,7 @@ public class VpcEndpoint { @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; - /** */ + /** The cloud info of this vpc endpoint. Info for a GCP vpc endpoint. */ @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; @@ -51,7 +55,13 @@ public class VpcEndpoint { @JsonProperty("state") private String state; - /** */ + /** + * This enumeration represents the type of Databricks VPC endpoint service that was used when + * creating this VPC endpoint. If the VPC endpoint connects to the Databricks control plane for + * either the front-end connection or the back-end REST API connection, the value is + * WORKSPACE_ACCESS. If the VPC endpoint connects to the Databricks workspace for the back-end + * secure cluster connectivity relay, the value is DATAPLANE_RELAY_ACCESS. 
+ */ @JsonProperty("use_case") private EndpointUseCase useCase; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java index 8c57e5857..e9568104b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsAPI.java @@ -43,23 +43,16 @@ public VpcEndpoint create(CreateVpcEndpointRequest request) { return impl.create(request); } - public void delete(String vpcEndpointId) { - delete(new DeleteVpcEndpointRequest().setVpcEndpointId(vpcEndpointId)); + public VpcEndpoint delete(String vpcEndpointId) { + return delete(new DeleteVpcEndpointRequest().setVpcEndpointId(vpcEndpointId)); } /** - * Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can - * communicate privately with Databricks over [AWS PrivateLink]. - * - *

Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *

[AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: - * https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * Deletes a Databricks VPC endpoint configuration. You cannot delete a VPC endpoint configuration + * that is associated with any workspace. */ - public void delete(DeleteVpcEndpointRequest request) { - impl.delete(request); + public VpcEndpoint delete(DeleteVpcEndpointRequest request) { + return impl.delete(request); } public VpcEndpoint get(String vpcEndpointId) { @@ -77,14 +70,7 @@ public VpcEndpoint get(GetVpcEndpointRequest request) { return impl.get(request); } - /** - * Gets a list of all VPC endpoints for an account, specified by ID. - * - *

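Per the signature change above, `delete` now returns the removed configuration instead of `void`. A sketch with a placeholder endpoint ID; the `vpcEndpoints()` accessor is assumed from the wider SDK, and `use_case` carries the WORKSPACE_ACCESS / DATAPLANE_RELAY_ACCESS distinction documented earlier in this diff:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.provisioning.VpcEndpoint;

    public class DeleteVpcEndpointExample {
      public static void main(String[] args) {
        AccountClient account = new AccountClient();
        // DELETE now echoes the removed object back, so its fields can be inspected or logged.
        VpcEndpoint deleted = account.vpcEndpoints().delete("vpce-config-1234");
        System.out.println(deleted.getVpcEndpointName() + " / " + deleted.getUseCase());
      }
    }
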
Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *

[Databricks article about PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ + /** Lists Databricks VPC endpoint configurations for an account. */ public Iterable list() { return impl.list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java index c6141bb98..b11d9f759 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java @@ -33,7 +33,7 @@ public VpcEndpoint create(CreateVpcEndpointRequest request) { } @Override - public void delete(DeleteVpcEndpointRequest request) { + public VpcEndpoint delete(DeleteVpcEndpointRequest request) { String path = String.format( "/api/2.0/accounts/%s/vpc-endpoints/%s", @@ -42,7 +42,7 @@ public void delete(DeleteVpcEndpointRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, VpcEndpoint.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsService.java index 0f4a57c16..8814e90ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsService.java @@ -32,17 +32,10 @@ public interface VpcEndpointsService { VpcEndpoint create(CreateVpcEndpointRequest createVpcEndpointRequest); /** - * Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can - * communicate privately with Databricks over [AWS PrivateLink]. - * - *

Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *

[AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: - * https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about - * PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + * Deletes a Databricks VPC endpoint configuration. You cannot delete a VPC endpoint configuration + * that is associated with any workspace. */ - void delete(DeleteVpcEndpointRequest deleteVpcEndpointRequest); + VpcEndpoint delete(DeleteVpcEndpointRequest deleteVpcEndpointRequest); /** * Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to @@ -53,13 +46,6 @@ public interface VpcEndpointsService { */ VpcEndpoint get(GetVpcEndpointRequest getVpcEndpointRequest); - /** - * Gets a list of all VPC endpoints for an account, specified by ID. - * - *

Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - * - *

[Databricks article about PrivateLink]: - * https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - */ + /** Lists Databricks VPC endpoint configurations for an account. */ Collection list(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java index d6f34acaa..eae712fe4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcStatus.java @@ -4,14 +4,10 @@ import com.databricks.sdk.support.Generated; -/** - * The status of this network configuration object in terms of its use in a workspace: * - * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. - */ @Generated public enum VpcStatus { - BROKEN, // Broken. - UNATTACHED, // Unattached. - VALID, // Valid. - WARNED, // Warned. + BROKEN, + UNATTACHED, + VALID, + WARNED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java index 184b8f62c..69379d15f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WarningType.java @@ -5,7 +5,6 @@ import com.databricks.sdk.support.Generated; import com.fasterxml.jackson.annotation.JsonProperty; -/** The AWS resource associated with this warning: a subnet or a security group. */ @Generated public enum WarningType { @JsonProperty("securityGroup") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java index ea983a894..886c4c627 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java @@ -14,7 +14,7 @@ public class Workspace { @JsonProperty("account_id") private String accountId; - /** The AWS region of the workspace data plane (for example, `us-west-2`). */ + /** */ @JsonProperty("aws_region") private String awsRegion; @@ -30,6 +30,10 @@ public class Workspace { @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; + /** The compute mode of the workspace. */ + @JsonProperty("compute_mode") + private CustomerFacingComputeMode computeMode; + /** Time in epoch milliseconds when the workspace was created. */ @JsonProperty("creation_time") private Long creationTime; @@ -46,21 +50,16 @@ public class Workspace { @JsonProperty("custom_tags") private Map customTags; - /** - * The deployment name defines part of the subdomain for the workspace. The workspace URL for web - * application and REST APIs is `.cloud.databricks.com`. - * - *

This value must be unique across all non-deleted deployments across all AWS regions. - */ + /** */ @JsonProperty("deployment_name") private String deploymentName; /** - * If this workspace is for a external customer, then external_customer_info is populated. If this - * workspace is not for a external customer, then external_customer_info is empty. + * A client owned field used to indicate the workspace status that the client expects to be in. + * For now this is only used to unblock Temporal workflow for GCP least privileged workspace. */ - @JsonProperty("external_customer_info") - private ExternalCustomerInfo externalCustomerInfo; + @JsonProperty("expected_workspace_status") + private WorkspaceStatus expectedWorkspaceStatus; /** */ @JsonProperty("gcp_managed_network_config") @@ -70,10 +69,6 @@ public class Workspace { @JsonProperty("gke_config") private GkeConfig gkeConfig; - /** Whether no public IP is enabled for the workspace. */ - @JsonProperty("is_no_public_ip_enabled") - private Boolean isNoPublicIpEnabled; - /** * The Google Cloud region of the workspace data plane in your Google account (for example, * `us-east4`). @@ -86,8 +81,20 @@ public class Workspace { private String managedServicesCustomerManagedKeyId; /** - * The network configuration ID that is attached to the workspace. This field is available only if - * the network is a customer-managed network. + * The network configuration for the workspace. + * + *

DEPRECATED. Use `network_id` instead. + */ + @JsonProperty("network") + private WorkspaceNetwork network; + + /** The object ID of network connectivity config. */ + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + /** + * If this workspace is BYO VPC, then the network_id will be populated. If this workspace is not + * BYO VPC, then the network_id will be empty. */ @JsonProperty("network_id") private String networkId; @@ -118,6 +125,10 @@ public class Workspace { @JsonProperty("storage_customer_managed_key_id") private String storageCustomerManagedKeyId; + /** The storage mode of the workspace. */ + @JsonProperty("storage_mode") + private CustomerFacingStorageMode storageMode; + /** A unique integer ID for the workspace */ @JsonProperty("workspace_id") private Long workspaceId; @@ -126,7 +137,7 @@ public class Workspace { @JsonProperty("workspace_name") private String workspaceName; - /** */ + /** The status of a workspace */ @JsonProperty("workspace_status") private WorkspaceStatus workspaceStatus; @@ -179,6 +190,15 @@ public CloudResourceContainer getCloudResourceContainer() { return cloudResourceContainer; } + public Workspace setComputeMode(CustomerFacingComputeMode computeMode) { + this.computeMode = computeMode; + return this; + } + + public CustomerFacingComputeMode getComputeMode() { + return computeMode; + } + public Workspace setCreationTime(Long creationTime) { this.creationTime = creationTime; return this; @@ -215,13 +235,13 @@ public String getDeploymentName() { return deploymentName; } - public Workspace setExternalCustomerInfo(ExternalCustomerInfo externalCustomerInfo) { - this.externalCustomerInfo = externalCustomerInfo; + public Workspace setExpectedWorkspaceStatus(WorkspaceStatus expectedWorkspaceStatus) { + this.expectedWorkspaceStatus = expectedWorkspaceStatus; return this; } - public ExternalCustomerInfo getExternalCustomerInfo() { - return externalCustomerInfo; + public WorkspaceStatus getExpectedWorkspaceStatus() { + return expectedWorkspaceStatus; } public Workspace setGcpManagedNetworkConfig(GcpManagedNetworkConfig gcpManagedNetworkConfig) { @@ -242,15 +262,6 @@ public GkeConfig getGkeConfig() { return gkeConfig; } - public Workspace setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { - this.isNoPublicIpEnabled = isNoPublicIpEnabled; - return this; - } - - public Boolean getIsNoPublicIpEnabled() { - return isNoPublicIpEnabled; - } - public Workspace setLocation(String location) { this.location = location; return this; @@ -270,6 +281,24 @@ public String getManagedServicesCustomerManagedKeyId() { return managedServicesCustomerManagedKeyId; } + public Workspace setNetwork(WorkspaceNetwork network) { + this.network = network; + return this; + } + + public WorkspaceNetwork getNetwork() { + return network; + } + + public Workspace setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + public Workspace setNetworkId(String networkId) { this.networkId = networkId; return this; @@ -315,6 +344,15 @@ public String getStorageCustomerManagedKeyId() { return storageCustomerManagedKeyId; } + public Workspace setStorageMode(CustomerFacingStorageMode storageMode) { + this.storageMode = storageMode; + return this; + } + + public CustomerFacingStorageMode getStorageMode() { + return storageMode; + } + public Workspace setWorkspaceId(Long 
workspaceId) { this.workspaceId = workspaceId; return this; @@ -361,22 +399,25 @@ public boolean equals(Object o) { && Objects.equals(azureWorkspaceInfo, that.azureWorkspaceInfo) && Objects.equals(cloud, that.cloud) && Objects.equals(cloudResourceContainer, that.cloudResourceContainer) + && Objects.equals(computeMode, that.computeMode) && Objects.equals(creationTime, that.creationTime) && Objects.equals(credentialsId, that.credentialsId) && Objects.equals(customTags, that.customTags) && Objects.equals(deploymentName, that.deploymentName) - && Objects.equals(externalCustomerInfo, that.externalCustomerInfo) + && Objects.equals(expectedWorkspaceStatus, that.expectedWorkspaceStatus) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) - && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) + && Objects.equals(network, that.network) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(networkId, that.networkId) && Objects.equals(pricingTier, that.pricingTier) && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) && Objects.equals(storageConfigurationId, that.storageConfigurationId) && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) + && Objects.equals(storageMode, that.storageMode) && Objects.equals(workspaceId, that.workspaceId) && Objects.equals(workspaceName, that.workspaceName) && Objects.equals(workspaceStatus, that.workspaceStatus) @@ -391,21 +432,24 @@ public int hashCode() { azureWorkspaceInfo, cloud, cloudResourceContainer, + computeMode, creationTime, credentialsId, customTags, deploymentName, - externalCustomerInfo, + expectedWorkspaceStatus, gcpManagedNetworkConfig, gkeConfig, - isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, + network, + networkConnectivityConfigId, networkId, pricingTier, privateAccessSettingsId, storageConfigurationId, storageCustomerManagedKeyId, + storageMode, workspaceId, workspaceName, workspaceStatus, @@ -420,21 +464,24 @@ public String toString() { .add("azureWorkspaceInfo", azureWorkspaceInfo) .add("cloud", cloud) .add("cloudResourceContainer", cloudResourceContainer) + .add("computeMode", computeMode) .add("creationTime", creationTime) .add("credentialsId", credentialsId) .add("customTags", customTags) .add("deploymentName", deploymentName) - .add("externalCustomerInfo", externalCustomerInfo) + .add("expectedWorkspaceStatus", expectedWorkspaceStatus) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) .add("gkeConfig", gkeConfig) - .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) + .add("network", network) + .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("networkId", networkId) .add("pricingTier", pricingTier) .add("privateAccessSettingsId", privateAccessSettingsId) .add("storageConfigurationId", storageConfigurationId) .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) + .add("storageMode", storageMode) .add("workspaceId", workspaceId) .add("workspaceName", workspaceName) .add("workspaceStatus", workspaceStatus) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceNetwork.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceNetwork.java new file mode 100755 index 000000000..478a1fd93 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceNetwork.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The network configuration for workspaces. */ +@Generated +public class WorkspaceNetwork { + /** + * The shared network config for GCP workspace. This object has common network configurations that + * are network attributions of a workspace. This object is input-only. + */ + @JsonProperty("gcp_common_network_config") + private GcpCommonNetworkConfig gcpCommonNetworkConfig; + + /** + * The mutually exclusive network deployment modes. The option decides which network mode the + * workspace will use. The network config for GCP workspace with Databricks managed network. This + * object is input-only and will not be provided when listing workspaces. See + * go/gcp-byovpc-alpha-design for interface decisions. + */ + @JsonProperty("gcp_managed_network_config") + private GcpManagedNetworkConfig gcpManagedNetworkConfig; + + /** + * The ID of the network object, if the workspace is a BYOVPC workspace. This should apply to + * workspaces on all clouds in internal services. In accounts-rest-api, user will use + * workspace.network_id for input and output instead. Currently (2021-06-19) the network ID is + * only used by GCP. + */ + @JsonProperty("network_id") + private String networkId; + + public WorkspaceNetwork setGcpCommonNetworkConfig(GcpCommonNetworkConfig gcpCommonNetworkConfig) { + this.gcpCommonNetworkConfig = gcpCommonNetworkConfig; + return this; + } + + public GcpCommonNetworkConfig getGcpCommonNetworkConfig() { + return gcpCommonNetworkConfig; + } + + public WorkspaceNetwork setGcpManagedNetworkConfig( + GcpManagedNetworkConfig gcpManagedNetworkConfig) { + this.gcpManagedNetworkConfig = gcpManagedNetworkConfig; + return this; + } + + public GcpManagedNetworkConfig getGcpManagedNetworkConfig() { + return gcpManagedNetworkConfig; + } + + public WorkspaceNetwork setNetworkId(String networkId) { + this.networkId = networkId; + return this; + } + + public String getNetworkId() { + return networkId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceNetwork that = (WorkspaceNetwork) o; + return Objects.equals(gcpCommonNetworkConfig, that.gcpCommonNetworkConfig) + && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) + && Objects.equals(networkId, that.networkId); + } + + @Override + public int hashCode() { + return Objects.hash(gcpCommonNetworkConfig, gcpManagedNetworkConfig, networkId); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceNetwork.class) + .add("gcpCommonNetworkConfig", gcpCommonNetworkConfig) + .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) + .add("networkId", networkId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java index c6614660a..628d49e35 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspaceStatus.java @@ -5,8 +5,12 @@ import com.databricks.sdk.support.Generated; /** - * The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` - * initially. Continue to check the status until the status is `RUNNING`. + * The different statuses of a workspace. The following represents the current set of valid + * transitions from status to status: NOT_PROVISIONED -> PROVISIONING -> CANCELLED PROVISIONING -> + * RUNNING -> FAILED -> CANCELLED (note that this transition is disallowed in the MultiWorkspace + * Project) RUNNING -> PROVISIONING -> BANNED -> CANCELLED FAILED -> PROVISIONING -> CANCELLED + * BANNED -> RUNNING -> CANCELLED Note that a transition from any state to itself is also valid. + * TODO(PLAT-5867): add a transition from CANCELLED to some other value (e.g. RECOVERING) */ @Generated public enum WorkspaceStatus { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java index 879c61231..479e91661 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java @@ -83,14 +83,38 @@ public Workspace waitGetWorkspaceRunning( } /** - * Creates a new workspace. + * Creates a new workspace using a credential configuration and a storage configuration, an + * optional network configuration (if using a customer-managed VPC), an optional managed services + * key configuration (if using customer-managed keys for managed services), and an optional + * storage key configuration (if using customer-managed keys for storage). The key configurations + * used for managed services and storage encryption can be the same or different. + * + *

Important: This operation is asynchronous. A response with HTTP status code 200 means the + * request has been accepted and is in progress, but does not mean that the workspace deployed + * successfully and is running. The initial workspace status is typically PROVISIONING. Use the + * workspace ID (workspace_id) field in the response to identify the new workspace and make + * repeated GET requests with the workspace ID and check its status. The workspace becomes + * available when the status changes to RUNNING. + * + *

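The paragraph above describes the poll-until-RUNNING flow; the `Wait` wrapper returned by `create` performs that polling. A sketch with placeholder names and IDs; the `workspaces()` accessor and `Wait.get(Duration)` come from the wider SDK rather than this hunk, and the timeout is an arbitrary choice:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.provisioning.CreateWorkspaceRequest;
    import com.databricks.sdk.service.provisioning.Workspace;
    import java.time.Duration;

    public class CreateWorkspaceExample {
      public static void main(String[] args) throws Exception {
        AccountClient account = new AccountClient();
        Workspace running =
            account.workspaces()
                .create(
                    new CreateWorkspaceRequest()
                        .setWorkspaceName("my-workspace")
                        .setAwsRegion("us-west-2")
                        .setCredentialsId("cred-1234")
                        .setStorageConfigurationId("storage-1234"))
                // Blocks, re-issuing GET until workspace_status reaches RUNNING or the timeout fires.
                .get(Duration.ofMinutes(20));
        System.out.println(running.getWorkspaceStatus());
      }
    }
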
You can share one customer-managed VPC with multiple workspaces in a single account. It is + * not required to create a new VPC for each workspace. However, you cannot reuse subnets or + * security groups between workspaces. If you plan to share one VPC with multiple workspaces, make + * sure you size your VPC and subnets accordingly. Because a Databricks Account API network + * configuration encapsulates this information, you cannot reuse a Databricks Account API network + * configuration across workspaces. + *

For information about how to create a new workspace with this API including error handling, + * see [Create a new workspace using the Account API]. + * + *

Important: Customer-managed VPCs, PrivateLink, and customer-managed keys are supported on a + * limited set of deployment and subscription types. If you have questions about availability, + * contact your Databricks representative. * - *

**Important**: This operation is asynchronous. A response with HTTP status code 200 means - * the request has been accepted and is in progress, but does not mean that the workspace deployed - * successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the - * workspace ID (`workspace_id`) field in the response to identify the new workspace and make - * repeated `GET` requests with the workspace ID and check its status. The workspace becomes - * available when the status changes to `RUNNING`. + *

This operation is available only if your account is on the E2 version of the platform or on + * a select custom plan that allows multiple workspaces per account. + * + *

[Create a new workspace using the Account API]: + * http://docs.databricks.com/administration-guide/account-api/new-workspace.html */ public Wait create(CreateWorkspaceRequest request) { Workspace response = impl.create(request); @@ -100,20 +124,13 @@ public Wait create(CreateWorkspaceRequest request) { response); } - public void delete(long workspaceId) { - delete(new DeleteWorkspaceRequest().setWorkspaceId(workspaceId)); + public Workspace delete(long workspaceId) { + return delete(new DeleteWorkspaceRequest().setWorkspaceId(workspaceId)); } - /** - * Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. - * However, it might take a few minutes for all workspaces resources to be deleted, depending on - * the size and number of workspace resources. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. - */ - public void delete(DeleteWorkspaceRequest request) { - impl.delete(request); + /** Deletes a Databricks workspace, specified by ID. */ + public Workspace delete(DeleteWorkspaceRequest request) { + return impl.delete(request); } public Workspace get(long workspaceId) { @@ -124,13 +141,9 @@ * Gets information including status for a Databricks workspace, specified by ID. In the response, * the `workspace_status` field indicates the current status. After initial workspace creation * (which is asynchronous), make repeated `GET` requests with the workspace ID and check its - * status. The workspace becomes available when the status changes to `RUNNING`. - *

For information about how to create a new workspace with this API **including error - * handling**, see [Create a new workspace using the Account API]. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. + * status. The workspace becomes available when the status changes to `RUNNING`. For information + * about how to create a new workspace with this API **including error handling**, see [Create a + * new workspace using the Account API]. * *

[Create a new workspace using the Account API]: * http://docs.databricks.com/administration-guide/account-api/new-workspace.html @@ -139,124 +152,18 @@ public Workspace get(GetWorkspaceRequest request) { return impl.get(request); } - /** - * Gets a list of all workspaces associated with an account, specified by ID. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. - */ + /** Lists Databricks workspaces for an account. */ public Iterable<Workspace> list() { return impl.list(); } - /** - * Updates a workspace configuration for either a running workspace or a failed workspace. The - * elements that can be updated vary between these two use cases. - *

### Update a failed workspace You can update a Databricks workspace configuration for failed - * workspace deployment for some fields, but not all fields. For a failed workspace, this request - * supports updates to the following fields only: - Credential configuration ID - Storage - * configuration ID - Network configuration ID. Used only to add or change a network configuration - * for a customer-managed VPC. For a failed workspace only, you can convert a workspace with - * Databricks-managed VPC to use a customer-managed VPC by adding this ID. You cannot downgrade a - * workspace with a customer-managed VPC to be a Databricks-managed VPC. You can update the - * network configuration for a failed or running workspace to add PrivateLink support, though you - * must also add a private access settings object. - Key configuration ID for managed services - * (control plane storage, such as notebook source and Databricks SQL queries). Used only if you - * use customer-managed keys for managed services. - Key configuration ID for workspace storage - * (root S3 bucket and, optionally, EBS volumes). Used only if you use customer-managed keys for - * workspace storage. **Important**: If the workspace was ever in the running state, even if - * briefly before becoming a failed workspace, you cannot add a new key configuration ID for - * workspace storage. - Private access settings ID to add PrivateLink support. You can add or - * update the private access settings ID to upgrade a workspace to add support for front-end, - * back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end - * or back-end PrivateLink support on a workspace. - Custom tags. Given you provide an empty - * custom tags, the update would not be applied. - Network connectivity configuration ID to add - * serverless stable IP support. You can add or update the network connectivity configuration ID - * to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. - * You cannot remove a network connectivity configuration from the workspace once attached, you - * can only switch to another one. - * - *

After calling the `PATCH` operation to update the workspace configuration, make repeated - * `GET` requests with the workspace ID and check the workspace status. The workspace is - * successful if the status changes to `RUNNING`. - * - *

For information about how to create a new workspace with this API **including error - * handling**, see [Create a new workspace using the Account API]. - * - *

### Update a running workspace You can update a Databricks workspace configuration for - * running workspaces for some fields, but not all fields. For a running workspace, this request - * supports updating the following fields only: - Credential configuration ID - Network - * configuration ID. Used only if you already use a customer-managed VPC. You cannot convert a - * running workspace from a Databricks-managed VPC to a customer-managed VPC. You can use a - * network configuration update in this API for a failed or running workspace to add support for - * PrivateLink, although you also need to add a private access settings object. - Key - * configuration ID for managed services (control plane storage, such as notebook source and - * Databricks SQL queries). Databricks does not directly encrypt the data with the - * customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) - * that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the - * DEK to encrypt your workspace's managed services persisted data. If the workspace does not - * already have a CMK for managed services, adding this ID enables managed services encryption for - * new or updated data. Existing managed services data that existed before adding the key remains - * not encrypted with the DEK until it is modified. If the workspace already has customer-managed - * keys for managed services, this request rotates (changes) the CMK keys and the DEK is - * re-encrypted with the DMK and the new CMK. - Key configuration ID for workspace storage (root - * S3 bucket and, optionally, EBS volumes). You can set this only if the workspace does not - * already have a customer-managed key configuration for workspace storage. - Private access - * settings ID to add PrivateLink support. You can add or update the private access settings ID to - * upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You - * cannot remove (downgrade) any existing front-end or back-end PrivateLink support on a - * workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - * applied. - Network connectivity configuration ID to add serverless stable IP support. You can - * add or update the network connectivity configuration ID to ensure the workspace uses the same - * set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity - * configuration from the workspace once attached, you can only switch to another one. - * - *

**Important**: To update a running workspace, your workspace must have no running compute - * resources that run in your workspace's VPC in the Classic data plane. For example, stop all - * all-purpose clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If - * you do not terminate all cluster instances in the workspace before calling this API, the - * request will fail. - * - *

### Wait until changes take effect. After calling the `PATCH` operation to update the - * workspace configuration, make repeated `GET` requests with the workspace ID and check the - * workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, - * the workspace status becomes `PROVISIONING` temporarily (typically under 20 minutes). If the - * workspace update is successful, the workspace status changes to `RUNNING`. Note that you can - * also check the workspace status in the [Account Console]. However, you cannot use or create - * clusters for another 20 minutes after that status change. This results in a total of up to 40 - * minutes in which you cannot create clusters. If you create or use clusters before this time - * interval elapses, clusters do not launch successfully, fail, or could cause other unexpected - * behavior. * For workspaces with a customer-managed VPC, the workspace status stays at status - * `RUNNING` and the VPC change happens immediately. A change to the storage customer-managed key - * configuration ID might take a few minutes to update, so continue to check the workspace until - * you observe that it has been updated. If the update fails, the workspace might revert silently - * to its original configuration. After the workspace has been updated, you cannot use or create - * clusters for another 20 minutes. If you create or use clusters before this time interval - * elapses, clusters do not launch successfully, fail, or could cause other unexpected behavior. - * - *

If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the - * changes to fully take effect. During the 20 minute wait, it is important that you stop all REST - * API calls to the DBFS API. If you are modifying _only the managed services key configuration_, - * you can omit the 20 minute wait. - * - *

**Important**: Customer-managed keys and customer-managed VPCs are supported by only some - * deployment types and subscription types. If you have questions about availability, contact your - * Databricks representative. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. - * - *

[Account Console]: - * https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - * [Create a new workspace using the Account API]: - * http://docs.databricks.com/administration-guide/account-api/new-workspace.html - */ - public Wait update(UpdateWorkspaceRequest request) { - impl.update(request); + /** Updates a workspace. */ + public Wait update(UpdateWorkspaceRequest request) { + Workspace response = impl.update(request); return new Wait<>( (timeout, callback) -> - waitGetWorkspaceRunning(request.getWorkspaceId(), timeout, callback)); + waitGetWorkspaceRunning(response.getWorkspaceId(), timeout, callback), + response); } public WorkspacesService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java index 61557b7dd..f07945f8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java @@ -32,7 +32,7 @@ public Workspace create(CreateWorkspaceRequest request) { } @Override - public void delete(DeleteWorkspaceRequest request) { + public Workspace delete(DeleteWorkspaceRequest request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s", @@ -41,7 +41,7 @@ public void delete(DeleteWorkspaceRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, Workspace.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -72,17 +72,18 @@ public Collection list() { } @Override - public void update(UpdateWorkspaceRequest request) { + public Workspace update(UpdateWorkspaceRequest request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s", apiClient.configuredAccountID(), request.getWorkspaceId()); try { - Request req = new Request("PATCH", path, apiClient.serialize(request)); + Request req = + new Request("PATCH", path, apiClient.serialize(request.getCustomerFacingWorkspace())); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, Void.class); + return apiClient.execute(req, Workspace.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java index 7679504a0..8388eeb18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java @@ -20,154 +20,60 @@ @Generated public interface WorkspacesService { /** - * Creates a new workspace. - * - *

**Important**: This operation is asynchronous. A response with HTTP status code 200 means - * the request has been accepted and is in progress, but does not mean that the workspace deployed - * successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the - * workspace ID (`workspace_id`) field in the response to identify the new workspace and make - * repeated `GET` requests with the workspace ID and check its status. The workspace becomes - * available when the status changes to `RUNNING`. - */ - Workspace create(CreateWorkspaceRequest createWorkspaceRequest); - - /** - * Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. - * However, it might take a few minutes for all workspaces resources to be deleted, depending on - * the size and number of workspace resources. + * Creates a new workspace using a credential configuration and a storage configuration, an + * optional network configuration (if using a customer-managed VPC), an optional managed services + * key configuration (if using customer-managed keys for managed services), and an optional + * storage key configuration (if using customer-managed keys for storage). The key configurations + * used for managed services and storage encryption can be the same or different. + * + *

Important: This operation is asynchronous. A response with HTTP status code 200 means the + * request has been accepted and is in progress, but does not mean that the workspace deployed + * successfully and is running. The initial workspace status is typically PROVISIONING. Use the + * workspace ID (workspace_id) field in the response to identify the new workspace and make + * repeated GET requests with the workspace ID and check its status. The workspace becomes + * available when the status changes to RUNNING. + * + *
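For illustration only (not part of this diff): a minimal sketch of the create-then-poll flow described above, using the SDK's AccountClient wrapper. The workspace name and configuration IDs are placeholders, and the blocking Wait.get call is assumed to perform the repeated GET polling until the status reaches RUNNING; the reworked update method in this diff returns a Wait that resolves the same way.

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.provisioning.CreateWorkspaceRequest;
import com.databricks.sdk.service.provisioning.Workspace;
import java.time.Duration;

public class CreateWorkspaceExample {
  public static void main(String[] args) throws Exception {
    AccountClient account = new AccountClient(); // account-level auth from the environment
    Workspace ws =
        account.workspaces()
            .create(
                new CreateWorkspaceRequest()
                    .setWorkspaceName("my-workspace") // placeholder name
                    .setCredentialsId("<credential-configuration-id>") // placeholder IDs
                    .setStorageConfigurationId("<storage-configuration-id>"))
            .get(Duration.ofMinutes(20)); // repeated GETs until workspace_status is RUNNING
    System.out.println(ws.getWorkspaceId() + " -> " + ws.getWorkspaceStatus());
  }
}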

You can share one customer-managed VPC with multiple workspaces in a single account. It is + * not required to create a new VPC for each workspace. However, you cannot reuse subnets or + * Security Groups between workspaces. If you plan to share one VPC with multiple workspaces, make + * sure you size your VPC and subnets accordingly. Because a Databricks Account API network + * configuration encapsulates this information, you cannot reuse a Databricks Account API network + * configuration across workspaces. + * + *

For information about how to create a new workspace with this API including error handling, + * see [Create a new workspace using the Account API]. + * + *

Important: Customer-managed VPCs, PrivateLink, and customer-managed keys are supported on a + * limited set of deployment and subscription types. If you have questions about availability, + * contact your Databricks representative. * *

This operation is available only if your account is on the E2 version of the platform or on * a select custom plan that allows multiple workspaces per account. + * + *

[Create a new workspace using the Account API]: + * http://docs.databricks.com/administration-guide/account-api/new-workspace.html */ - void delete(DeleteWorkspaceRequest deleteWorkspaceRequest); + Workspace create(CreateWorkspaceRequest createWorkspaceRequest); + + /** Deletes a Databricks workspace, specified by ID. */ + Workspace delete(DeleteWorkspaceRequest deleteWorkspaceRequest); /** * Gets information including status for a Databricks workspace, specified by ID. In the response, * the `workspace_status` field indicates the current status. After initial workspace creation * (which is asynchronous), make repeated `GET` requests with the workspace ID and check its - * status. The workspace becomes available when the status changes to `RUNNING`. - * - *

For information about how to create a new workspace with this API **including error - * handling**, see [Create a new workspace using the Account API]. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. + * status. The workspace becomes available when the status changes to `RUNNING`. For information + * about how to create a new workspace with this API **including error handling**, see [Create a + * new workspace using the Account API]. * *
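The repeated `GET` polling can also be done by hand with the get method shown below; a minimal sketch, assuming a placeholder workspace ID (real code should cap the retry loop rather than polling forever):

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.provisioning.GetWorkspaceRequest;
import com.databricks.sdk.service.provisioning.Workspace;
import com.databricks.sdk.service.provisioning.WorkspaceStatus;

public class PollWorkspaceExample {
  public static void main(String[] args) throws InterruptedException {
    AccountClient account = new AccountClient();
    long workspaceId = 1234567890L; // placeholder workspace ID
    Workspace ws = account.workspaces().get(new GetWorkspaceRequest().setWorkspaceId(workspaceId));
    while (ws.getWorkspaceStatus() != WorkspaceStatus.RUNNING) {
      Thread.sleep(10_000L); // pause between the repeated GET requests
      ws = account.workspaces().get(new GetWorkspaceRequest().setWorkspaceId(workspaceId));
    }
  }
}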

[Create a new workspace using the Account API]: * http://docs.databricks.com/administration-guide/account-api/new-workspace.html */ Workspace get(GetWorkspaceRequest getWorkspaceRequest); - /** - * Gets a list of all workspaces associated with an account, specified by ID. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. - */ + /** Lists Databricks workspaces for an account. */ Collection list(); - /** - * Updates a workspace configuration for either a running workspace or a failed workspace. The - * elements that can be updated varies between these two use cases. - * - *

### Update a failed workspace You can update a Databricks workspace configuration for failed - * workspace deployment for some fields, but not all fields. For a failed workspace, this request - * supports updates to the following fields only: - Credential configuration ID - Storage - * configuration ID - Network configuration ID. Used only to add or change a network configuration - * for a customer-managed VPC. For a failed workspace only, you can convert a workspace with - * Databricks-managed VPC to use a customer-managed VPC by adding this ID. You cannot downgrade a - * workspace with a customer-managed VPC to be a Databricks-managed VPC. You can update the - * network configuration for a failed or running workspace to add PrivateLink support, though you - * must also add a private access settings object. - Key configuration ID for managed services - * (control plane storage, such as notebook source and Databricks SQL queries). Used only if you - * use customer-managed keys for managed services. - Key configuration ID for workspace storage - * (root S3 bucket and, optionally, EBS volumes). Used only if you use customer-managed keys for - * workspace storage. **Important**: If the workspace was ever in the running state, even if - * briefly before becoming a failed workspace, you cannot add a new key configuration ID for - * workspace storage. - Private access settings ID to add PrivateLink support. You can add or - * update the private access settings ID to upgrade a workspace to add support for front-end, - * back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end - * or back-end PrivateLink support on a workspace. - Custom tags. Given you provide an empty - * custom tags, the update would not be applied. - Network connectivity configuration ID to add - * serverless stable IP support. You can add or update the network connectivity configuration ID - * to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. - * You cannot remove a network connectivity configuration from the workspace once attached, you - * can only switch to another one. - * - *

After calling the `PATCH` operation to update the workspace configuration, make repeated - * `GET` requests with the workspace ID and check the workspace status. The workspace is - * successful if the status changes to `RUNNING`. - * - *

For information about how to create a new workspace with this API **including error - * handling**, see [Create a new workspace using the Account API]. - * - *

### Update a running workspace You can update a Databricks workspace configuration for - * running workspaces for some fields, but not all fields. For a running workspace, this request - * supports updating the following fields only: - Credential configuration ID - Network - * configuration ID. Used only if you already use a customer-managed VPC. You cannot convert a - * running workspace from a Databricks-managed VPC to a customer-managed VPC. You can use a - * network configuration update in this API for a failed or running workspace to add support for - * PrivateLink, although you also need to add a private access settings object. - Key - * configuration ID for managed services (control plane storage, such as notebook source and - * Databricks SQL queries). Databricks does not directly encrypt the data with the - * customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) - * that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the - * DEK to encrypt your workspace's managed services persisted data. If the workspace does not - * already have a CMK for managed services, adding this ID enables managed services encryption for - * new or updated data. Existing managed services data that existed before adding the key remains - * not encrypted with the DEK until it is modified. If the workspace already has customer-managed - * keys for managed services, this request rotates (changes) the CMK keys and the DEK is - * re-encrypted with the DMK and the new CMK. - Key configuration ID for workspace storage (root - * S3 bucket and, optionally, EBS volumes). You can set this only if the workspace does not - * already have a customer-managed key configuration for workspace storage. - Private access - * settings ID to add PrivateLink support. You can add or update the private access settings ID to - * upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You - * cannot remove (downgrade) any existing front-end or back-end PrivateLink support on a - * workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - * applied. - Network connectivity configuration ID to add serverless stable IP support. You can - * add or update the network connectivity configuration ID to ensure the workspace uses the same - * set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity - * configuration from the workspace once attached, you can only switch to another one. - * - *

**Important**: To update a running workspace, your workspace must have no running compute - * resources that run in your workspace's VPC in the Classic data plane. For example, stop all - * all-purpose clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If - * you do not terminate all cluster instances in the workspace before calling this API, the - * request will fail. - * - *

### Wait until changes take effect. After calling the `PATCH` operation to update the - * workspace configuration, make repeated `GET` requests with the workspace ID and check the - * workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, - * the workspace status becomes `PROVISIONING` temporarily (typically under 20 minutes). If the - * workspace update is successful, the workspace status changes to `RUNNING`. Note that you can - * also check the workspace status in the [Account Console]. However, you cannot use or create - * clusters for another 20 minutes after that status change. This results in a total of up to 40 - * minutes in which you cannot create clusters. If you create or use clusters before this time - * interval elapses, clusters do not launch successfully, fail, or could cause other unexpected - * behavior. * For workspaces with a customer-managed VPC, the workspace status stays at status - * `RUNNING` and the VPC change happens immediately. A change to the storage customer-managed key - * configuration ID might take a few minutes to update, so continue to check the workspace until - * you observe that it has been updated. If the update fails, the workspace might revert silently - * to its original configuration. After the workspace has been updated, you cannot use or create - * clusters for another 20 minutes. If you create or use clusters before this time interval - * elapses, clusters do not launch successfully, fail, or could cause other unexpected behavior. - * - *

If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the - * changes to fully take effect. During the 20 minute wait, it is important that you stop all REST - * API calls to the DBFS API. If you are modifying _only the managed services key configuration_, - * you can omit the 20 minute wait. - * - *

**Important**: Customer-managed keys and customer-managed VPCs are supported by only some - * deployment types and subscription types. If you have questions about availability, contact your - * Databricks representative. - * - *

This operation is available only if your account is on the E2 version of the platform or on - * a select custom plan that allows multiple workspaces per account. - * - *

[Account Console]: - * https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - * [Create a new workspace using the Account API]: - * http://docs.databricks.com/administration-guide/account-api/new-workspace.html - */ - void update(UpdateWorkspaceRequest updateWorkspaceRequest); + /** Updates a workspace. */ + Workspace update(UpdateWorkspaceRequest updateWorkspaceRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java index 5b14da636..88cd606f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java @@ -9,6 +9,10 @@ @Generated public class AnomalyDetectionConfig { + /** The type of the last run of the workflow. */ + @JsonProperty("job_type") + private AnomalyDetectionJobType jobType; + /** Run id of the last run of the workflow */ @JsonProperty("last_run_id") private String lastRunId; @@ -17,6 +21,15 @@ public class AnomalyDetectionConfig { @JsonProperty("latest_run_status") private AnomalyDetectionRunStatus latestRunStatus; + public AnomalyDetectionConfig setJobType(AnomalyDetectionJobType jobType) { + this.jobType = jobType; + return this; + } + + public AnomalyDetectionJobType getJobType() { + return jobType; + } + public AnomalyDetectionConfig setLastRunId(String lastRunId) { this.lastRunId = lastRunId; return this; @@ -40,18 +53,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; - return Objects.equals(lastRunId, that.lastRunId) + return Objects.equals(jobType, that.jobType) + && Objects.equals(lastRunId, that.lastRunId) && Objects.equals(latestRunStatus, that.latestRunStatus); } @Override public int hashCode() { - return Objects.hash(lastRunId, latestRunStatus); + return Objects.hash(jobType, lastRunId, latestRunStatus); } @Override public String toString() { return new ToStringer(AnomalyDetectionConfig.class) + .add("jobType", jobType) .add("lastRunId", lastRunId) .add("latestRunStatus", latestRunStatus) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java new file mode 100755 index 000000000..f54a53676 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AnomalyDetectionJobType { + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN, + ANOMALY_DETECTION_JOB_TYPE_NORMAL, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index ce08dd45a..509c6bbf1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -252,6 +252,12 @@ public Wait updateConfig( response); } + /** Updates the email and webhook notification settings for an endpoint. */ + public UpdateInferenceEndpointNotificationsResponse updateNotifications( + UpdateInferenceEndpointNotifications request) { + return impl.updateNotifications(request); + } + /** * Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from * their root object. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java index 65de06770..db7231266 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java @@ -270,6 +270,21 @@ public ServingEndpointDetailed updateConfig(EndpointCoreConfigInput request) { } } + @Override + public UpdateInferenceEndpointNotificationsResponse updateNotifications( + UpdateInferenceEndpointNotifications request) { + String path = String.format("/api/2.0/serving-endpoints/%s/notifications", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateInferenceEndpointNotificationsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ServingEndpointPermissions updatePermissions(ServingEndpointPermissionsRequest request) { String path = diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java index b3d0a60cc..b6c99591d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java @@ -99,6 +99,10 @@ ServingEndpointPermissions setPermissions( */ ServingEndpointDetailed updateConfig(EndpointCoreConfigInput endpointCoreConfigInput); + /** Updates the email and webhook notification settings for an endpoint. */ + UpdateInferenceEndpointNotificationsResponse updateNotifications( + UpdateInferenceEndpointNotifications updateInferenceEndpointNotifications); + /** * Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from * their root object. 
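For illustration only (not part of this diff): a minimal sketch of the new updateNotifications operation added above. The endpoint name is a placeholder, and the EmailNotifications payload is left empty here; its fields are whatever the serving EmailNotifications class defines.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.serving.EmailNotifications;
import com.databricks.sdk.service.serving.UpdateInferenceEndpointNotifications;
import com.databricks.sdk.service.serving.UpdateInferenceEndpointNotificationsResponse;

public class UpdateNotificationsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Issues PATCH /api/2.0/serving-endpoints/<name>/notifications with the new settings.
    UpdateInferenceEndpointNotificationsResponse resp =
        w.servingEndpoints()
            .updateNotifications(
                new UpdateInferenceEndpointNotifications()
                    .setName("my-endpoint") // placeholder endpoint name
                    .setEmailNotifications(new EmailNotifications())); // fields omitted here
    System.out.println(resp.getName());
  }
}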
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotifications.java new file mode 100755 index 000000000..181da16f8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotifications.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateInferenceEndpointNotifications { + /** + * The email notification settings to update. Specify email addresses to notify when endpoint + * state changes occur. + */ + @JsonProperty("email_notifications") + private EmailNotifications emailNotifications; + + /** + * The name of the serving endpoint whose notifications are being updated. This field is required. + */ + @JsonIgnore private String name; + + public UpdateInferenceEndpointNotifications setEmailNotifications( + EmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public EmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public UpdateInferenceEndpointNotifications setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateInferenceEndpointNotifications that = (UpdateInferenceEndpointNotifications) o; + return Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(emailNotifications, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateInferenceEndpointNotifications.class) + .add("emailNotifications", emailNotifications) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotificationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotificationsResponse.java new file mode 100755 index 000000000..1ba0b7e66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateInferenceEndpointNotificationsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateInferenceEndpointNotificationsResponse { + /** */ + @JsonProperty("email_notifications") + private EmailNotifications emailNotifications; + + /** */ + @JsonProperty("name") + private String name; + + public UpdateInferenceEndpointNotificationsResponse setEmailNotifications( + EmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public EmailNotifications getEmailNotifications() { + return emailNotifications; + } + + public UpdateInferenceEndpointNotificationsResponse setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateInferenceEndpointNotificationsResponse that = + (UpdateInferenceEndpointNotificationsResponse) o; + return Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(emailNotifications, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateInferenceEndpointNotificationsResponse.class) + .add("emailNotifications", emailNotifications) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java index 133a6b3ae..d9a3105be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java @@ -13,6 +13,7 @@ public enum ComplianceStandard { FEDRAMP_IL5, FEDRAMP_MODERATE, GERMANY_C5, + GERMANY_TISAX, HIPAA, HITRUST, IRAP_PROTECTED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java index 51630a687..063926344 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java @@ -30,6 +30,10 @@ public class CreatePrivateEndpointRule { @JsonProperty("endpoint_service") private String endpointService; + /** */ + @JsonProperty("gcp_endpoint_spec") + private GcpEndpointSpec gcpEndpointSpec; + /** * Not used by customer-managed private endpoint services. 
* @@ -72,6 +76,15 @@ public String getEndpointService() { return endpointService; } + public CreatePrivateEndpointRule setGcpEndpointSpec(GcpEndpointSpec gcpEndpointSpec) { + this.gcpEndpointSpec = gcpEndpointSpec; + return this; + } + + public GcpEndpointSpec getGcpEndpointSpec() { + return gcpEndpointSpec; + } + public CreatePrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -106,6 +119,7 @@ public boolean equals(Object o) { CreatePrivateEndpointRule that = (CreatePrivateEndpointRule) o; return Objects.equals(domainNames, that.domainNames) && Objects.equals(endpointService, that.endpointService) + && Objects.equals(gcpEndpointSpec, that.gcpEndpointSpec) && Objects.equals(groupId, that.groupId) && Objects.equals(resourceId, that.resourceId) && Objects.equals(resourceNames, that.resourceNames); @@ -113,7 +127,8 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(domainNames, endpointService, groupId, resourceId, resourceNames); + return Objects.hash( + domainNames, endpointService, gcpEndpointSpec, groupId, resourceId, resourceNames); } @Override @@ -121,6 +136,7 @@ public String toString() { return new ToStringer(CreatePrivateEndpointRule.class) .add("domainNames", domainNames) .add("endpointService", endpointService) + .add("gcpEndpointSpec", gcpEndpointSpec) .add("groupId", groupId) .add("resourceId", resourceId) .add("resourceNames", resourceNames) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpointSpec.java new file mode 100755 index 000000000..44c8379f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpointSpec.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GcpEndpointSpec { + /** Output only. The URI of the created PSC endpoint. */ + @JsonProperty("psc_endpoint_uri") + private String pscEndpointUri; + + /** + * The full url of the target service attachment. 
Example: + * projects/my-gcp-project/regions/us-east4/serviceAttachments/my-service-attachment + */ + @JsonProperty("service_attachment") + private String serviceAttachment; + + public GcpEndpointSpec setPscEndpointUri(String pscEndpointUri) { + this.pscEndpointUri = pscEndpointUri; + return this; + } + + public String getPscEndpointUri() { + return pscEndpointUri; + } + + public GcpEndpointSpec setServiceAttachment(String serviceAttachment) { + this.serviceAttachment = serviceAttachment; + return this; + } + + public String getServiceAttachment() { + return serviceAttachment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpEndpointSpec that = (GcpEndpointSpec) o; + return Objects.equals(pscEndpointUri, that.pscEndpointUri) + && Objects.equals(serviceAttachment, that.serviceAttachment); + } + + @Override + public int hashCode() { + return Objects.hash(pscEndpointUri, serviceAttachment); + } + + @Override + public String toString() { + return new ToStringer(GcpEndpointSpec.class) + .add("pscEndpointUri", pscEndpointUri) + .add("serviceAttachment", serviceAttachment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java index d4f40b77f..1e9ae30e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java @@ -18,6 +18,10 @@ public class NccEgressDefaultRules { @JsonProperty("azure_service_endpoint_rule") private NccAzureServiceEndpointRule azureServiceEndpointRule; + /** */ + @JsonProperty("gcp_project_id_rule") + private NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule gcpProjectIdRule; + public NccEgressDefaultRules setAwsStableIpRule(NccAwsStableIpRule awsStableIpRule) { this.awsStableIpRule = awsStableIpRule; return this; @@ -37,18 +41,29 @@ public NccAzureServiceEndpointRule getAzureServiceEndpointRule() { return azureServiceEndpointRule; } + public NccEgressDefaultRules setGcpProjectIdRule( + NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule gcpProjectIdRule) { + this.gcpProjectIdRule = gcpProjectIdRule; + return this; + } + + public NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule getGcpProjectIdRule() { + return gcpProjectIdRule; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NccEgressDefaultRules that = (NccEgressDefaultRules) o; return Objects.equals(awsStableIpRule, that.awsStableIpRule) - && Objects.equals(azureServiceEndpointRule, that.azureServiceEndpointRule); + && Objects.equals(azureServiceEndpointRule, that.azureServiceEndpointRule) + && Objects.equals(gcpProjectIdRule, that.gcpProjectIdRule); } @Override public int hashCode() { - return Objects.hash(awsStableIpRule, azureServiceEndpointRule); + return Objects.hash(awsStableIpRule, azureServiceEndpointRule, gcpProjectIdRule); } @Override @@ -56,6 +71,7 @@ public String toString() { return new ToStringer(NccEgressDefaultRules.class) .add("awsStableIpRule", awsStableIpRule) .add("azureServiceEndpointRule", azureServiceEndpointRule) + .add("gcpProjectIdRule", gcpProjectIdRule) .toString(); } } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java index cf51cf09f..353ece87c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java @@ -72,6 +72,10 @@ public class NccPrivateEndpointRule { @JsonProperty("endpoint_service") private String endpointService; + /** */ + @JsonProperty("gcp_endpoint_spec") + private GcpEndpointSpec gcpEndpointSpec; + /** * Not used by customer-managed private endpoint services. * @@ -200,6 +204,15 @@ public String getEndpointService() { return endpointService; } + public NccPrivateEndpointRule setGcpEndpointSpec(GcpEndpointSpec gcpEndpointSpec) { + this.gcpEndpointSpec = gcpEndpointSpec; + return this; + } + + public GcpEndpointSpec getGcpEndpointSpec() { + return gcpEndpointSpec; + } + public NccPrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -277,6 +290,7 @@ public boolean equals(Object o) { && Objects.equals(enabled, that.enabled) && Objects.equals(endpointName, that.endpointName) && Objects.equals(endpointService, that.endpointService) + && Objects.equals(gcpEndpointSpec, that.gcpEndpointSpec) && Objects.equals(groupId, that.groupId) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(resourceId, that.resourceId) @@ -298,6 +312,7 @@ public int hashCode() { enabled, endpointName, endpointService, + gcpEndpointSpec, groupId, networkConnectivityConfigId, resourceId, @@ -319,6 +334,7 @@ public String toString() { .add("enabled", enabled) .add("endpointName", endpointName) .add("endpointService", endpointService) + .add("gcpEndpointSpec", gcpEndpointSpec) .add("groupId", groupId) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("resourceId", resourceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.java new file mode 100755 index 000000000..f5fd2ed39 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule { + /** + * A list of Databricks internal project IDs from where network access originates for serverless + * DBSQL. This list is stable and will not change once the NCC object is created.
+ */ + @JsonProperty("project_ids") + private Collection projectIds; + + public NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule setProjectIds( + Collection projectIds) { + this.projectIds = projectIds; + return this; + } + + public Collection getProjectIds() { + return projectIds; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule that = + (NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule) o; + return Objects.equals(projectIds, that.projectIds); + } + + @Override + public int hashCode() { + return Objects.hash(projectIds); + } + + @Override + public String toString() { + return new ToStringer(NetworkConnectivityConfigEgressConfigDefaultRuleGcpProjectIdRule.class) + .add("projectIds", projectIds) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java index 94975cd2e..eb94c2b55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java @@ -32,6 +32,10 @@ public class UpdatePrivateEndpointRule { @JsonProperty("enabled") private Boolean enabled; + /** */ + @JsonProperty("gcp_endpoint_spec") + private GcpEndpointSpec gcpEndpointSpec; + /** * Only used by private endpoints towards AWS S3 service. * @@ -61,6 +65,15 @@ public Boolean getEnabled() { return enabled; } + public UpdatePrivateEndpointRule setGcpEndpointSpec(GcpEndpointSpec gcpEndpointSpec) { + this.gcpEndpointSpec = gcpEndpointSpec; + return this; + } + + public GcpEndpointSpec getGcpEndpointSpec() { + return gcpEndpointSpec; + } + public UpdatePrivateEndpointRule setResourceNames(Collection resourceNames) { this.resourceNames = resourceNames; return this; @@ -77,12 +90,13 @@ public boolean equals(Object o) { UpdatePrivateEndpointRule that = (UpdatePrivateEndpointRule) o; return Objects.equals(domainNames, that.domainNames) && Objects.equals(enabled, that.enabled) + && Objects.equals(gcpEndpointSpec, that.gcpEndpointSpec) && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames, enabled, resourceNames); + return Objects.hash(domainNames, enabled, gcpEndpointSpec, resourceNames); } @Override @@ -90,6 +104,7 @@ public String toString() { return new ToStringer(UpdatePrivateEndpointRule.class) .add("domainNames", domainNames) .add("enabled", enabled) + .add("gcpEndpointSpec", gcpEndpointSpec) .add("resourceNames", resourceNames) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java index 012bc8f99..5f53dde84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java @@ -17,6 +17,13 @@ public class CreateShare { @JsonProperty("name") private String name; + /** + * Serverless budget policy id (can only be created/updated when calling data-sharing service) + * [Create,Update:IGN] + */ + @JsonProperty("serverless_budget_policy_id") + private String serverlessBudgetPolicyId; 
+ /** Storage root URL for the share. */ @JsonProperty("storage_root") private String storageRoot; @@ -39,6 +46,15 @@ public String getName() { return name; } + public CreateShare setServerlessBudgetPolicyId(String serverlessBudgetPolicyId) { + this.serverlessBudgetPolicyId = serverlessBudgetPolicyId; + return this; + } + + public String getServerlessBudgetPolicyId() { + return serverlessBudgetPolicyId; + } + public CreateShare setStorageRoot(String storageRoot) { this.storageRoot = storageRoot; return this; @@ -55,12 +71,13 @@ public boolean equals(Object o) { CreateShare that = (CreateShare) o; return Objects.equals(comment, that.comment) && Objects.equals(name, that.name) + && Objects.equals(serverlessBudgetPolicyId, that.serverlessBudgetPolicyId) && Objects.equals(storageRoot, that.storageRoot); } @Override public int hashCode() { - return Objects.hash(comment, name, storageRoot); + return Objects.hash(comment, name, serverlessBudgetPolicyId, storageRoot); } @Override @@ -68,6 +85,7 @@ public String toString() { return new ToStringer(CreateShare.class) .add("comment", comment) .add("name", name) + .add("serverlessBudgetPolicyId", serverlessBudgetPolicyId) .add("storageRoot", storageRoot) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java index 4467ec7fd..89a7ccfdc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java @@ -21,6 +21,18 @@ public class PermissionsChange { @JsonProperty("principal") private String principal; + /** + * An opaque internal ID that identifies the principal whose privileges should be removed. + * + *

This field is intended for removing privileges associated with a deleted user. When set, + * only the entries specified in the remove field are processed; any entries in the add field will + * be rejected. + * + *
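For illustration only (not part of this diff): a minimal sketch of a removal-only change addressed by principal_id. The ID and privilege string are placeholders, and, per the constraint stated in the next paragraph, principal is left unset when principal_id is used.

import com.databricks.sdk.service.sharing.PermissionsChange;
import java.util.Arrays;

public class RemoveDeletedPrincipalExample {
  public static void main(String[] args) {
    PermissionsChange change =
        new PermissionsChange()
            .setPrincipalId(1234567890L) // placeholder opaque ID of the deleted principal
            .setRemove(Arrays.asList("SELECT")); // only 'remove' entries are processed here
    System.out.println(change);
  }
}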

Only one of principal or principal_id should be specified, never both at the same time. + */ + @JsonProperty("principal_id") + private Long principalId; + /** The set of privileges to remove. */ @JsonProperty("remove") private Collection remove; @@ -43,6 +55,15 @@ public String getPrincipal() { return principal; } + public PermissionsChange setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + public PermissionsChange setRemove(Collection remove) { this.remove = remove; return this; @@ -59,12 +80,13 @@ public boolean equals(Object o) { PermissionsChange that = (PermissionsChange) o; return Objects.equals(add, that.add) && Objects.equals(principal, that.principal) + && Objects.equals(principalId, that.principalId) && Objects.equals(remove, that.remove); } @Override public int hashCode() { - return Objects.hash(add, principal, remove); + return Objects.hash(add, principal, principalId, remove); } @Override @@ -72,6 +94,7 @@ public String toString() { return new ToStringer(PermissionsChange.class) .add("add", add) .add("principal", principal) + .add("principalId", principalId) .add("remove", remove) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java index c3e2fcfae..365e263de 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java @@ -17,6 +17,13 @@ public class PrivilegeAssignment { @JsonProperty("principal") private String principal; + /** + * Unique identifier of the principal. For active principals, both `principal` and `principal_id` + * are present. + */ + @JsonProperty("principal_id") + private Long principalId; + /** The privileges assigned to the principal. 
*/ @JsonProperty("privileges") private Collection privileges; @@ -30,6 +37,15 @@ public String getPrincipal() { return principal; } + public PrivilegeAssignment setPrincipalId(Long principalId) { + this.principalId = principalId; + return this; + } + + public Long getPrincipalId() { + return principalId; + } + public PrivilegeAssignment setPrivileges(Collection privileges) { this.privileges = privileges; return this; @@ -44,18 +60,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PrivilegeAssignment that = (PrivilegeAssignment) o; - return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges); + return Objects.equals(principal, that.principal) + && Objects.equals(principalId, that.principalId) + && Objects.equals(privileges, that.privileges); } @Override public int hashCode() { - return Objects.hash(principal, privileges); + return Objects.hash(principal, principalId, privileges); } @Override public String toString() { return new ToStringer(PrivilegeAssignment.class) .add("principal", principal) + .add("principalId", principalId) .add("privileges", privileges) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java index 36deca691..f80fbc9ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.java @@ -34,6 +34,13 @@ public class ShareInfo { @JsonProperty("owner") private String owner; + /** + * Serverless budget policy id (can only be created/updated when calling data-sharing service) + * [Create,Update:IGN] + */ + @JsonProperty("serverless_budget_policy_id") + private String serverlessBudgetPolicyId; + /** Storage Location URL (full path) for the share. 
*/ @JsonProperty("storage_location") private String storageLocation; @@ -104,6 +111,15 @@ public String getOwner() { return owner; } + public ShareInfo setServerlessBudgetPolicyId(String serverlessBudgetPolicyId) { + this.serverlessBudgetPolicyId = serverlessBudgetPolicyId; + return this; + } + + public String getServerlessBudgetPolicyId() { + return serverlessBudgetPolicyId; + } + public ShareInfo setStorageLocation(String storageLocation) { this.storageLocation = storageLocation; return this; @@ -151,6 +167,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(objects, that.objects) && Objects.equals(owner, that.owner) + && Objects.equals(serverlessBudgetPolicyId, that.serverlessBudgetPolicyId) && Objects.equals(storageLocation, that.storageLocation) && Objects.equals(storageRoot, that.storageRoot) && Objects.equals(updatedAt, that.updatedAt) @@ -166,6 +183,7 @@ public int hashCode() { name, objects, owner, + serverlessBudgetPolicyId, storageLocation, storageRoot, updatedAt, @@ -181,6 +199,7 @@ public String toString() { .add("name", name) .add("objects", objects) .add("owner", owner) + .add("serverlessBudgetPolicyId", serverlessBudgetPolicyId) .add("storageLocation", storageLocation) .add("storageRoot", storageRoot) .add("updatedAt", updatedAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java index 3f225c8c7..1175d97f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; /** Internal information for D2D sharing that should not be disclosed to external users. */ @@ -14,6 +15,13 @@ public class TableInternalAttributes { @JsonProperty("auxiliary_managed_location") private String auxiliaryManagedLocation; + /** + * Storage locations of all table dependencies for shared views. Used on the recipient side for + * SEG (Secure Egress Gateway) whitelisting. 
+ */ + @JsonProperty("dependency_storage_locations") + private Collection dependencyStorageLocations; + /** * Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of * the parent UC entity's storage_location, following the same logic as getManagedEntityPath in @@ -46,6 +54,16 @@ public String getAuxiliaryManagedLocation() { return auxiliaryManagedLocation; } + public TableInternalAttributes setDependencyStorageLocations( + Collection dependencyStorageLocations) { + this.dependencyStorageLocations = dependencyStorageLocations; + return this; + } + + public Collection getDependencyStorageLocations() { + return dependencyStorageLocations; + } + public TableInternalAttributes setParentStorageLocation(String parentStorageLocation) { this.parentStorageLocation = parentStorageLocation; return this; @@ -88,6 +106,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; TableInternalAttributes that = (TableInternalAttributes) o; return Objects.equals(auxiliaryManagedLocation, that.auxiliaryManagedLocation) + && Objects.equals(dependencyStorageLocations, that.dependencyStorageLocations) && Objects.equals(parentStorageLocation, that.parentStorageLocation) && Objects.equals(storageLocation, that.storageLocation) && Objects.equals(typeValue, that.typeValue) @@ -98,6 +117,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( auxiliaryManagedLocation, + dependencyStorageLocations, parentStorageLocation, storageLocation, typeValue, @@ -108,6 +128,7 @@ public int hashCode() { public String toString() { return new ToStringer(TableInternalAttributes.class) .add("auxiliaryManagedLocation", auxiliaryManagedLocation) + .add("dependencyStorageLocations", dependencyStorageLocations) .add("parentStorageLocation", parentStorageLocation) .add("storageLocation", storageLocation) .add("typeValue", typeValue) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java index 088633347..3add066b0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java @@ -26,6 +26,13 @@ public class UpdateShare { @JsonProperty("owner") private String owner; + /** + * Serverless budget policy id (can only be created/updated when calling data-sharing service) + * [Create,Update:IGN] + */ + @JsonProperty("serverless_budget_policy_id") + private String serverlessBudgetPolicyId; + /** Storage root URL for the share. 
*/ @JsonProperty("storage_root") private String storageRoot; @@ -70,6 +77,15 @@ public String getOwner() { return owner; } + public UpdateShare setServerlessBudgetPolicyId(String serverlessBudgetPolicyId) { + this.serverlessBudgetPolicyId = serverlessBudgetPolicyId; + return this; + } + + public String getServerlessBudgetPolicyId() { + return serverlessBudgetPolicyId; + } + public UpdateShare setStorageRoot(String storageRoot) { this.storageRoot = storageRoot; return this; @@ -97,13 +113,15 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(newName, that.newName) && Objects.equals(owner, that.owner) + && Objects.equals(serverlessBudgetPolicyId, that.serverlessBudgetPolicyId) && Objects.equals(storageRoot, that.storageRoot) && Objects.equals(updates, that.updates); } @Override public int hashCode() { - return Objects.hash(comment, name, newName, owner, storageRoot, updates); + return Objects.hash( + comment, name, newName, owner, serverlessBudgetPolicyId, storageRoot, updates); } @Override @@ -113,6 +131,7 @@ public String toString() { .add("name", name) .add("newName", newName) .add("owner", owner) + .add("serverlessBudgetPolicyId", serverlessBudgetPolicyId) .add("storageRoot", storageRoot) .add("updates", updates) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java index 38cf96892..06d18cb7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java @@ -25,6 +25,10 @@ public class AlertV2 { @JsonProperty("display_name") private String displayName; + /** The actual workspace path of the folder containing the alert. This is an output-only field. */ + @JsonProperty("effective_parent_path") + private String effectiveParentPath; + /** * The actual identity that will be used to execute the alert. This is an output-only field that * shows the resolved run-as identity after applying permissions and defaults. 
@@ -126,6 +130,15 @@ public String getDisplayName() { return displayName; } + public AlertV2 setEffectiveParentPath(String effectiveParentPath) { + this.effectiveParentPath = effectiveParentPath; + return this; + } + + public String getEffectiveParentPath() { + return effectiveParentPath; + } + public AlertV2 setEffectiveRunAs(AlertV2RunAs effectiveRunAs) { this.effectiveRunAs = effectiveRunAs; return this; @@ -243,6 +256,7 @@ public boolean equals(Object o) { && Objects.equals(customDescription, that.customDescription) && Objects.equals(customSummary, that.customSummary) && Objects.equals(displayName, that.displayName) + && Objects.equals(effectiveParentPath, that.effectiveParentPath) && Objects.equals(effectiveRunAs, that.effectiveRunAs) && Objects.equals(evaluation, that.evaluation) && Objects.equals(id, that.id) @@ -264,6 +278,7 @@ public int hashCode() { customDescription, customSummary, displayName, + effectiveParentPath, effectiveRunAs, evaluation, id, @@ -285,6 +300,7 @@ public String toString() { .add("customDescription", customDescription) .add("customSummary", customSummary) .add("displayName", displayName) + .add("effectiveParentPath", effectiveParentPath) .add("effectiveRunAs", effectiveRunAs) .add("evaluation", evaluation) .add("id", id) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java index 6fa20c245..6583a95dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java @@ -10,6 +10,20 @@ @Generated public class AlertV2Notification { + /** + * The actual behavior of whether to notify alert subscribers when alert returns back to normal. + * This is an output-only field. + */ + @JsonProperty("effective_notify_on_ok") + private Boolean effectiveNotifyOnOk; + + /** + * The actual number of seconds an alert must wait after being triggered to rearm itself. This is + * an output-only field. + */ + @JsonProperty("effective_retrigger_seconds") + private Long effectiveRetriggerSeconds; + /** Whether to notify alert subscribers when alert returns back to normal. 
*/ @JsonProperty("notify_on_ok") private Boolean notifyOnOk; @@ -25,6 +39,24 @@ public class AlertV2Notification { @JsonProperty("subscriptions") private Collection subscriptions; + public AlertV2Notification setEffectiveNotifyOnOk(Boolean effectiveNotifyOnOk) { + this.effectiveNotifyOnOk = effectiveNotifyOnOk; + return this; + } + + public Boolean getEffectiveNotifyOnOk() { + return effectiveNotifyOnOk; + } + + public AlertV2Notification setEffectiveRetriggerSeconds(Long effectiveRetriggerSeconds) { + this.effectiveRetriggerSeconds = effectiveRetriggerSeconds; + return this; + } + + public Long getEffectiveRetriggerSeconds() { + return effectiveRetriggerSeconds; + } + public AlertV2Notification setNotifyOnOk(Boolean notifyOnOk) { this.notifyOnOk = notifyOnOk; return this; @@ -57,19 +89,28 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AlertV2Notification that = (AlertV2Notification) o; - return Objects.equals(notifyOnOk, that.notifyOnOk) + return Objects.equals(effectiveNotifyOnOk, that.effectiveNotifyOnOk) + && Objects.equals(effectiveRetriggerSeconds, that.effectiveRetriggerSeconds) + && Objects.equals(notifyOnOk, that.notifyOnOk) && Objects.equals(retriggerSeconds, that.retriggerSeconds) && Objects.equals(subscriptions, that.subscriptions); } @Override public int hashCode() { - return Objects.hash(notifyOnOk, retriggerSeconds, subscriptions); + return Objects.hash( + effectiveNotifyOnOk, + effectiveRetriggerSeconds, + notifyOnOk, + retriggerSeconds, + subscriptions); } @Override public String toString() { return new ToStringer(AlertV2Notification.class) + .add("effectiveNotifyOnOk", effectiveNotifyOnOk) + .add("effectiveRetriggerSeconds", effectiveRetriggerSeconds) .add("notifyOnOk", notifyOnOk) .add("retriggerSeconds", retriggerSeconds) .add("subscriptions", subscriptions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java index 1a58b0485..2595fc462 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java @@ -7,10 +7,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** - * Describes metadata for a particular chunk, within a result set; this structure is used both - * within a manifest, and when fetching individual chunk data or links. - */ @Generated public class BaseChunkInfo { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java index 71850c011..2c29ba7cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java @@ -7,14 +7,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Creates a new SQL warehouse. */ @Generated public class CreateWarehouseRequest { /** * The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) * before it is automatically stopped. * - *

Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins - * for non-serverless warehouses - 0 indicates no autostop. + *

Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. * *

Defaults to 120 mins */ @@ -59,7 +59,7 @@ public class CreateWarehouseRequest { /** * Maximum number of clusters that the autoscaler will create to handle concurrent queries. * - *

Supported values: - Must be >= min_num_clusters - Must be <= 30. + *

Supported values: - Must be >= min_num_clusters - Must be <= 40. * *

Defaults to min_clusters if unset. */ @@ -87,7 +87,7 @@ public class CreateWarehouseRequest { @JsonProperty("name") private String name; - /** */ + /** Configurations whether the endpoint should use spot instances. */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; @@ -100,7 +100,10 @@ public class CreateWarehouseRequest { @JsonProperty("tags") private EndpointTags tags; - /** */ + /** + * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to + * `PRO` and also set the field `enable_serverless_compute` to `true`. + */ @JsonProperty("warehouse_type") private CreateWarehouseRequestWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestWarehouseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestWarehouseType.java index f0b104ca6..03a31eee6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestWarehouseType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestWarehouseType.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - * and also set the field `enable_serverless_compute` to `true`. - */ @Generated public enum CreateWarehouseRequestWarehouseType { CLASSIC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java index d2245c416..69d9f448f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java @@ -9,6 +9,10 @@ @Generated public class CronSchedule { + /** The actual pause status of the schedule. This is an output-only field. */ + @JsonProperty("effective_pause_status") + private SchedulePauseStatus effectivePauseStatus; + /** Indicate whether this schedule is paused or not. 
*/ @JsonProperty("pause_status") private SchedulePauseStatus pauseStatus; @@ -30,6 +34,15 @@ public class CronSchedule { @JsonProperty("timezone_id") private String timezoneId; + public CronSchedule setEffectivePauseStatus(SchedulePauseStatus effectivePauseStatus) { + this.effectivePauseStatus = effectivePauseStatus; + return this; + } + + public SchedulePauseStatus getEffectivePauseStatus() { + return effectivePauseStatus; + } + public CronSchedule setPauseStatus(SchedulePauseStatus pauseStatus) { this.pauseStatus = pauseStatus; return this; @@ -62,19 +75,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CronSchedule that = (CronSchedule) o; - return Objects.equals(pauseStatus, that.pauseStatus) + return Objects.equals(effectivePauseStatus, that.effectivePauseStatus) + && Objects.equals(pauseStatus, that.pauseStatus) && Objects.equals(quartzCronSchedule, that.quartzCronSchedule) && Objects.equals(timezoneId, that.timezoneId); } @Override public int hashCode() { - return Objects.hash(pauseStatus, quartzCronSchedule, timezoneId); + return Objects.hash(effectivePauseStatus, pauseStatus, quartzCronSchedule, timezoneId); } @Override public String toString() { return new ToStringer(CronSchedule.class) + .add("effectivePauseStatus", effectivePauseStatus) .add("pauseStatus", pauseStatus) .add("quartzCronSchedule", quartzCronSchedule) .add("timezoneId", timezoneId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java index d07e8e25b..251b1555b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java @@ -8,6 +8,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * This is an incremental edit functionality, so all fields except id are optional. If a field is + * set, the corresponding configuration in the SQL warehouse is modified. If a field is unset, the + * existing configuration value in the SQL warehouse is retained. Thus, this API is not idempotent. + */ @Generated public class EditWarehouseRequest { /** @@ -48,7 +53,7 @@ public class EditWarehouseRequest { @JsonProperty("enable_photon") private Boolean enablePhoton; - /** Configures whether the warehouse should use serverless compute. */ + /** Configures whether the warehouse should use serverless compute */ @JsonProperty("enable_serverless_compute") private Boolean enableServerlessCompute; @@ -62,7 +67,7 @@ public class EditWarehouseRequest { /** * Maximum number of clusters that the autoscaler will create to handle concurrent queries. * - *

Supported values: - Must be >= min_num_clusters - Must be <= 30. + *

Supported values: - Must be >= min_num_clusters - Must be <= 40. * *

Defaults to min_clusters if unset. */ @@ -90,7 +95,7 @@ public class EditWarehouseRequest { @JsonProperty("name") private String name; - /** */ + /** Configurations whether the endpoint should use spot instances. */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; @@ -103,7 +108,10 @@ public class EditWarehouseRequest { @JsonProperty("tags") private EndpointTags tags; - /** */ + /** + * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to + * `PRO` and also set the field `enable_serverless_compute` to `true`. + */ @JsonProperty("warehouse_type") private EditWarehouseRequestWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java index 5c4337caa..b180bfd82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - * and also set the field `enable_serverless_compute` to `true`. - */ @Generated public enum EditWarehouseRequestWarehouseType { CLASSIC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java index 243675c94..5caff457b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java @@ -24,7 +24,7 @@ public class EndpointHealth { @JsonProperty("message") private String message; - /** */ + /** Health status of the endpoint. */ @JsonProperty("status") private Status status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java index 92868d39b..03ae34840 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java @@ -70,7 +70,7 @@ public class EndpointInfo { /** * Maximum number of clusters that the autoscaler will create to handle concurrent queries. * - *

Supported values: - Must be >= min_num_clusters - Must be <= 30. + *

Supported values: - Must be >= min_num_clusters - Must be <= 40. * *

Defaults to min_clusters if unset. */ @@ -110,11 +110,11 @@ public class EndpointInfo { @JsonProperty("odbc_params") private OdbcParams odbcParams; - /** */ + /** Configurations whether the endpoint should use spot instances. */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; - /** */ + /** state of the endpoint */ @JsonProperty("state") private State state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoWarehouseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoWarehouseType.java index 320369adf..498289a1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoWarehouseType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfoWarehouseType.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - * and also set the field `enable_serverless_compute` to `true`. - */ @Generated public enum EndpointInfoWarehouseType { CLASSIC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java index cc6231890..5aa72af6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java @@ -15,7 +15,7 @@ public class ExecuteStatementRequest { * data representations and might not match the final size in the requested `format`. If the * result was truncated due to the byte limit, then `truncated` in the response is set to `true`. * When using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if - * `byte_limit` is not explcitly set. + * `byte_limit` is not explicitly set. */ @JsonProperty("byte_limit") private Long byteLimit; @@ -29,7 +29,29 @@ public class ExecuteStatementRequest { @JsonProperty("catalog") private String catalog; - /** */ + /** + * The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. + * + *

Statements executed with `INLINE` disposition will return result data inline, in + * `JSON_ARRAY` format, in a series of chunks. If a given statement produces a result set with a + * size larger than 25 MiB, that statement execution is aborted, and no result set will be + * available. + * + *

**NOTE** Byte limits are computed based upon internal representations of the result set + * data, and might not match the sizes visible in JSON responses. + * + *

Statements executed with `EXTERNAL_LINKS` disposition will return result data as external + * links: URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` + * disposition allows statements to generate arbitrarily sized result sets for fetching up to 100 + * GiB. The resulting links have two important properties: + * + *

1. They point to resources _external_ to the Databricks compute; therefore any associated + * authentication information (typically a personal access token, OAuth token, or similar) _must + * be removed_ when fetching from these links. + * + *

2. These are URLs with a specific expiration, indicated in the response. The behavior when + * attempting to use an expired link is cloud specific. + */ @JsonProperty("disposition") private Disposition disposition; @@ -93,13 +115,13 @@ public class ExecuteStatementRequest { * *

For example, the following statement contains two parameters, `my_name` and `my_date`: * - *

SELECT * FROM my_table WHERE name = :my_name AND date = :my_date + *

``` SELECT * FROM my_table WHERE name = :my_name AND date = :my_date ``` * *

The parameters can be passed in the request body as follows: * - *

{ ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", + *

` { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", * "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": - * "2020-01-01", "type": "DATE" } ] } + * "2020-01-01", "type": "DATE" } ] } ` * *

Currently, positional parameters denoted by a `?` marker are not supported by the Databricks * SQL Statement Execution API. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java index 1b88216f2..569281981 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java @@ -28,7 +28,11 @@ public class ExternalLink { @JsonProperty("expiration") private String expiration; - /** */ + /** + * A URL pointing to a chunk of result data, hosted by an external service, with a short + * expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be + * considered sensitive and the client should not expose this URL in a log. + */ @JsonProperty("external_link") private String externalLink; @@ -44,7 +48,7 @@ public class ExternalLink { /** * When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are * no more chunks. The next chunk can be fetched with a - * :method:statementexecution/getStatementResultChunkN request. + * :method:statementexecution/getstatementresultchunkn request. */ @JsonProperty("next_chunk_index") private Long nextChunkIndex; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java index e7cb13ba1..6a789a483 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java @@ -70,7 +70,7 @@ public class GetWarehouseResponse { /** * Maximum number of clusters that the autoscaler will create to handle concurrent queries. * - *

Supported values: - Must be >= min_num_clusters - Must be <= 30. + *

Supported values: - Must be >= min_num_clusters - Must be <= 40. * *

Defaults to min_clusters if unset. */ @@ -110,11 +110,11 @@ public class GetWarehouseResponse { @JsonProperty("odbc_params") private OdbcParams odbcParams; - /** */ + /** Configurations whether the endpoint should use spot instances. */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; - /** */ + /** state of the endpoint */ @JsonProperty("state") private State state; @@ -127,7 +127,10 @@ public class GetWarehouseResponse { @JsonProperty("tags") private EndpointTags tags; - /** */ + /** + * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to + * `PRO` and also set the field `enable_serverless_compute` to `true`. + */ @JsonProperty("warehouse_type") private GetWarehouseResponseWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponseWarehouseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponseWarehouseType.java index 7e1ada451..ea11574db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponseWarehouseType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponseWarehouseType.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - * and also set the field `enable_serverless_compute` to `true`. - */ @Generated public enum GetWarehouseResponseWarehouseType { CLASSIC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java index 17524f5d2..50913917d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponse.java @@ -25,6 +25,10 @@ public class GetWorkspaceWarehouseConfigResponse { @JsonProperty("data_access_config") private Collection dataAccessConfig; + /** Enable Serverless compute for SQL warehouses */ + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + /** * List of Warehouse Types allowed in this workspace (limits allowed value of the type field in * CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be @@ -43,7 +47,10 @@ public class GetWorkspaceWarehouseConfigResponse { @JsonProperty("google_service_account") private String googleServiceAccount; - /** AWS Only: Instance profile used to pass IAM role to the cluster */ + /** + * AWS Only: The instance profile used to pass an IAM role to the SQL warehouses. This + * configuration is also applied to the workspace's serverless compute for notebooks and jobs. 
+ */ @JsonProperty("instance_profile_arn") private String instanceProfileArn; @@ -83,6 +90,16 @@ public Collection getDataAccessConfig() { return dataAccessConfig; } + public GetWorkspaceWarehouseConfigResponse setEnableServerlessCompute( + Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + public GetWorkspaceWarehouseConfigResponse setEnabledWarehouseTypes( Collection enabledWarehouseTypes) { this.enabledWarehouseTypes = enabledWarehouseTypes; @@ -148,6 +165,7 @@ public boolean equals(Object o) { return Objects.equals(channel, that.channel) && Objects.equals(configParam, that.configParam) && Objects.equals(dataAccessConfig, that.dataAccessConfig) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) && Objects.equals(enabledWarehouseTypes, that.enabledWarehouseTypes) && Objects.equals(globalParam, that.globalParam) && Objects.equals(googleServiceAccount, that.googleServiceAccount) @@ -162,6 +180,7 @@ public int hashCode() { channel, configParam, dataAccessConfig, + enableServerlessCompute, enabledWarehouseTypes, globalParam, googleServiceAccount, @@ -176,6 +195,7 @@ public String toString() { .add("channel", channel) .add("configParam", configParam) .add("dataAccessConfig", dataAccessConfig) + .add("enableServerlessCompute", enableServerlessCompute) .add("enabledWarehouseTypes", enabledWarehouseTypes) .add("globalParam", globalParam) .add("googleServiceAccount", googleServiceAccount) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponseSecurityPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponseSecurityPolicy.java index f7066b756..2abbb5e26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponseSecurityPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWorkspaceWarehouseConfigResponseSecurityPolicy.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Security policy for warehouses */ +/** Security policy to be used for warehouses */ @Generated public enum GetWorkspaceWarehouseConfigResponseSecurityPolicy { DATA_ACCESS_CONTROL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java index 157451774..41bb6164a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java @@ -10,14 +10,48 @@ @Generated public class ListWarehousesRequest { + /** The max number of warehouses to return. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous `ListWarehouses` call. Provide this to retrieve the + * subsequent page; otherwise the first will be retrieved. + * + *

When paginating, all other parameters provided to `ListWarehouses` must match the call that + * provided the page token. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + /** - * Service Principal which will be used to fetch the list of warehouses. If not specified, the - * user from the session header is used. + * Service Principal which will be used to fetch the list of endpoints. If not specified, SQL + * Gateway will use the user from the session header. */ @JsonIgnore @QueryParam("run_as_user_id") private Long runAsUserId; + public ListWarehousesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListWarehousesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public ListWarehousesRequest setRunAsUserId(Long runAsUserId) { this.runAsUserId = runAsUserId; return this; @@ -32,16 +66,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListWarehousesRequest that = (ListWarehousesRequest) o; - return Objects.equals(runAsUserId, that.runAsUserId); + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(runAsUserId, that.runAsUserId); } @Override public int hashCode() { - return Objects.hash(runAsUserId); + return Objects.hash(pageSize, pageToken, runAsUserId); } @Override public String toString() { - return new ToStringer(ListWarehousesRequest.class).add("runAsUserId", runAsUserId).toString(); + return new ToStringer(ListWarehousesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("runAsUserId", runAsUserId) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java index 53eb8c282..8f2aed6f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesResponse.java @@ -10,10 +10,26 @@ @Generated public class ListWarehousesResponse { + /** + * A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, + * there are no subsequent pages. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + /** A list of warehouses and their configurations. 
*/ @JsonProperty("warehouses") private Collection warehouses; + public ListWarehousesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + public ListWarehousesResponse setWarehouses(Collection warehouses) { this.warehouses = warehouses; return this; @@ -28,16 +44,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListWarehousesResponse that = (ListWarehousesResponse) o; - return Objects.equals(warehouses, that.warehouses); + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(warehouses, that.warehouses); } @Override public int hashCode() { - return Objects.hash(warehouses); + return Objects.hash(nextPageToken, warehouses); } @Override public String toString() { - return new ToStringer(ListWarehousesResponse.class).add("warehouses", warehouses).toString(); + return new ToStringer(ListWarehousesResponse.class) + .add("nextPageToken", nextPageToken) + .add("warehouses", warehouses) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java index be696df15..217e370f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java @@ -8,6 +8,13 @@ import java.util.Collection; import java.util.Objects; +/** + * Contains the result data of a single chunk when using `INLINE` disposition. When using + * `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide URLs to the + * result data in cloud storage. Exactly one of these alternatives is used. (While the + * `external_links` array prepares the API to return multiple links in a single response. Currently + * only a single link is returned.) + */ @Generated public class ResultData { /** @@ -35,7 +42,7 @@ public class ResultData { /** * When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are * no more chunks. The next chunk can be fetched with a - * :method:statementexecution/getStatementResultChunkN request. + * :method:statementexecution/getstatementresultchunkn request. */ @JsonProperty("next_chunk_index") private Long nextChunkIndex; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java index fed2b2a4e..9a19ee96b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java @@ -8,6 +8,12 @@ import java.util.Collection; import java.util.Objects; +/** + * Sets the workspace level warehouse configuration that is shared by all SQL warehouses in this + * workspace. + * + *

This is idempotent. + */ @Generated public class SetWorkspaceWarehouseConfigRequest { /** Optional: Channel selection details */ @@ -25,6 +31,10 @@ public class SetWorkspaceWarehouseConfigRequest { @JsonProperty("data_access_config") private Collection dataAccessConfig; + /** Enable Serverless compute for SQL warehouses */ + @JsonProperty("enable_serverless_compute") + private Boolean enableServerlessCompute; + /** * List of Warehouse Types allowed in this workspace (limits allowed value of the type field in * CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be @@ -43,7 +53,10 @@ public class SetWorkspaceWarehouseConfigRequest { @JsonProperty("google_service_account") private String googleServiceAccount; - /** AWS Only: Instance profile used to pass IAM role to the cluster */ + /** + * AWS Only: The instance profile used to pass an IAM role to the SQL warehouses. This + * configuration is also applied to the workspace's serverless compute for notebooks and jobs. + */ @JsonProperty("instance_profile_arn") private String instanceProfileArn; @@ -83,6 +96,16 @@ public Collection getDataAccessConfig() { return dataAccessConfig; } + public SetWorkspaceWarehouseConfigRequest setEnableServerlessCompute( + Boolean enableServerlessCompute) { + this.enableServerlessCompute = enableServerlessCompute; + return this; + } + + public Boolean getEnableServerlessCompute() { + return enableServerlessCompute; + } + public SetWorkspaceWarehouseConfigRequest setEnabledWarehouseTypes( Collection enabledWarehouseTypes) { this.enabledWarehouseTypes = enabledWarehouseTypes; @@ -148,6 +171,7 @@ public boolean equals(Object o) { return Objects.equals(channel, that.channel) && Objects.equals(configParam, that.configParam) && Objects.equals(dataAccessConfig, that.dataAccessConfig) + && Objects.equals(enableServerlessCompute, that.enableServerlessCompute) && Objects.equals(enabledWarehouseTypes, that.enabledWarehouseTypes) && Objects.equals(globalParam, that.globalParam) && Objects.equals(googleServiceAccount, that.googleServiceAccount) @@ -162,6 +186,7 @@ public int hashCode() { channel, configParam, dataAccessConfig, + enableServerlessCompute, enabledWarehouseTypes, globalParam, googleServiceAccount, @@ -176,6 +201,7 @@ public String toString() { .add("channel", channel) .add("configParam", configParam) .add("dataAccessConfig", dataAccessConfig) + .add("enableServerlessCompute", enableServerlessCompute) .add("enabledWarehouseTypes", enabledWarehouseTypes) .add("globalParam", globalParam) .add("googleServiceAccount", googleServiceAccount) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java index dcff39c66..87d99e715 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Security policy for warehouses */ +/** Security policy to be used for warehouses */ @Generated public enum SetWorkspaceWarehouseConfigRequestSecurityPolicy { DATA_ACCESS_CONTROL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java index 2c6f25fb7..bd5ec7c05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java @@ -4,7 +4,22 @@ import com.databricks.sdk.support.Generated; -/** Configurations whether the warehouse should use spot instances. */ +/** + * EndpointSpotInstancePolicy configures whether the endpoint should use spot instances. + * + *

The breakdown of how the EndpointSpotInstancePolicy converts to per cloud configurations is: + * + *

+-------+--------------------------------------+--------------------------------+
| Cloud | COST_OPTIMIZED                       | RELIABILITY_OPTIMIZED          |
+-------+--------------------------------------+--------------------------------+
| AWS   | On Demand Driver with Spot Executors | On Demand Driver and Executors |
| AZURE | On Demand Driver and Executors       | On Demand Driver and Executors |
+-------+--------------------------------------+--------------------------------+

While including "spot" in the enum name may limit the the future extensibility of this field + * because it limits this enum to denoting "spot or not", this is the field that PM recommends after + * discussion with customers per SC-48783. + */ @Generated public enum SpotInstancePolicy { COST_OPTIMIZED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java index 64dde2ce5..c6f05715e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** State of the warehouse */ +/** * State of a warehouse. */ @Generated public enum State { DELETED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java index 16b6fa0b0..c47eed4aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java @@ -33,18 +33,19 @@ * yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can * be set to `CANCEL`, which cancels the statement. * - *

In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call + *

In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The call * waits up to 30 seconds; if the statement execution finishes within this time, the result data is * returned directly in the response. If the execution takes longer than 30 seconds, the execution - * is canceled and the call returns with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` - * (`on_wait_timeout` is ignored) - The call doesn't wait for the statement to finish but returns - * directly with a statement ID. The status of the statement execution can be polled by issuing - * :method:statementexecution/getStatement with the statement ID. Once the execution has succeeded, - * this call also returns the result and metadata in the response. - Hybrid mode (default) - - * `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10 seconds; if the - * statement execution finishes within this time, the result data is returned directly in the - * response. If the execution takes longer than 10 seconds, a statement ID is returned. The - * statement ID can be used to fetch status and results in the same way as in the asynchronous mode. + * is canceled and the call returns with a `CANCELED` state. - **Asynchronous mode** + * (`wait_timeout=0s` and `on_wait_timeout` is ignored): The call doesn't wait for the statement to + * finish but returns directly with a statement ID. The status of the statement execution can be + * polled by issuing :method:statementexecution/getStatement with the statement ID. Once the + * execution has succeeded, this call also returns the result and metadata in the response. - + * **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits for + * up to 10 seconds; if the statement execution finishes within this time, the result data is + * returned directly in the response. If the execution takes longer than 10 seconds, a statement ID + * is returned. The statement ID can be used to fetch status and results in the same way as in the + * asynchronous mode. * *
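As a minimal sketch of the three modes described above, using the `StatementExecutionAPI` surface shown in this diff (`<warehouse-id>` is a placeholder; the client picks up ambient auth configuration):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.ExecuteStatementRequestOnWaitTimeout;
import com.databricks.sdk.service.sql.StatementResponse;

public class WaitModes {
  // Helper: execute "SELECT 1" with the given wait settings.
  private static StatementResponse run(
      WorkspaceClient w, String waitTimeout, ExecuteStatementRequestOnWaitTimeout onTimeout) {
    return w.statementExecution()
        .executeStatement(
            new ExecuteStatementRequest()
                .setWarehouseId("<warehouse-id>") // placeholder
                .setStatement("SELECT 1")
                .setWaitTimeout(waitTimeout)
                .setOnWaitTimeout(onTimeout));
  }

  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // uses ambient Databricks auth config

    // Synchronous: wait up to 30s; cancel if still running at the deadline.
    StatementResponse sync = run(w, "30s", ExecuteStatementRequestOnWaitTimeout.CANCEL);
    // Asynchronous: return immediately with a statement ID (on_wait_timeout is ignored).
    StatementResponse async = run(w, "0s", ExecuteStatementRequestOnWaitTimeout.CONTINUE);
    // Hybrid (default): wait up to 10s, then fall back to polling by statement ID.
    StatementResponse hybrid = run(w, "10s", ExecuteStatementRequestOnWaitTimeout.CONTINUE);

    System.out.println(sync.getStatus().getState());
    System.out.println(async.getStatementId());
    System.out.println(hybrid.getStatementId());
  }
}
```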

Depending on the size, the result can be split into multiple chunks. If the statement * execution is successful, the statement response contains a manifest and the first chunk of the @@ -114,13 +115,61 @@ public StatementExecutionAPI(StatementExecutionService mock) { /** * Requests that an executing statement be canceled. Callers must poll for status to see the - * terminal state. + * terminal state. Cancel response is empty; receiving response indicates successful receipt. */ public void cancelExecution(CancelExecutionRequest request) { impl.cancelExecution(request); } - /** Execute a SQL statement */ + /** + * Execute a SQL statement and optionally await its results for a specified time. + * + *

**Use case: small result sets with INLINE + JSON_ARRAY** + * + *

For flows that generate small and predictable result sets (<= 25 MiB), `INLINE` responses of + * `JSON_ARRAY` result data are typically the simplest way to execute and fetch result data. + * + *
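A sketch of the small-result-set flow, assuming the statement succeeds within the wait timeout; it also iterates any additional chunks via `next_chunk_index` using `getStatementResultChunkN` as described later in this diff (`<warehouse-id>` is a placeholder):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.Disposition;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.Format;
import com.databricks.sdk.service.sql.ResultData;
import com.databricks.sdk.service.sql.StatementResponse;
import java.util.Collection;

public class InlineFetch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    StatementResponse resp =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("<warehouse-id>") // placeholder
                    .setStatement("SELECT id FROM range(10)")
                    .setDisposition(Disposition.INLINE)
                    .setFormat(Format.JSON_ARRAY)
                    .setWaitTimeout("30s"));

    // Assumes status.state is SUCCEEDED; the first chunk arrives inline.
    ResultData chunk = resp.getResult();
    for (Collection<String> row : chunk.getDataArray()) {
      System.out.println(row);
    }

    // Fetch any remaining chunks by following next_chunk_index.
    Long next = chunk.getNextChunkIndex();
    while (next != null) {
      chunk = w.statementExecution().getStatementResultChunkN(resp.getStatementId(), next);
      for (Collection<String> row : chunk.getDataArray()) {
        System.out.println(row);
      }
      next = chunk.getNextChunkIndex();
    }
  }
}
```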

**Use case: large result sets with EXTERNAL_LINKS** + * + *

Using `EXTERNAL_LINKS` to fetch result data allows you to fetch large result sets + * efficiently. The main differences from using `INLINE` disposition are that the result data is + * accessed with URLs, and that there are 3 supported formats: `JSON_ARRAY`, `ARROW_STREAM` and + * `CSV` compared to only `JSON_ARRAY` with `INLINE`. + * + *

**URLs** * + *

External links point to data stored within your workspace's internal storage, in the form of + * a URL. The URLs are valid for only a short period, <= 15 minutes. Alongside each + * `external_link` is an expiration field indicating the time at which the URL is no longer valid. + * In `EXTERNAL_LINKS` mode, chunks can be resolved and fetched multiple times and in parallel. + * + *

---- + * + *

### **Warning: Databricks strongly recommends that you protect the URLs that are returned by + * the `EXTERNAL_LINKS` disposition.** + * + *

When you use the `EXTERNAL_LINKS` disposition, a short-lived URL is generated, which can be + * used to download the results directly from cloud storage. As a short-lived credential is embedded + * in this URL, you should protect the URL. + * + *

Because these URLs are already generated with embedded temporary credentials, you must not set an + * `Authorization` header in the download requests. + * + *

The `EXTERNAL_LINKS` disposition can be disabled upon request by creating a support case. + * + *
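A sketch of the `EXTERNAL_LINKS` flow under the constraints above: the links are fetched with a plain HTTP client and, per the warning, without an `Authorization` header (`<warehouse-id>` and the table name are placeholders; assumes the statement succeeds within the wait timeout):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.Disposition;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.ExternalLink;
import com.databricks.sdk.service.sql.Format;
import com.databricks.sdk.service.sql.StatementResponse;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ExternalLinksFetch {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();
    StatementResponse resp =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("<warehouse-id>") // placeholder
                    .setStatement("SELECT * FROM samples.nyctaxi.trips") // placeholder table
                    .setDisposition(Disposition.EXTERNAL_LINKS)
                    .setFormat(Format.ARROW_STREAM)
                    .setWaitTimeout("30s"));

    HttpClient http = HttpClient.newHttpClient();
    for (ExternalLink link : resp.getResult().getExternalLinks()) {
      // The URL embeds a temporary credential: do NOT add an Authorization header,
      // do not log the URL, and fetch it before `expiration` passes.
      HttpRequest req = HttpRequest.newBuilder(URI.create(link.getExternalLink())).GET().build();
      HttpResponse<byte[]> data = http.send(req, HttpResponse.BodyHandlers.ofByteArray());
      System.out.println("chunk " + link.getChunkIndex() + ": " + data.body().length + " bytes");
    }
  }
}
```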

See also [Security best practices]. + * + *

---- + * + *
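The named-parameter binding described earlier for `parameters` maps directly onto the request builder; a sketch mirroring the `my_name`/`my_date` example from the javadoc (`<warehouse-id>` and `my_table` are placeholders):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.StatementParameterListItem;
import com.databricks.sdk.service.sql.StatementResponse;
import java.util.Arrays;

public class NamedParameters {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    StatementResponse resp =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("<warehouse-id>") // placeholder
                    .setStatement(
                        "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date")
                    .setParameters(
                        Arrays.asList(
                            new StatementParameterListItem()
                                .setName("my_name")
                                .setValue("the name"),
                            new StatementParameterListItem()
                                .setName("my_date")
                                .setValue("2020-01-01")
                                .setType("DATE"))));
    System.out.println(resp.getStatus().getState());
  }
}
```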

StatementResponse contains `statement_id` and `status`; other fields might be absent or + * present depending on context. If the SQL warehouse fails to execute the provided statement, a + * 200 response is returned with `status.state` set to `FAILED` (in contrast to a failure when + * accepting the request, which results in a non-200 response). Details of the error can be found + * at `status.error` in case of execution failures. + * + *
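Since execution failures surface through `status.state` and `status.error` rather than HTTP errors, asynchronous callers poll until a terminal state; a sketch using `getStatement` as documented below (`<warehouse-id>` and the table are placeholders):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.StatementResponse;
import com.databricks.sdk.service.sql.StatementState;

public class PollUntilTerminal {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();
    // Asynchronous mode: return immediately with a statement ID.
    StatementResponse resp =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("<warehouse-id>") // placeholder
                    .setStatement("SELECT count(*) FROM my_table") // placeholder table
                    .setWaitTimeout("0s"));

    // Poll until the statement leaves PENDING/RUNNING.
    StatementState state = resp.getStatus().getState();
    while (state == StatementState.PENDING || state == StatementState.RUNNING) {
      Thread.sleep(5000); // the docs note status can lag by up to ~5 seconds
      resp = w.statementExecution().getStatement(resp.getStatementId());
      state = resp.getStatus().getState();
    }

    if (state == StatementState.SUCCEEDED) {
      System.out.println(resp.getResult().getDataArray());
    } else {
      System.out.println(state + ": " + resp.getStatus().getError());
    }
  }
}
```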

[Security best practices]: + * https://docs.databricks.com/sql/admin/sql-execution-tutorial.html#security-best-practices + */ public StatementResponse executeStatement(ExecuteStatementRequest request) { return impl.executeStatement(request); } @@ -130,11 +179,13 @@ public StatementResponse getStatement(String statementId) { } /** - * This request can be used to poll for the statement's status. When the `status.state` field is - * `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. - * When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP - * 200 with the state set. After at least 12 hours in terminal state, the statement is removed - * from the warehouse and further calls will receive an HTTP 404 response. + * This request can be used to poll for the statement's status. StatementResponse contains + * `statement_id` and `status`; other fields might be absent or present depending on context. When + * the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first + * chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or + * `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, + * the statement is removed from the warehouse and further calls will receive an HTTP 404 + * response. * *

**NOTE** This call currently might take up to 5 seconds to get the latest status and result. */ @@ -156,7 +207,8 @@ public ResultData getStatementResultChunkN(String statementId, long chunkIndex) * request can be used to fetch subsequent chunks. The response structure is identical to the * nested `result` element described in the :method:statementexecution/getStatement request, and * similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple - * iteration through the result set. + * iteration through the result set. Depending on `disposition`, the response returns chunks of + * data either inline, or as links. */ public ResultData getStatementResultChunkN(GetStatementResultChunkNRequest request) { return impl.getStatementResultChunkN(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java index 4c321af78..763e09ac2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java @@ -30,7 +30,7 @@ public void cancelExecution(CancelExecutionRequest request) { @Override public StatementResponse executeStatement(ExecuteStatementRequest request) { - String path = "/api/2.0/sql/statements/"; + String path = "/api/2.0/sql/statements"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java index 50fae0fc5..951af8946 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java @@ -30,18 +30,19 @@ * yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can * be set to `CANCEL`, which cancels the statement. * - *

In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call + *

In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The call * waits up to 30 seconds; if the statement execution finishes within this time, the result data is * returned directly in the response. If the execution takes longer than 30 seconds, the execution - * is canceled and the call returns with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` - * (`on_wait_timeout` is ignored) - The call doesn't wait for the statement to finish but returns - * directly with a statement ID. The status of the statement execution can be polled by issuing - * :method:statementexecution/getStatement with the statement ID. Once the execution has succeeded, - * this call also returns the result and metadata in the response. - Hybrid mode (default) - - * `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10 seconds; if the - * statement execution finishes within this time, the result data is returned directly in the - * response. If the execution takes longer than 10 seconds, a statement ID is returned. The - * statement ID can be used to fetch status and results in the same way as in the asynchronous mode. + * is canceled and the call returns with a `CANCELED` state. - **Asynchronous mode** + * (`wait_timeout=0s` and `on_wait_timeout` is ignored): The call doesn't wait for the statement to + * finish but returns directly with a statement ID. The status of the statement execution can be + * polled by issuing :method:statementexecution/getStatement with the statement ID. Once the + * execution has succeeded, this call also returns the result and metadata in the response. - + * **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits for + * up to 10 seconds; if the statement execution finishes within this time, the result data is + * returned directly in the response. If the execution takes longer than 10 seconds, a statement ID + * is returned. The statement ID can be used to fetch status and results in the same way as in the + * asynchronous mode. * *

Depending on the size, the result can be split into multiple chunks. If the statement * execution is successful, the statement response contains a manifest and the first chunk of the @@ -101,19 +102,69 @@ public interface StatementExecutionService { /** * Requests that an executing statement be canceled. Callers must poll for status to see the - * terminal state. + * terminal state. Cancel response is empty; receiving response indicates successful receipt. */ void cancelExecution(CancelExecutionRequest cancelExecutionRequest); - /** Execute a SQL statement */ + /** + * Execute a SQL statement and optionally await its results for a specified time. + * + *

**Use case: small result sets with INLINE + JSON_ARRAY** + * + *

For flows that generate small and predictable result sets (<= 25 MiB), `INLINE` responses of + * `JSON_ARRAY` result data are typically the simplest way to execute and fetch result data. + * + *

**Use case: large result sets with EXTERNAL_LINKS** + * + *

Using `EXTERNAL_LINKS` to fetch result data allows you to fetch large result sets + * efficiently. The main differences from using `INLINE` disposition are that the result data is + * accessed with URLs, and that there are 3 supported formats: `JSON_ARRAY`, `ARROW_STREAM` and + * `CSV` compared to only `JSON_ARRAY` with `INLINE`. + * + *

**URLs** * + *

External links point to data stored within your workspace's internal storage, in the form of + * a URL. The URLs are valid for only a short period, <= 15 minutes. Alongside each + * `external_link` is an expiration field indicating the time at which the URL is no longer valid. + * In `EXTERNAL_LINKS` mode, chunks can be resolved and fetched multiple times and in parallel. + * + *

---- + * + *

### **Warning: Databricks strongly recommends that you protect the URLs that are returned by + * the `EXTERNAL_LINKS` disposition.** + * + *

When you use the `EXTERNAL_LINKS` disposition, a short-lived URL is generated, which can be + * used to download the results directly from cloud storage. As a short-lived credential is embedded + * in this URL, you should protect the URL. + * + *

Because these URLs are already generated with embedded temporary credentials, you must not set an + * `Authorization` header in the download requests. + * + *

The `EXTERNAL_LINKS` disposition can be disabled upon request by creating a support case. + * + *

See also [Security best practices]. + * + *

---- + * + *

StatementResponse contains `statement_id` and `status`; other fields might be absent or + * present depending on context. If the SQL warehouse fails to execute the provided statement, a + * 200 response is returned with `status.state` set to `FAILED` (in contrast to a failure when + * accepting the request, which results in a non-200 response). Details of the error can be found + * at `status.error` in case of execution failures. + * + *

[Security best practices]: + * https://docs.databricks.com/sql/admin/sql-execution-tutorial.html#security-best-practices + */ StatementResponse executeStatement(ExecuteStatementRequest executeStatementRequest); /** - * This request can be used to poll for the statement's status. When the `status.state` field is - * `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. - * When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP - * 200 with the state set. After at least 12 hours in terminal state, the statement is removed - * from the warehouse and further calls will receive an HTTP 404 response. + * This request can be used to poll for the statement's status. StatementResponse contains + * `statement_id` and `status`; other fields might be absent or present depending on context. When + * the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first + * chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or + * `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, + * the statement is removed from the warehouse and further calls will receive an HTTP 404 + * response. * *

**NOTE** This call currently might take up to 5 seconds to get the latest status and result. */ @@ -126,7 +177,8 @@ public interface StatementExecutionService { * request can be used to fetch subsequent chunks. The response structure is identical to the * nested `result` element described in the :method:statementexecution/getStatement request, and * similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple - * iteration through the result set. + * iteration through the result set. Depending on `disposition`, the response returns chunks of + * data either inline, or as links. */ ResultData getStatementResultChunkN( GetStatementResultChunkNRequest getStatementResultChunkNRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState.java index 87cf79688..7d1d7a62c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState.java @@ -4,21 +4,12 @@ import com.databricks.sdk.support.Generated; -/** - * Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: - * execution was successful, result data available for fetch - `FAILED`: execution failed; reason - * for failure described in accomanying error message - `CANCELED`: user canceled; can come from - * explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, - * and statement closed; result no longer available for fetch - */ @Generated public enum StatementState { - CANCELED, // user canceled; can come from explicit cancel call, or timeout with - // `on_wait_timeout=CANCEL` - CLOSED, // execution successful, and statement closed; result no longer available for - // fetch - FAILED, // execution failed; reason for failure described in accomanying error message - PENDING, // waiting for warehouse - RUNNING, // running - SUCCEEDED, // execution was successful, result data available for fetch + CANCELED, + CLOSED, + FAILED, + PENDING, + RUNNING, + SUCCEEDED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java index ddbfd8aa0..0fd5f703d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java @@ -14,7 +14,13 @@ public class StatementStatus { @JsonProperty("error") private ServiceError error; - /** */ + /** + * Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - + * `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution + * failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; + * can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: + * execution successful, and statement closed; result no longer available for fetch + */ @JsonProperty("state") private StatementState state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java index 6d33b75e1..9d1a89702 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java @@ -4,11 +4,9 @@ import com.databricks.sdk.support.Generated; -/** Health status of the warehouse. */ @Generated public enum Status { DEGRADED, FAILED, HEALTHY, - STATUS_UNSPECIFIED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java index 3ee502add..8bfe1b758 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java @@ -4,21 +4,36 @@ import com.databricks.sdk.support.Generated; -/** status code indicating why the cluster was terminated */ +/** The status code indicating why the cluster was terminated */ @Generated public enum TerminationReasonCode { ABUSE_DETECTED, + ACCESS_TOKEN_FAILURE, + ALLOCATION_TIMEOUT, + ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY, + ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS, + ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS, + ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS, + ALLOCATION_TIMEOUT_NO_READY_CLUSTERS, + ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS, + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS, ATTACH_PROJECT_FAILURE, AWS_AUTHORIZATION_FAILURE, + AWS_INACCESSIBLE_KMS_KEY_FAILURE, + AWS_INSTANCE_PROFILE_UPDATE_FAILURE, AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE, AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE, + AWS_INVALID_KEY_PAIR, + AWS_INVALID_KMS_KEY_STATE, AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE, AWS_REQUEST_LIMIT_EXCEEDED, + AWS_RESOURCE_QUOTA_EXCEEDED, AWS_UNSUPPORTED_FAILURE, AZURE_BYOK_KEY_PERMISSION_FAILURE, AZURE_EPHEMERAL_DISK_FAILURE, AZURE_INVALID_DEPLOYMENT_TEMPLATE, AZURE_OPERATION_NOT_ALLOWED_EXCEPTION, + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE, AZURE_QUOTA_EXCEEDED_EXCEPTION, AZURE_RESOURCE_MANAGER_THROTTLING, AZURE_RESOURCE_PROVIDER_THROTTLING, @@ -27,63 +42,148 @@ public enum TerminationReasonCode { AZURE_VNET_CONFIGURATION_FAILURE, BOOTSTRAP_TIMEOUT, BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION, + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG, + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED, + BUDGET_POLICY_RESOLUTION_FAILURE, + CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED, + CLOUD_ACCOUNT_SETUP_FAILURE, + CLOUD_OPERATION_CANCELLED, CLOUD_PROVIDER_DISK_SETUP_FAILURE, + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED, CLOUD_PROVIDER_LAUNCH_FAILURE, + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG, CLOUD_PROVIDER_RESOURCE_STOCKOUT, + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG, CLOUD_PROVIDER_SHUTDOWN, + CLUSTER_OPERATION_THROTTLED, + CLUSTER_OPERATION_TIMEOUT, COMMUNICATION_LOST, CONTAINER_LAUNCH_FAILURE, CONTROL_PLANE_REQUEST_FAILURE, + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG, DATABASE_CONNECTION_FAILURE, + DATA_ACCESS_CONFIG_CHANGED, DBFS_COMPONENT_UNHEALTHY, + DISASTER_RECOVERY_REPLICATION, + DNS_RESOLUTION_ERROR, + DOCKER_CONTAINER_CREATION_EXCEPTION, DOCKER_IMAGE_PULL_FAILURE, + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION, + DOCKER_INVALID_OS_EXCEPTION, + DRIVER_DNS_RESOLUTION_FAILURE, + DRIVER_EVICTION, + DRIVER_LAUNCH_TIMEOUT, + DRIVER_NODE_UNREACHABLE, + DRIVER_OUT_OF_DISK, + DRIVER_OUT_OF_MEMORY, + DRIVER_POD_CREATION_FAILURE, + DRIVER_UNEXPECTED_FAILURE, + DRIVER_UNHEALTHY, DRIVER_UNREACHABLE, DRIVER_UNRESPONSIVE, + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED, + EOS_SPARK_IMAGE, EXECUTION_COMPONENT_UNHEALTHY, + EXECUTOR_POD_UNSCHEDULED, + GCP_API_RATE_QUOTA_EXCEEDED, + GCP_DENIED_BY_ORG_POLICY, + 
GCP_FORBIDDEN, + GCP_IAM_TIMEOUT, + GCP_INACCESSIBLE_KMS_KEY_FAILURE, + GCP_INSUFFICIENT_CAPACITY, + GCP_IP_SPACE_EXHAUSTED, + GCP_KMS_KEY_PERMISSION_DENIED, + GCP_NOT_FOUND, GCP_QUOTA_EXCEEDED, + GCP_RESOURCE_QUOTA_EXCEEDED, + GCP_SERVICE_ACCOUNT_ACCESS_DENIED, GCP_SERVICE_ACCOUNT_DELETED, + GCP_SERVICE_ACCOUNT_NOT_FOUND, + GCP_SUBNET_NOT_READY, + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED, + GKE_BASED_CLUSTER_TERMINATION, GLOBAL_INIT_SCRIPT_FAILURE, HIVE_METASTORE_PROVISIONING_FAILURE, IMAGE_PULL_PERMISSION_DENIED, INACTIVITY, + INIT_CONTAINER_NOT_FINISHED, INIT_SCRIPT_FAILURE, INSTANCE_POOL_CLUSTER_FAILURE, + INSTANCE_POOL_MAX_CAPACITY_REACHED, + INSTANCE_POOL_NOT_FOUND, INSTANCE_UNREACHABLE, + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG, + INTERNAL_CAPACITY_FAILURE, INTERNAL_ERROR, INVALID_ARGUMENT, + INVALID_AWS_PARAMETER, + INVALID_INSTANCE_PLACEMENT_PROTOCOL, INVALID_SPARK_IMAGE, + INVALID_WORKER_IMAGE_FAILURE, + IN_PENALTY_BOX, IP_EXHAUSTION_FAILURE, JOB_FINISHED, + K8S_ACTIVE_POD_QUOTA_EXCEEDED, K8S_AUTOSCALING_FAILURE, K8S_DBR_CLUSTER_LAUNCH_TIMEOUT, + LAZY_ALLOCATION_TIMEOUT, + MAINTENANCE_MODE, METASTORE_COMPONENT_UNHEALTHY, NEPHOS_RESOURCE_MANAGEMENT, + NETVISOR_SETUP_TIMEOUT, + NETWORK_CHECK_CONTROL_PLANE_FAILURE, + NETWORK_CHECK_DNS_SERVER_FAILURE, + NETWORK_CHECK_METADATA_ENDPOINT_FAILURE, + NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE, + NETWORK_CHECK_NIC_FAILURE, + NETWORK_CHECK_STORAGE_FAILURE, NETWORK_CONFIGURATION_FAILURE, NFS_MOUNT_FAILURE, + NO_ACTIVATED_K8S, + NO_ACTIVATED_K8S_TESTING_TAG, + NO_MATCHED_K8S, + NO_MATCHED_K8S_TESTING_TAG, NPIP_TUNNEL_SETUP_FAILURE, NPIP_TUNNEL_TOKEN_FAILURE, + POD_ASSIGNMENT_FAILURE, + POD_SCHEDULING_FAILURE, REQUEST_REJECTED, REQUEST_THROTTLED, + RESOURCE_USAGE_BLOCKED, + SECRET_CREATION_FAILURE, + SECRET_PERMISSION_DENIED, SECRET_RESOLUTION_ERROR, + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION, SECURITY_DAEMON_REGISTRATION_EXCEPTION, SELF_BOOTSTRAP_FAILURE, + SERVERLESS_LONG_RUNNING_TERMINATED, SKIPPED_SLOW_NODES, SLOW_IMAGE_DOWNLOAD, SPARK_ERROR, SPARK_IMAGE_DOWNLOAD_FAILURE, + SPARK_IMAGE_DOWNLOAD_THROTTLED, + SPARK_IMAGE_NOT_FOUND, SPARK_STARTUP_FAILURE, SPOT_INSTANCE_TERMINATION, + SSH_BOOTSTRAP_FAILURE, STORAGE_DOWNLOAD_FAILURE, + STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG, + STORAGE_DOWNLOAD_FAILURE_SLOW, + STORAGE_DOWNLOAD_FAILURE_THROTTLED, STS_CLIENT_SETUP_FAILURE, SUBNET_EXHAUSTED_FAILURE, TEMPORARILY_UNAVAILABLE, TRIAL_EXPIRED, UNEXPECTED_LAUNCH_FAILURE, + UNEXPECTED_POD_RECREATION, UNKNOWN, UNSUPPORTED_INSTANCE_TYPE, UPDATE_INSTANCE_PROFILE_FAILURE, + USAGE_POLICY_ENTITLEMENT_DENIED, + USER_INITIATED_VM_TERMINATION, USER_REQUEST, WORKER_SETUP_FAILURE, WORKSPACE_CANCELLED_ERROR, WORKSPACE_CONFIGURATION_ERROR, + WORKSPACE_UPDATE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java index 562866b2e..bf221ba27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePair.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * * Configuration values to enable or disable the access to specific warehouse types in the + * workspace. + */ @Generated public class WarehouseTypePair { /** @@ -16,7 +20,7 @@ public class WarehouseTypePair { @JsonProperty("enabled") private Boolean enabled; - /** Warehouse type: `PRO` or `CLASSIC`. 
*/ + /** */ @JsonProperty("warehouse_type") private WarehouseTypePairWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairWarehouseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairWarehouseType.java index a0d6f8870..563e75240 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairWarehouseType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseTypePairWarehouseType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Warehouse type: `PRO` or `CLASSIC`. */ @Generated public enum WarehouseTypePairWarehouseType { CLASSIC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java index 2382015a1..3253ff69d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java @@ -189,7 +189,16 @@ public GetWorkspaceWarehouseConfigResponse getWorkspaceWarehouseConfig() { /** Lists all SQL warehouses that a user has access to. */ public Iterable list(ListWarehousesRequest request) { return new Paginator<>( - request, impl::list, ListWarehousesResponse::getWarehouses, response -> null); + request, + impl::list, + ListWarehousesResponse::getWarehouses, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java new file mode 100755 index 000000000..a5a64a8ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
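The WarehousesAPI.list hunk above replaces the old single-page paginator (response -> null) with token-based continuation. A minimal caller sketch, assuming a configured WorkspaceClient and the EndpointInfo element type from the sql package (neither appears in this diff, so treat both as assumptions):

    WorkspaceClient w = new WorkspaceClient();
    // Iteration lazily follows next_page_token: each further page is requested by
    // copying getNextPageToken() into request.setPageToken(token) until the token
    // comes back null or empty, exactly as in the continuation lambda above.
    for (EndpointInfo warehouse : w.warehouses().list(new ListWarehousesRequest())) {
      System.out.println(warehouse.getName());
    }
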
+ +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateTagAssignmentRequest { + /** */ + @JsonProperty("tag_assignment") + private TagAssignment tagAssignment; + + public CreateTagAssignmentRequest setTagAssignment(TagAssignment tagAssignment) { + this.tagAssignment = tagAssignment; + return this; + } + + public TagAssignment getTagAssignment() { + return tagAssignment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTagAssignmentRequest that = (CreateTagAssignmentRequest) o; + return Objects.equals(tagAssignment, that.tagAssignment); + } + + @Override + public int hashCode() { + return Objects.hash(tagAssignment); + } + + @Override + public String toString() { + return new ToStringer(CreateTagAssignmentRequest.class) + .add("tagAssignment", tagAssignment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java new file mode 100755 index 000000000..bed446bf5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteTagAssignmentRequest { + /** The identifier of the entity to which the tag is assigned */ + @JsonIgnore private String entityId; + + /** The type of entity to which the tag is assigned. Allowed value is dashboards */ + @JsonIgnore private String entityType; + + /** The key of the tag. The characters , . 
: / - = and leading/trailing spaces are not allowed */ + @JsonIgnore private String tagKey; + + public DeleteTagAssignmentRequest setEntityId(String entityId) { + this.entityId = entityId; + return this; + } + + public String getEntityId() { + return entityId; + } + + public DeleteTagAssignmentRequest setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public DeleteTagAssignmentRequest setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTagAssignmentRequest that = (DeleteTagAssignmentRequest) o; + return Objects.equals(entityId, that.entityId) + && Objects.equals(entityType, that.entityType) + && Objects.equals(tagKey, that.tagKey); + } + + @Override + public int hashCode() { + return Objects.hash(entityId, entityType, tagKey); + } + + @Override + public String toString() { + return new ToStringer(DeleteTagAssignmentRequest.class) + .add("entityId", entityId) + .add("entityType", entityType) + .add("tagKey", tagKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java new file mode 100755 index 000000000..cd2dc756d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetTagAssignmentRequest { + /** The identifier of the entity to which the tag is assigned */ + @JsonIgnore private String entityId; + + /** The type of entity to which the tag is assigned. Allowed value is dashboards */ + @JsonIgnore private String entityType; + + /** The key of the tag. The characters , . 
: / - = and leading/trailing spaces are not allowed */ + @JsonIgnore private String tagKey; + + public GetTagAssignmentRequest setEntityId(String entityId) { + this.entityId = entityId; + return this; + } + + public String getEntityId() { + return entityId; + } + + public GetTagAssignmentRequest setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public GetTagAssignmentRequest setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTagAssignmentRequest that = (GetTagAssignmentRequest) o; + return Objects.equals(entityId, that.entityId) + && Objects.equals(entityType, that.entityType) + && Objects.equals(tagKey, that.tagKey); + } + + @Override + public int hashCode() { + return Objects.hash(entityId, entityType, tagKey); + } + + @Override + public String toString() { + return new ToStringer(GetTagAssignmentRequest.class) + .add("entityId", entityId) + .add("entityType", entityType) + .add("tagKey", tagKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java new file mode 100755 index 000000000..5c717ce96 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListTagAssignmentsRequest { + /** The identifier of the entity to which the tag is assigned */ + @JsonIgnore private String entityId; + + /** The type of entity to which the tag is assigned. Allowed value is dashboards */ + @JsonIgnore private String entityType; + + /** Optional. Maximum number of tag assignments to return in a single page */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token to go to the next page of tag assignments. Requests first page if absent. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListTagAssignmentsRequest setEntityId(String entityId) { + this.entityId = entityId; + return this; + } + + public String getEntityId() { + return entityId; + } + + public ListTagAssignmentsRequest setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public ListTagAssignmentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListTagAssignmentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTagAssignmentsRequest that = (ListTagAssignmentsRequest) o; + return Objects.equals(entityId, that.entityId) + && Objects.equals(entityType, that.entityType) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(entityId, entityType, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListTagAssignmentsRequest.class) + .add("entityId", entityId) + .add("entityType", entityType) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsResponse.java new file mode 100755 index 000000000..33c01ed5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
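The ListTagAssignmentsRequest above sends entity_type and entity_id as path parameters (they carry @JsonIgnore) and page_size/page_token as query parameters. A minimal construction sketch; the entity id is a placeholder, and "dashboards" is the only entity type these docs name:

    ListTagAssignmentsRequest req =
        new ListTagAssignmentsRequest()
            .setEntityType("dashboards")
            .setEntityId("<dashboard-id>") // placeholder identifier
            .setPageSize(50L); // optional; the service default applies when unset
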
+ +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListTagAssignmentsResponse { + /** Pagination token to request the next page of tag assignments */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("tag_assignments") + private Collection tagAssignments; + + public ListTagAssignmentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListTagAssignmentsResponse setTagAssignments(Collection tagAssignments) { + this.tagAssignments = tagAssignments; + return this; + } + + public Collection getTagAssignments() { + return tagAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListTagAssignmentsResponse that = (ListTagAssignmentsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(tagAssignments, that.tagAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, tagAssignments); + } + + @Override + public String toString() { + return new ToStringer(ListTagAssignmentsResponse.class) + .add("nextPageToken", nextPageToken) + .add("tagAssignments", tagAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java new file mode 100755 index 000000000..8ca0dfc99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TagAssignment { + /** The identifier of the entity to which the tag is assigned */ + @JsonProperty("entity_id") + private String entityId; + + /** The type of entity to which the tag is assigned. Allowed value is dashboards */ + @JsonProperty("entity_type") + private String entityType; + + /** The key of the tag. The characters , . 
: / - = and leading/trailing spaces are not allowed */ + @JsonProperty("tag_key") + private String tagKey; + + /** The value of the tag */ + @JsonProperty("tag_value") + private String tagValue; + + public TagAssignment setEntityId(String entityId) { + this.entityId = entityId; + return this; + } + + public String getEntityId() { + return entityId; + } + + public TagAssignment setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public TagAssignment setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + public TagAssignment setTagValue(String tagValue) { + this.tagValue = tagValue; + return this; + } + + public String getTagValue() { + return tagValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TagAssignment that = (TagAssignment) o; + return Objects.equals(entityId, that.entityId) + && Objects.equals(entityType, that.entityType) + && Objects.equals(tagKey, that.tagKey) + && Objects.equals(tagValue, that.tagValue); + } + + @Override + public int hashCode() { + return Objects.hash(entityId, entityType, tagKey, tagValue); + } + + @Override + public String toString() { + return new ToStringer(TagAssignment.class) + .add("entityId", entityId) + .add("entityType", entityType) + .add("tagKey", tagKey) + .add("tagValue", tagValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsAPI.java new file mode 100755 index 000000000..9e90894b2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsAPI.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage tag assignments on workspace-scoped objects. 
*/ +@Generated +public class TagAssignmentsAPI { + private static final Logger LOG = LoggerFactory.getLogger(TagAssignmentsAPI.class); + + private final TagAssignmentsService impl; + + /** Regular-use constructor */ + public TagAssignmentsAPI(ApiClient apiClient) { + impl = new TagAssignmentsImpl(apiClient); + } + + /** Constructor for mocks */ + public TagAssignmentsAPI(TagAssignmentsService mock) { + impl = mock; + } + + /** Create a tag assignment */ + public TagAssignment createTagAssignment(CreateTagAssignmentRequest request) { + return impl.createTagAssignment(request); + } + + public void deleteTagAssignment(String entityType, String entityId, String tagKey) { + deleteTagAssignment( + new DeleteTagAssignmentRequest() + .setEntityType(entityType) + .setEntityId(entityId) + .setTagKey(tagKey)); + } + + /** Delete a tag assignment */ + public void deleteTagAssignment(DeleteTagAssignmentRequest request) { + impl.deleteTagAssignment(request); + } + + public TagAssignment getTagAssignment(String entityType, String entityId, String tagKey) { + return getTagAssignment( + new GetTagAssignmentRequest() + .setEntityType(entityType) + .setEntityId(entityId) + .setTagKey(tagKey)); + } + + /** Get a tag assignment */ + public TagAssignment getTagAssignment(GetTagAssignmentRequest request) { + return impl.getTagAssignment(request); + } + + public Iterable listTagAssignments(String entityType, String entityId) { + return listTagAssignments( + new ListTagAssignmentsRequest().setEntityType(entityType).setEntityId(entityId)); + } + + /** List the tag assignments for an entity */ + public Iterable listTagAssignments(ListTagAssignmentsRequest request) { + return new Paginator<>( + request, + impl::listTagAssignments, + ListTagAssignmentsResponse::getTagAssignments, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** Update a tag assignment */ + public TagAssignment updateTagAssignment(UpdateTagAssignmentRequest request) { + return impl.updateTagAssignment(request); + } + + public TagAssignmentsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsImpl.java new file mode 100755 index 000000000..756fa1dd9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsImpl.java @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
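TagAssignmentsAPI above layers convenience overloads and a token-following paginator over the service. A sketch of a create/list/update/delete round trip using only methods shown in this diff; the apiClient, entity id, and tag values are placeholders:

    TagAssignmentsAPI tags = new TagAssignmentsAPI(apiClient);
    // Create: the assignment payload is wrapped in the request body.
    tags.createTagAssignment(
        new CreateTagAssignmentRequest()
            .setTagAssignment(
                new TagAssignment()
                    .setEntityType("dashboards")
                    .setEntityId("<dashboard-id>")
                    .setTagKey("team")
                    .setTagValue("data-platform")));
    // List: further pages are fetched transparently via next_page_token.
    for (TagAssignment t : tags.listTagAssignments("dashboards", "<dashboard-id>")) {
      System.out.println(t.getTagKey() + "=" + t.getTagValue());
    }
    // Update: name the replaced fields explicitly in update_mask rather than `*`.
    tags.updateTagAssignment(
        new UpdateTagAssignmentRequest()
            .setEntityType("dashboards")
            .setEntityId("<dashboard-id>")
            .setTagKey("team")
            .setTagAssignment(new TagAssignment().setTagValue("ml-platform"))
            .setUpdateMask("tag_value"));
    tags.deleteTagAssignment("dashboards", "<dashboard-id>", "team");
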
+package com.databricks.sdk.service.tags; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of TagAssignments */ +@Generated +class TagAssignmentsImpl implements TagAssignmentsService { + private final ApiClient apiClient; + + public TagAssignmentsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public TagAssignment createTagAssignment(CreateTagAssignmentRequest request) { + String path = "/api/2.0/entity-tag-assignments"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getTagAssignment())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, TagAssignment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteTagAssignment(DeleteTagAssignmentRequest request) { + String path = + String.format( + "/api/2.0/entity-tag-assignments/%s/%s/tags/%s", + request.getEntityType(), request.getEntityId(), request.getTagKey()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public TagAssignment getTagAssignment(GetTagAssignmentRequest request) { + String path = + String.format( + "/api/2.0/entity-tag-assignments/%s/%s/tags/%s", + request.getEntityType(), request.getEntityId(), request.getTagKey()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, TagAssignment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListTagAssignmentsResponse listTagAssignments(ListTagAssignmentsRequest request) { + String path = + String.format( + "/api/2.0/entity-tag-assignments/%s/%s/tags", + request.getEntityType(), request.getEntityId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListTagAssignmentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public TagAssignment updateTagAssignment(UpdateTagAssignmentRequest request) { + String path = + String.format( + "/api/2.0/entity-tag-assignments/%s/%s/tags/%s", + request.getEntityType(), request.getEntityId(), request.getTagKey()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getTagAssignment())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, TagAssignment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsService.java new file mode 100755 index 000000000..e8ae882dc 
--- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignmentsService.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; + +/** + * Manage tag assignments on workspace-scoped objects. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface TagAssignmentsService { + /** Create a tag assignment */ + TagAssignment createTagAssignment(CreateTagAssignmentRequest createTagAssignmentRequest); + + /** Delete a tag assignment */ + void deleteTagAssignment(DeleteTagAssignmentRequest deleteTagAssignmentRequest); + + /** Get a tag assignment */ + TagAssignment getTagAssignment(GetTagAssignmentRequest getTagAssignmentRequest); + + /** List the tag assignments for an entity */ + ListTagAssignmentsResponse listTagAssignments( + ListTagAssignmentsRequest listTagAssignmentsRequest); + + /** Update a tag assignment */ + TagAssignment updateTagAssignment(UpdateTagAssignmentRequest updateTagAssignmentRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java new file mode 100755 index 000000000..1271e71be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.tags; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateTagAssignmentRequest { + /** The identifier of the entity to which the tag is assigned */ + @JsonIgnore private String entityId; + + /** The type of entity to which the tag is assigned. Allowed value is dashboards */ + @JsonIgnore private String entityType; + + /** */ + @JsonProperty("tag_assignment") + private TagAssignment tagAssignment; + + /** The key of the tag. The characters , . : / - = and leading/trailing spaces are not allowed */ + @JsonIgnore private String tagKey; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateTagAssignmentRequest setEntityId(String entityId) { + this.entityId = entityId; + return this; + } + + public String getEntityId() { + return entityId; + } + + public UpdateTagAssignmentRequest setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public UpdateTagAssignmentRequest setTagAssignment(TagAssignment tagAssignment) { + this.tagAssignment = tagAssignment; + return this; + } + + public TagAssignment getTagAssignment() { + return tagAssignment; + } + + public UpdateTagAssignmentRequest setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + public UpdateTagAssignmentRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateTagAssignmentRequest that = (UpdateTagAssignmentRequest) o; + return Objects.equals(entityId, that.entityId) + && Objects.equals(entityType, that.entityType) + && Objects.equals(tagAssignment, that.tagAssignment) + && Objects.equals(tagKey, that.tagKey) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(entityId, entityType, tagAssignment, tagKey, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateTagAssignmentRequest.class) + .add("entityId", entityId) + .add("entityType", entityType) + .add("tagAssignment", tagAssignment) + .add("tagKey", tagKey) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java index 433feb92d..2c98abcde 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java @@ -21,6 +21,10 @@ public class CreateEndpoint { @JsonProperty("name") private String name; + /** The usage policy id to be applied once we've migrated to usage policies */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + public CreateEndpoint setBudgetPolicyId(String budgetPolicyId) { this.budgetPolicyId = budgetPolicyId; return this; @@ -48,6 +52,15 @@ public String getName() { return name; } + public CreateEndpoint setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -55,12 +68,13 @@ public boolean equals(Object o) { CreateEndpoint that = (CreateEndpoint) o; return Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(endpointType, that.endpointType) - && Objects.equals(name, that.name); + && Objects.equals(name, that.name) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override 
public int hashCode() { - return Objects.hash(budgetPolicyId, endpointType, name); + return Objects.hash(budgetPolicyId, endpointType, name, usagePolicyId); } @Override @@ -69,6 +83,7 @@ public String toString() { .add("budgetPolicyId", budgetPolicyId) .add("endpointType", endpointType) .add("name", name) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java index e893e2b55..f49a22873 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java @@ -18,6 +18,14 @@ public class DeltaSyncVectorIndexSpecRequest { @JsonProperty("columns_to_sync") private Collection columnsToSync; + /** The budget policy id applied to the vector search index */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + /** */ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** The columns that contain the embedding source. */ @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; @@ -56,6 +64,25 @@ public Collection getColumnsToSync() { return columnsToSync; } + public DeltaSyncVectorIndexSpecRequest setEffectiveBudgetPolicyId( + String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public DeltaSyncVectorIndexSpecRequest setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public DeltaSyncVectorIndexSpecRequest setEmbeddingSourceColumns( Collection embeddingSourceColumns) { this.embeddingSourceColumns = embeddingSourceColumns; @@ -110,6 +137,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DeltaSyncVectorIndexSpecRequest that = (DeltaSyncVectorIndexSpecRequest) o; return Objects.equals(columnsToSync, that.columnsToSync) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) && Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable) @@ -121,6 +150,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( columnsToSync, + effectiveBudgetPolicyId, + effectiveUsagePolicyId, embeddingSourceColumns, embeddingVectorColumns, embeddingWritebackTable, @@ -132,6 +163,8 @@ public int hashCode() { public String toString() { return new ToStringer(DeltaSyncVectorIndexSpecRequest.class) .add("columnsToSync", columnsToSync) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("embeddingSourceColumns", embeddingSourceColumns) .add("embeddingVectorColumns", embeddingVectorColumns) .add("embeddingWritebackTable", embeddingWritebackTable) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java index 9f2f17700..19a6ca27a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java @@ -10,6 +10,14 @@ @Generated public class DeltaSyncVectorIndexSpecResponse { + /** The budget policy id applied to the vector search index */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + + /** */ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** The columns that contain the embedding source. */ @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; @@ -43,6 +51,25 @@ public class DeltaSyncVectorIndexSpecResponse { @JsonProperty("source_table") private String sourceTable; + public DeltaSyncVectorIndexSpecResponse setEffectiveBudgetPolicyId( + String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + + public DeltaSyncVectorIndexSpecResponse setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public DeltaSyncVectorIndexSpecResponse setEmbeddingSourceColumns( Collection embeddingSourceColumns) { this.embeddingSourceColumns = embeddingSourceColumns; @@ -105,7 +132,9 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeltaSyncVectorIndexSpecResponse that = (DeltaSyncVectorIndexSpecResponse) o; - return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + return Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) + && Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) && Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable) && Objects.equals(pipelineId, that.pipelineId) @@ -116,6 +145,8 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + effectiveBudgetPolicyId, + effectiveUsagePolicyId, embeddingSourceColumns, embeddingVectorColumns, embeddingWritebackTable, @@ -127,6 +158,8 @@ public int hashCode() { @Override public String toString() { return new ToStringer(DeltaSyncVectorIndexSpecResponse.class) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("embeddingSourceColumns", embeddingSourceColumns) .add("embeddingVectorColumns", embeddingVectorColumns) .add("embeddingWritebackTable", embeddingWritebackTable) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyRequest.java new file mode 100755 index 000000000..5e9b4d7dd --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class UpdateVectorIndexUsagePolicyRequest { + /** Name of the vector search index */ + @JsonIgnore private String indexName; + + public UpdateVectorIndexUsagePolicyRequest setIndexName(String indexName) { + this.indexName = indexName; + return this; + } + + public String getIndexName() { + return indexName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateVectorIndexUsagePolicyRequest that = (UpdateVectorIndexUsagePolicyRequest) o; + return Objects.equals(indexName, that.indexName); + } + + @Override + public int hashCode() { + return Objects.hash(indexName); + } + + @Override + public String toString() { + return new ToStringer(UpdateVectorIndexUsagePolicyRequest.class) + .add("indexName", indexName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyResponse.java new file mode 100755 index 000000000..b0afc0a87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateVectorIndexUsagePolicyResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class UpdateVectorIndexUsagePolicyResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateVectorIndexUsagePolicyResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java index 5333cb149..b2f146712 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java @@ -110,6 +110,12 @@ public void syncIndex(SyncIndexRequest request) { impl.syncIndex(request); } + /** Update the budget policy of an index */ + public UpdateVectorIndexUsagePolicyResponse updateIndexBudgetPolicy( + UpdateVectorIndexUsagePolicyRequest request) { + return impl.updateIndexBudgetPolicy(request); + } + /** Handles the upserting of data into a specified vector index. 
*/ public UpsertDataVectorIndexResponse upsertDataVectorIndex(UpsertDataVectorIndexRequest request) { return impl.upsertDataVectorIndex(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java index 746a4999e..324e1c326 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java @@ -139,6 +139,21 @@ public void syncIndex(SyncIndexRequest request) { } } + @Override + public UpdateVectorIndexUsagePolicyResponse updateIndexBudgetPolicy( + UpdateVectorIndexUsagePolicyRequest request) { + String path = + String.format("/api/2.0/vector-search/indexes/%s/usage-policy", request.getIndexName()); + try { + Request req = new Request("PATCH", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, UpdateVectorIndexUsagePolicyResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public UpsertDataVectorIndexResponse upsertDataVectorIndex(UpsertDataVectorIndexRequest request) { String path = diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesService.java index 5aa6e7409..a5ea98200 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesService.java @@ -54,6 +54,10 @@ QueryVectorIndexResponse queryNextPage( /** Triggers a synchronization process for a specified vector index. */ void syncIndex(SyncIndexRequest syncIndexRequest); + /** Update the budget policy of an index */ + UpdateVectorIndexUsagePolicyResponse updateIndexBudgetPolicy( + UpdateVectorIndexUsagePolicyRequest updateVectorIndexUsagePolicyRequest); + /** Handles the upserting of data into a specified vector index. */ UpsertDataVectorIndexResponse upsertDataVectorIndex( UpsertDataVectorIndexRequest upsertDataVectorIndexRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java index 6b931cc8e..6cb906abd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java @@ -9,6 +9,15 @@ @Generated public class CreateCredentialsRequest { + /** + * The authenticating email associated with your Git provider user account. Used for + * authentication with the remote repository and also sets the author & committer identity for + * commits. Required for most Git providers except AWS CodeCommit. Learn more at + * https://docs.databricks.com/aws/en/repos/get-access-tokens-from-git-provider + */ + @JsonProperty("git_email") + private String gitEmail; + /** * Git provider. This field is case-insensitive. 
The available Git providers are `gitHub`, * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -18,12 +27,10 @@ public class CreateCredentialsRequest { private String gitProvider; /** - * The username or email provided with your Git provider account, depending on which provider you - * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers - * please see your provider's Personal Access Token authentication documentation to see what is - * supported. + * The username provided with your Git provider account and associated with the credential. For + * most Git providers it is only used to set the Git committer & author names for commits; however, + * it may be required for authentication depending on your Git provider / token requirements. + * Required for AWS CodeCommit. */ @JsonProperty("git_username") private String gitUsername; @@ -45,6 +52,15 @@ public class CreateCredentialsRequest { @JsonProperty("personal_access_token") private String personalAccessToken; + public CreateCredentialsRequest setGitEmail(String gitEmail) { + this.gitEmail = gitEmail; + return this; + } + + public String getGitEmail() { + return gitEmail; + } + public CreateCredentialsRequest setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; @@ -95,7 +111,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateCredentialsRequest that = (CreateCredentialsRequest) o; - return Objects.equals(gitProvider, that.gitProvider) + return Objects.equals(gitEmail, that.gitEmail) + && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(isDefaultForProvider, that.isDefaultForProvider) && Objects.equals(name, that.name) @@ -104,12 +121,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(gitProvider, gitUsername, isDefaultForProvider, name, personalAccessToken); + return Objects.hash( + gitEmail, gitProvider, gitUsername, isDefaultForProvider, name, personalAccessToken); } @Override public String toString() { return new ToStringer(CreateCredentialsRequest.class) + .add("gitEmail", gitEmail) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("isDefaultForProvider", isDefaultForProvider) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java index 43c72b571..d29f062ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java @@ -13,13 +13,24 @@ public class CreateCredentialsResponse { @JsonProperty("credential_id") private Long credentialId; + /** + * The authenticating email associated with your Git provider user account. Used for + * authentication with the remote repository and also sets the author & committer identity for + * commits. Required for most Git providers except AWS CodeCommit.
Learn more at + * https://docs.databricks.com/aws/en/repos/get-access-tokens-from-git-provider + */ + @JsonProperty("git_email") + private String gitEmail; + /** The Git provider associated with the credential. */ @JsonProperty("git_provider") private String gitProvider; /** - * The username or email provided with your Git provider account and associated with the - * credential. + * The username provided with your Git provider account and associated with the credential. For + * most Git providers it is only used to set the Git committer & author names for commits; however, + * it may be required for authentication depending on your Git provider / token requirements. + * Required for AWS CodeCommit. */ @JsonProperty("git_username") private String gitUsername; @@ -41,6 +52,15 @@ public Long getCredentialId() { return credentialId; } + public CreateCredentialsResponse setGitEmail(String gitEmail) { + this.gitEmail = gitEmail; + return this; + } + + public String getGitEmail() { + return gitEmail; + } + public CreateCredentialsResponse setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; @@ -83,6 +103,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateCredentialsResponse that = (CreateCredentialsResponse) o; return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitEmail, that.gitEmail) && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(isDefaultForProvider, that.isDefaultForProvider) @@ -91,13 +112,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(credentialId, gitProvider, gitUsername, isDefaultForProvider, name); + return Objects.hash( + credentialId, gitEmail, gitProvider, gitUsername, isDefaultForProvider, name); } @Override public String toString() { return new ToStringer(CreateCredentialsResponse.class) .add("credentialId", credentialId) + .add("gitEmail", gitEmail) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("isDefaultForProvider", isDefaultForProvider) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java index d98116e31..428ee5726 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java @@ -13,13 +13,24 @@ public class CredentialInfo { @JsonProperty("credential_id") private Long credentialId; + /** + * The authenticating email associated with your Git provider user account. Used for + * authentication with the remote repository and also sets the author & committer identity for + * commits. Required for most Git providers except AWS CodeCommit. Learn more at + * https://docs.databricks.com/aws/en/repos/get-access-tokens-from-git-provider + */ + @JsonProperty("git_email") + private String gitEmail; + /** The Git provider associated with the credential. */ @JsonProperty("git_provider") private String gitProvider; /** - * The username or email provided with your Git provider account and associated with the - * credential. + * The username provided with your Git provider account and associated with the credential.
For + * most Git providers it is only used to set the Git committer & author names for commits; however, + * it may be required for authentication depending on your Git provider / token requirements. + * Required for AWS CodeCommit. */ @JsonProperty("git_username") private String gitUsername; @@ -41,6 +52,15 @@ public Long getCredentialId() { return credentialId; } + public CredentialInfo setGitEmail(String gitEmail) { + this.gitEmail = gitEmail; + return this; + } + + public String getGitEmail() { + return gitEmail; + } + public CredentialInfo setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; @@ -83,6 +103,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CredentialInfo that = (CredentialInfo) o; return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitEmail, that.gitEmail) && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(isDefaultForProvider, that.isDefaultForProvider) @@ -91,13 +112,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(credentialId, gitProvider, gitUsername, isDefaultForProvider, name); + return Objects.hash( + credentialId, gitEmail, gitProvider, gitUsername, isDefaultForProvider, name); } @Override public String toString() { return new ToStringer(CredentialInfo.class) .add("credentialId", credentialId) + .add("gitEmail", gitEmail) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("isDefaultForProvider", isDefaultForProvider) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportOutputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportOutputs.java new file mode 100755 index 000000000..deb61dec4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportOutputs.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ExportOutputs { + ALL, + NONE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java index fd4ef88fc..5bda932fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java @@ -27,6 +27,16 @@ public class ExportRequest { @QueryParam("format") private ExportFormat format; + /** + * This specifies which cell outputs should be included in the export (if the export format allows + * it). If not specified, the behavior is determined by the format. For JUPYTER format, the + * default is to include all outputs. This is a public endpoint, but only ALL or NONE is + * documented publicly; DATABRICKS is internal only + */ + @JsonIgnore + @QueryParam("outputs") + private ExportOutputs outputs; + /** * The absolute path of the object or directory. Exporting a directory is only supported for the * `DBC`, `SOURCE`, and `AUTO` format.
@@ -44,6 +54,15 @@ public ExportFormat getFormat() { return format; } + public ExportRequest setOutputs(ExportOutputs outputs) { + this.outputs = outputs; + return this; + } + + public ExportOutputs getOutputs() { + return outputs; + } + public ExportRequest setPath(String path) { this.path = path; return this; @@ -58,16 +77,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ExportRequest that = (ExportRequest) o; - return Objects.equals(format, that.format) && Objects.equals(path, that.path); + return Objects.equals(format, that.format) + && Objects.equals(outputs, that.outputs) + && Objects.equals(path, that.path); } @Override public int hashCode() { - return Objects.hash(format, path); + return Objects.hash(format, outputs, path); } @Override public String toString() { - return new ToStringer(ExportRequest.class).add("format", format).add("path", path).toString(); + return new ToStringer(ExportRequest.class) + .add("format", format) + .add("outputs", outputs) + .add("path", path) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java index 9296f378e..d862a7873 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java @@ -13,13 +13,24 @@ public class GetCredentialsResponse { @JsonProperty("credential_id") private Long credentialId; + /** + * The authenticating email associated with your Git provider user account. Used for + * authentication with the remote repository and also sets the author & committer identity for + * commits. Required for most Git providers except AWS CodeCommit. Learn more at + * https://docs.databricks.com/aws/en/repos/get-access-tokens-from-git-provider + */ + @JsonProperty("git_email") + private String gitEmail; + /** The Git provider associated with the credential. */ @JsonProperty("git_provider") private String gitProvider; /** - * The username or email provided with your Git provider account and associated with the - * credential. + * The username provided with your Git provider account and associated with the credential. For + * most Git providers it is only used to set the Git committer & author names for commits; however, + * it may be required for authentication depending on your Git provider / token requirements. + * Required for AWS CodeCommit.
*/ @JsonProperty("git_username") private String gitUsername; @@ -41,6 +52,15 @@ public Long getCredentialId() { return credentialId; } + public GetCredentialsResponse setGitEmail(String gitEmail) { + this.gitEmail = gitEmail; + return this; + } + + public String getGitEmail() { + return gitEmail; + } + public GetCredentialsResponse setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; @@ -83,6 +103,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GetCredentialsResponse that = (GetCredentialsResponse) o; return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitEmail, that.gitEmail) && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(isDefaultForProvider, that.isDefaultForProvider) @@ -91,13 +112,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(credentialId, gitProvider, gitUsername, isDefaultForProvider, name); + return Objects.hash( + credentialId, gitEmail, gitProvider, gitUsername, isDefaultForProvider, name); } @Override public String toString() { return new ToStringer(GetCredentialsResponse.class) .add("credentialId", credentialId) + .add("gitEmail", gitEmail) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("isDefaultForProvider", isDefaultForProvider) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java index d47305878..278aa881a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java @@ -13,6 +13,15 @@ public class UpdateCredentialsRequest { /** The ID for the corresponding credential to access. */ @JsonIgnore private Long credentialId; + /** + * The authenticating email associated with your Git provider user account. Used for + * authentication with the remote repository and also sets the author & committer identity for + * commits. Required for most Git providers except AWS CodeCommit. Learn more at + * https://docs.databricks.com/aws/en/repos/get-access-tokens-from-git-provider + */ + @JsonProperty("git_email") + private String gitEmail; + /** * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -22,12 +31,10 @@ public class UpdateCredentialsRequest { private String gitProvider; /** - * The username or email provided with your Git provider account, depending on which provider you - * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers - * please see your provider's Personal Access Token authentication documentation to see what is - * supported. + * The username provided with your Git provider account and associated with the credential. For + * most Git providers it is only used to set the Git committer & author names for commits; however, + * it may be required for authentication depending on your Git provider / token requirements. + * Required for AWS CodeCommit.
*/ @JsonProperty("git_username") private String gitUsername; @@ -58,6 +65,15 @@ public Long getCredentialId() { return credentialId; } + public UpdateCredentialsRequest setGitEmail(String gitEmail) { + this.gitEmail = gitEmail; + return this; + } + + public String getGitEmail() { + return gitEmail; + } + public UpdateCredentialsRequest setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; @@ -109,6 +125,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; UpdateCredentialsRequest that = (UpdateCredentialsRequest) o; return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitEmail, that.gitEmail) && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(isDefaultForProvider, that.isDefaultForProvider) @@ -119,13 +136,20 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - credentialId, gitProvider, gitUsername, isDefaultForProvider, name, personalAccessToken); + credentialId, + gitEmail, + gitProvider, + gitUsername, + isDefaultForProvider, + name, + personalAccessToken); } @Override public String toString() { return new ToStringer(UpdateCredentialsRequest.class) .add("credentialId", credentialId) + .add("gitEmail", gitEmail) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("isDefaultForProvider", isDefaultForProvider) diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java new file mode 100755 index 000000000..5ba5e2732 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java @@ -0,0 +1,491 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
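Before moving into the generated LRO tests, a quick illustration of the new `git_email` field from the credential hunks above. This is a hedged sketch, not part of the diff: it assumes the standard generated accessors (only `setGitEmail` and `setGitProvider` are visible in the hunks; `setGitUsername` is presumed from the unchanged context), and all values are invented.

import com.databricks.sdk.service.workspace.UpdateCredentialsRequest;

public class GitEmailSketch {
  public static void main(String[] args) {
    // Hypothetical values: git_email authenticates and sets the commit author and
    // committer identity for most providers; git_username is what AWS CodeCommit needs.
    UpdateCredentialsRequest req =
        new UpdateCredentialsRequest()
            .setGitProvider("gitHub")
            .setGitUsername("octocat")
            .setGitEmail("octocat@example.com");
    System.out.println(req); // ToStringer output now includes the gitEmail field
  }
}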
+package com.databricks.sdk.service.gentesting.unittests; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.service.lrotesting.*; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class LroTestingAPITest { + @Mock private ApiClient mockApiClient; + private ObjectMapper objectMapper; + + static class HTTPFixture { + String method; + String resource; + Operation response; + + HTTPFixture(String method, String resource, Operation response) { + this.method = method; + this.resource = resource; + this.response = response; + } + } + + static class WaitTestCase { + String name; + List<HTTPFixture> fixtures; + TestResource wantResult; + boolean wantErr; + + WaitTestCase( + String name, List<HTTPFixture> fixtures, TestResource wantResult, boolean wantErr) { + this.name = name; + this.fixtures = fixtures; + this.wantResult = wantResult; + this.wantErr = wantErr; + } + } + + static class CancelTestCase { + String name; + List<HTTPFixture> fixtures; + boolean wantErr; + + CancelTestCase(String name, List<HTTPFixture> fixtures, boolean wantErr) { + this.name = name; + this.fixtures = fixtures; + this.wantErr = wantErr; + } + } + + static class NameTestCase { + String name; + List<HTTPFixture> fixtures; + String wantName; + + NameTestCase(String name, List<HTTPFixture> fixtures, String wantName) { + this.name = name; + this.fixtures = fixtures; + this.wantName = wantName; + } + } + + static class MetadataTestCase { + String name; + List<HTTPFixture> fixtures; + TestResourceOperationMetadata wantMetadata; + boolean wantErr; + + MetadataTestCase( + String name, + List<HTTPFixture> fixtures, + TestResourceOperationMetadata wantMetadata, + boolean wantErr) { + this.name = name; + this.fixtures = fixtures; + this.wantMetadata = wantMetadata; + this.wantErr = wantErr; + } + } + + static class DoneTestCase { + String name; + List<HTTPFixture> fixtures; + boolean wantDone; + boolean wantErr; + + DoneTestCase(String name, List<HTTPFixture> fixtures, boolean wantDone, boolean wantErr) { + this.name = name; + this.fixtures = fixtures; + this.wantDone = wantDone; + this.wantErr = wantErr; + } + } + + @BeforeEach + void setUp() { + objectMapper = new ObjectMapper(); + } + + private void applyFixtures(List<HTTPFixture> fixtures) throws Exception { + // Create a custom Answer that validates requests and returns responses in sequence.
+ final int[] callCount = {0}; + when(mockApiClient.execute(any(Request.class), eq(Operation.class))) + .thenAnswer( + invocation -> { + Request request = invocation.getArgument(0); + if (callCount[0] >= fixtures.size()) { + throw new RuntimeException("More API calls than expected fixtures"); + } + HTTPFixture expectedFixture = fixtures.get(callCount[0]); + if (!expectedFixture.method.equals(request.getMethod())) { + throw new AssertionError( + String.format( + "Call %d: Expected method %s but got %s", + callCount[0], expectedFixture.method, request.getMethod())); + } + String expectedPath = expectedFixture.resource; + if (!request.getUrl().equals(expectedPath)) { + throw new AssertionError( + String.format( + "Call %d: Expected exact URL %s but got %s", + callCount[0], expectedPath, request.getUrl())); + } + Operation response = expectedFixture.response; + callCount[0]++; + return response; + }); + } + + static List waitTestCases() throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new WaitTestCase( + "Success", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "GET", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 75\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "GET", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345", + new Operation() + .setDone(true) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 100\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345") + .setResponse( + new ObjectMapper() + .readValue( + "{\n" + + " \"id\": \"test-resource-123\",\n" + + " \"name\": \"test-resource\"\n" + + "}", + Object.class)))), + new TestResource().setId("test-resource-123").setName("test-resource"), + false), + new WaitTestCase( + "Error", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "GET", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345", + new Operation() + .setDone(true) + .setError( + new DatabricksServiceExceptionWithDetailsProto() + .setErrorCode(ErrorCode.INTERNAL_ERROR) + .setMessage("Test error message")) + .setName("operations/test-resource-create-12345"))), + null, + true)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("waitTestCases") + void testLROCreateTestResourceWait(WaitTestCase testCase) throws Exception { + // Reset mock and apply fixtures. + reset(mockApiClient); + applyFixtures(testCase.fixtures); + // Create API and proper request. 
+ LroTestingAPI api = new LroTestingAPI(mockApiClient); + CreateTestResourceOperation operation = + api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource())); + if (testCase.wantErr) { + assertThrows( + Exception.class, + () -> + operation.waitForCompletion( + Optional.of(LroOptions.withTimeout(Duration.ofMinutes(1)))), + "Test case: " + testCase.name); + } else { + TestResource result = + operation.waitForCompletion(Optional.of(LroOptions.withTimeout(Duration.ofMinutes(1)))); + assertEquals(testCase.wantResult, result, "Test case: " + testCase.name); + } + } + + static List cancelTestCases() + throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new CancelTestCase( + "Success", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345/cancel", + new Operation() + .setDone(true) + .setName("operations/test-resource-create-12345"))), + false)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("cancelTestCases") + void testLROCancelTestResourceCancel(CancelTestCase testCase) throws Exception { + // Reset mock and apply fixtures. + reset(mockApiClient); + applyFixtures(testCase.fixtures); + // Create API and execute test. + LroTestingAPI api = new LroTestingAPI(mockApiClient); + CreateTestResourceOperation operation = + api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource())); + if (testCase.wantErr) { + assertThrows( + Exception.class, + () -> operation.cancel(), + "Cancel should have failed for test case: " + testCase.name); + } else { + assertDoesNotThrow(() -> operation.cancel(), "Cancel failed for test case: " + testCase.name); + } + } + + static List nameTestCases() throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new NameTestCase( + "Success", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345"))), + "operations/test-resource-create-12345")); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("nameTestCases") + void testLROCreateTestResourceName(NameTestCase testCase) throws Exception { + // Reset mock and apply fixtures. + reset(mockApiClient); + applyFixtures(testCase.fixtures); + // Create API and execute test. 
+ LroTestingAPI api = new LroTestingAPI(mockApiClient); + CreateTestResourceOperation operation = + api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource())); + String name = operation.getName(); + assertEquals(testCase.wantName, name, "Name mismatch for test case: " + testCase.name); + } + + static List metadataTestCases() + throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new MetadataTestCase( + "Success", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345"))), + new TestResourceOperationMetadata() + .setProgressPercent(5L) + .setResourceId("test-resource-123"), + false)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("metadataTestCases") + void testLROCreateTestResourceMetadata(MetadataTestCase testCase) throws Exception { + // Reset mock and apply fixtures. + reset(mockApiClient); + applyFixtures(testCase.fixtures); + // Create API and execute test. + LroTestingAPI api = new LroTestingAPI(mockApiClient); + CreateTestResourceOperation operation = + api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource())); + if (testCase.wantErr) { + assertThrows( + Exception.class, + () -> operation.getMetadata(), + "Metadata should have failed for test case: " + testCase.name); + } else { + TestResourceOperationMetadata metadata = operation.getMetadata(); + assertNotNull(metadata, "Metadata should not be null for test case: " + testCase.name); + assertEquals( + testCase.wantMetadata, metadata, "Metadata mismatch for test case: " + testCase.name); + } + } + // Done test cases. 
+ static List doneTestCases() throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new DoneTestCase( + "True", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "GET", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345", + new Operation() + .setDone(true) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 100\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345") + .setResponse( + new ObjectMapper() + .readValue( + "{\n" + + " \"id\": \"test-resource-123\",\n" + + " \"name\": \"test-resource\"\n" + + "}", + Object.class)))), + true, + false), + new DoneTestCase( + "False", + Arrays.asList( + new HTTPFixture( + "POST", + "/api/2.0/lro-testing/resources", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 5\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345")), + new HTTPFixture( + "GET", + "/api/2.0/lro-testing/operations/operations/test-resource-create-12345", + new Operation() + .setDone(false) + .setMetadata( + new ObjectMapper() + .readValue( + "{\n" + + " \"resource_id\": \"test-resource-123\",\n" + + " \"progress_percent\": 75\n" + + "}", + Object.class)) + .setName("operations/test-resource-create-12345"))), + false, + false)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("doneTestCases") + void testLROCreateTestResourceDone(DoneTestCase testCase) throws Exception { + // Reset mock and apply fixtures. + reset(mockApiClient); + applyFixtures(testCase.fixtures); + // Create API and execute test. + LroTestingAPI api = new LroTestingAPI(mockApiClient); + CreateTestResourceOperation operation = + api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource())); + if (testCase.wantErr) { + assertThrows( + Exception.class, + () -> operation.isDone(), + "Done should have failed for test case: " + testCase.name); + } else { + boolean done = operation.isDone(); + assertEquals(testCase.wantDone, done, "Done mismatch for test case: " + testCase.name); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java new file mode 100755 index 000000000..238de5ed6 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
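The fixture pattern in the LRO test file above (validate method and URL on each call, answer canned Operations in order, fail on extra calls) reduces to a small, self-contained Mockito sketch. All names here are invented stand-ins; the real tests mock ApiClient itself and return Operation objects.

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class SequencedStubSketch {
  // Stand-in for ApiClient.execute.
  interface Client {
    String call(String url);
  }

  public static void main(String[] args) {
    List<String> canned = Arrays.asList("RUNNING", "RUNNING", "DONE");
    AtomicInteger calls = new AtomicInteger();
    Client client = mock(Client.class);
    // Answer each invocation from the queue in order and fail on extra calls,
    // mirroring what applyFixtures() does (with per-call method/URL checks added).
    when(client.call(anyString()))
        .thenAnswer(
            inv -> {
              int i = calls.getAndIncrement();
              if (i >= canned.size()) {
                throw new AssertionError("more calls than fixtures: " + inv.getArgument(0));
              }
              return canned.get(i);
            });
    System.out.println(client.call("/op")); // RUNNING
    System.out.println(client.call("/op")); // RUNNING
    System.out.println(client.call("/op")); // DONE
  }
}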
+ +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ComplexQueryParam { + /** */ + @JsonProperty("nested_optional_query_param") + @QueryParam("nested_optional_query_param") + private String nestedOptionalQueryParam; + + /** */ + @JsonProperty("nested_repeated_query_param") + @QueryParam("nested_repeated_query_param") + private Collection<String> nestedRepeatedQueryParam; + + public ComplexQueryParam setNestedOptionalQueryParam(String nestedOptionalQueryParam) { + this.nestedOptionalQueryParam = nestedOptionalQueryParam; + return this; + } + + public String getNestedOptionalQueryParam() { + return nestedOptionalQueryParam; + } + + public ComplexQueryParam setNestedRepeatedQueryParam( + Collection<String> nestedRepeatedQueryParam) { + this.nestedRepeatedQueryParam = nestedRepeatedQueryParam; + return this; + } + + public Collection<String> getNestedRepeatedQueryParam() { + return nestedRepeatedQueryParam; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComplexQueryParam that = (ComplexQueryParam) o; + return Objects.equals(nestedOptionalQueryParam, that.nestedOptionalQueryParam) + && Objects.equals(nestedRepeatedQueryParam, that.nestedRepeatedQueryParam); + } + + @Override + public int hashCode() { + return Objects.hash(nestedOptionalQueryParam, nestedRepeatedQueryParam); + } + + @Override + public String toString() { + return new ToStringer(ComplexQueryParam.class) + .add("nestedOptionalQueryParam", nestedOptionalQueryParam) + .add("nestedRepeatedQueryParam", nestedRepeatedQueryParam) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java new file mode 100755 index 000000000..336931156 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** This mimics "old" style post requests which have the resource inlined.
*/ +@Generated +public class CreateResourceRequest { + /** Body element */ + @JsonProperty("body_field") + private String bodyField; + + /** */ + @JsonIgnore private Boolean pathParamBool; + + /** */ + @JsonIgnore private Long pathParamInt; + + /** */ + @JsonIgnore private String pathParamString; + + public CreateResourceRequest setBodyField(String bodyField) { + this.bodyField = bodyField; + return this; + } + + public String getBodyField() { + return bodyField; + } + + public CreateResourceRequest setPathParamBool(Boolean pathParamBool) { + this.pathParamBool = pathParamBool; + return this; + } + + public Boolean getPathParamBool() { + return pathParamBool; + } + + public CreateResourceRequest setPathParamInt(Long pathParamInt) { + this.pathParamInt = pathParamInt; + return this; + } + + public Long getPathParamInt() { + return pathParamInt; + } + + public CreateResourceRequest setPathParamString(String pathParamString) { + this.pathParamString = pathParamString; + return this; + } + + public String getPathParamString() { + return pathParamString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateResourceRequest that = (CreateResourceRequest) o; + return Objects.equals(bodyField, that.bodyField) + && Objects.equals(pathParamBool, that.pathParamBool) + && Objects.equals(pathParamInt, that.pathParamInt) + && Objects.equals(pathParamString, that.pathParamString); + } + + @Override + public int hashCode() { + return Objects.hash(bodyField, pathParamBool, pathParamInt, pathParamString); + } + + @Override + public String toString() { + return new ToStringer(CreateResourceRequest.class) + .add("bodyField", bodyField) + .add("pathParamBool", pathParamBool) + .add("pathParamInt", pathParamInt) + .add("pathParamString", pathParamString) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java new file mode 100755 index 000000000..ac2e90ec1 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java @@ -0,0 +1,203 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetResourceRequest { + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. 
+ */ + @JsonIgnore + @QueryParam("field_mask") + private String fieldMask; + + /** */ + @JsonIgnore + @QueryParam("optional_complex_query_param") + private ComplexQueryParam optionalComplexQueryParam; + + /** */ + @JsonIgnore private Boolean pathParamBool; + + /** */ + @JsonIgnore private Long pathParamInt; + + /** */ + @JsonIgnore private String pathParamString; + + /** */ + @JsonIgnore + @QueryParam("query_param_bool") + private Boolean queryParamBool; + + /** */ + @JsonIgnore + @QueryParam("query_param_int") + private Long queryParamInt; + + /** */ + @JsonIgnore + @QueryParam("query_param_string") + private String queryParamString; + + /** */ + @JsonIgnore + @QueryParam("repeated_complex_query_param") + private Collection<ComplexQueryParam> repeatedComplexQueryParam; + + /** */ + @JsonIgnore + @QueryParam("repeated_query_param") + private Collection<String> repeatedQueryParam; + + public GetResourceRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public GetResourceRequest setOptionalComplexQueryParam( + ComplexQueryParam optionalComplexQueryParam) { + this.optionalComplexQueryParam = optionalComplexQueryParam; + return this; + } + + public ComplexQueryParam getOptionalComplexQueryParam() { + return optionalComplexQueryParam; + } + + public GetResourceRequest setPathParamBool(Boolean pathParamBool) { + this.pathParamBool = pathParamBool; + return this; + } + + public Boolean getPathParamBool() { + return pathParamBool; + } + + public GetResourceRequest setPathParamInt(Long pathParamInt) { + this.pathParamInt = pathParamInt; + return this; + } + + public Long getPathParamInt() { + return pathParamInt; + } + + public GetResourceRequest setPathParamString(String pathParamString) { + this.pathParamString = pathParamString; + return this; + } + + public String getPathParamString() { + return pathParamString; + } + + public GetResourceRequest setQueryParamBool(Boolean queryParamBool) { + this.queryParamBool = queryParamBool; + return this; + } + + public Boolean getQueryParamBool() { + return queryParamBool; + } + + public GetResourceRequest setQueryParamInt(Long queryParamInt) { + this.queryParamInt = queryParamInt; + return this; + } + + public Long getQueryParamInt() { + return queryParamInt; + } + + public GetResourceRequest setQueryParamString(String queryParamString) { + this.queryParamString = queryParamString; + return this; + } + + public String getQueryParamString() { + return queryParamString; + } + + public GetResourceRequest setRepeatedComplexQueryParam( + Collection<ComplexQueryParam> repeatedComplexQueryParam) { + this.repeatedComplexQueryParam = repeatedComplexQueryParam; + return this; + } + + public Collection<ComplexQueryParam> getRepeatedComplexQueryParam() { + return repeatedComplexQueryParam; + } + + public GetResourceRequest setRepeatedQueryParam(Collection<String> repeatedQueryParam) { + this.repeatedQueryParam = repeatedQueryParam; + return this; + } + + public Collection<String> getRepeatedQueryParam() { + return repeatedQueryParam; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetResourceRequest that = (GetResourceRequest) o; + return Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(optionalComplexQueryParam, that.optionalComplexQueryParam) + && Objects.equals(pathParamBool, that.pathParamBool) + && Objects.equals(pathParamInt, that.pathParamInt) + && Objects.equals(pathParamString, that.pathParamString) + &&
Objects.equals(queryParamBool, that.queryParamBool) + && Objects.equals(queryParamInt, that.queryParamInt) + && Objects.equals(queryParamString, that.queryParamString) + && Objects.equals(repeatedComplexQueryParam, that.repeatedComplexQueryParam) + && Objects.equals(repeatedQueryParam, that.repeatedQueryParam); + } + + @Override + public int hashCode() { + return Objects.hash( + fieldMask, + optionalComplexQueryParam, + pathParamBool, + pathParamInt, + pathParamString, + queryParamBool, + queryParamInt, + queryParamString, + repeatedComplexQueryParam, + repeatedQueryParam); + } + + @Override + public String toString() { + return new ToStringer(GetResourceRequest.class) + .add("fieldMask", fieldMask) + .add("optionalComplexQueryParam", optionalComplexQueryParam) + .add("pathParamBool", pathParamBool) + .add("pathParamInt", pathParamInt) + .add("pathParamString", pathParamString) + .add("queryParamBool", queryParamBool) + .add("queryParamInt", queryParamInt) + .add("queryParamString", queryParamString) + .add("repeatedComplexQueryParam", repeatedComplexQueryParam) + .add("repeatedQueryParam", repeatedQueryParam) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java new file mode 100755 index 000000000..8a7345c5d --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Lorem Ipsum */ +@Generated +public class HttpCallV2API { + private static final Logger LOG = LoggerFactory.getLogger(HttpCallV2API.class); + + private final HttpCallV2Service impl; + + /** Regular-use constructor */ + public HttpCallV2API(ApiClient apiClient) { + impl = new HttpCallV2Impl(apiClient); + } + + /** Constructor for mocks */ + public HttpCallV2API(HttpCallV2Service mock) { + impl = mock; + } + + /** This mimics "old" style post requests which have the resource inlined. */ + public Resource createResource(CreateResourceRequest request) { + return impl.createResource(request); + } + + public Resource getResource(String pathParamString, long pathParamInt, boolean pathParamBool) { + return getResource( + new GetResourceRequest() + .setPathParamString(pathParamString) + .setPathParamInt(pathParamInt) + .setPathParamBool(pathParamBool)); + } + + public Resource getResource(GetResourceRequest request) { + return impl.getResource(request); + } + + /** This mimics "new" style post requests which have a body field. */ + public Resource updateResource(UpdateResourceRequest request) { + return impl.updateResource(request); + } + + public HttpCallV2Service impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java new file mode 100755 index 000000000..fd0d11228 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
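Before the implementation class that follows, a hedged usage sketch of the request model above. ApiClient construction is elided and every value is invented; only setters generated in this diff are used.

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.httpcallv2.*;
import java.util.Arrays;

public class GetResourceSketch {
  static Resource fetch(ApiClient apiClient) {
    HttpCallV2API api = new HttpCallV2API(apiClient);
    return api.getResource(
        new GetResourceRequest()
            // The Impl below interpolates these into /api/2.0/http-call/res-1/42/true.
            .setPathParamString("res-1")
            .setPathParamInt(42L)
            .setPathParamBool(true)
            // Everything else is attached as query parameters by ApiClient.setQuery.
            .setRepeatedQueryParam(Arrays.asList("a", "b"))
            .setOptionalComplexQueryParam(
                new ComplexQueryParam().setNestedOptionalQueryParam("abc")));
  }
}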
+package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of HttpCallV2 */ +@Generated +class HttpCallV2Impl implements HttpCallV2Service { + private final ApiClient apiClient; + + public HttpCallV2Impl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Resource createResource(CreateResourceRequest request) { + String path = + String.format( + "/api/2.0/http-call/%s/%s/%s", + request.getPathParamString(), request.getPathParamInt(), request.getPathParamBool()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Resource.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Resource getResource(GetResourceRequest request) { + String path = + String.format( + "/api/2.0/http-call/%s/%s/%s", + request.getPathParamString(), request.getPathParamInt(), request.getPathParamBool()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Resource.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Resource updateResource(UpdateResourceRequest request) { + String path = + String.format( + "/api/2.0/http-call/%s/%s/%s", + request.getNestedPathParamString(), + request.getNestedPathParamInt(), + request.getNestedPathParamBool()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getResource())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Resource.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java new file mode 100755 index 000000000..9ae378e4b --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java @@ -0,0 +1,22 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; + +/** + * Lorem Ipsum + * + *

<p>This is the high-level interface, that contains generated methods. + * + * <p>

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface HttpCallV2Service { + /** This mimics "old" style post requests which have the resource inlined. */ + Resource createResource(CreateResourceRequest createResourceRequest); + + Resource getResource(GetResourceRequest getResourceRequest); + + /** This mimics "new" style post requests which have a body field. */ + Resource updateResource(UpdateResourceRequest updateResourceRequest); +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java new file mode 100755 index 000000000..94779c31b --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Resource { + /** */ + @JsonProperty("any_field") + private Object anyField; + + /** */ + @JsonProperty("body_field") + private String bodyField; + + /** */ + @JsonProperty("nested_path_param_bool") + private Boolean nestedPathParamBool; + + /** */ + @JsonProperty("nested_path_param_int") + private Long nestedPathParamInt; + + /** */ + @JsonProperty("nested_path_param_string") + private String nestedPathParamString; + + public Resource setAnyField(Object anyField) { + this.anyField = anyField; + return this; + } + + public Object getAnyField() { + return anyField; + } + + public Resource setBodyField(String bodyField) { + this.bodyField = bodyField; + return this; + } + + public String getBodyField() { + return bodyField; + } + + public Resource setNestedPathParamBool(Boolean nestedPathParamBool) { + this.nestedPathParamBool = nestedPathParamBool; + return this; + } + + public Boolean getNestedPathParamBool() { + return nestedPathParamBool; + } + + public Resource setNestedPathParamInt(Long nestedPathParamInt) { + this.nestedPathParamInt = nestedPathParamInt; + return this; + } + + public Long getNestedPathParamInt() { + return nestedPathParamInt; + } + + public Resource setNestedPathParamString(String nestedPathParamString) { + this.nestedPathParamString = nestedPathParamString; + return this; + } + + public String getNestedPathParamString() { + return nestedPathParamString; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Resource that = (Resource) o; + return Objects.equals(anyField, that.anyField) + && Objects.equals(bodyField, that.bodyField) + && Objects.equals(nestedPathParamBool, that.nestedPathParamBool) + && Objects.equals(nestedPathParamInt, that.nestedPathParamInt) + && Objects.equals(nestedPathParamString, that.nestedPathParamString); + } + + @Override + public int hashCode() { + return Objects.hash( + anyField, bodyField, nestedPathParamBool, nestedPathParamInt, nestedPathParamString); + } + + @Override + public String toString() { + return new ToStringer(Resource.class) + .add("anyField", anyField) + .add("bodyField", bodyField) + .add("nestedPathParamBool", nestedPathParamBool) + .add("nestedPathParamInt", nestedPathParamInt) + .add("nestedPathParamString", nestedPathParamString) + .toString(); + } +} diff 
--git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java new file mode 100755 index 000000000..96ccd80c3 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java @@ -0,0 +1,220 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.httpcallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateResourceRequest { + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + */ + @JsonIgnore + @QueryParam("field_mask") + private String fieldMask; + + /** */ + @JsonIgnore private Boolean nestedPathParamBool; + + /** */ + @JsonIgnore private Long nestedPathParamInt; + + /** */ + @JsonIgnore private String nestedPathParamString; + + /** */ + @JsonIgnore + @QueryParam("optional_complex_query_param") + private ComplexQueryParam optionalComplexQueryParam; + + /** */ + @JsonIgnore + @QueryParam("query_param_bool") + private Boolean queryParamBool; + + /** */ + @JsonIgnore + @QueryParam("query_param_int") + private Long queryParamInt; + + /** */ + @JsonIgnore + @QueryParam("query_param_string") + private String queryParamString; + + /** */ + @JsonIgnore + @QueryParam("repeated_complex_query_param") + private Collection<ComplexQueryParam> repeatedComplexQueryParam; + + /** */ + @JsonIgnore + @QueryParam("repeated_query_param") + private Collection<String> repeatedQueryParam; + + /** Body element */ + @JsonProperty("resource") + private Resource resource; + + public UpdateResourceRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateResourceRequest setNestedPathParamBool(Boolean nestedPathParamBool) { + this.nestedPathParamBool = nestedPathParamBool; + return this; + } + + public Boolean getNestedPathParamBool() { + return nestedPathParamBool; + } + + public UpdateResourceRequest setNestedPathParamInt(Long nestedPathParamInt) { + this.nestedPathParamInt = nestedPathParamInt; + return this; + } + + public Long getNestedPathParamInt() { + return nestedPathParamInt; + } + + public UpdateResourceRequest setNestedPathParamString(String nestedPathParamString) { + this.nestedPathParamString = nestedPathParamString; + return this; + } + + public String getNestedPathParamString() { + return nestedPathParamString; + } + + public UpdateResourceRequest setOptionalComplexQueryParam( + ComplexQueryParam optionalComplexQueryParam) { + this.optionalComplexQueryParam = optionalComplexQueryParam; + return this; + } + + public ComplexQueryParam getOptionalComplexQueryParam() { + return optionalComplexQueryParam; + } + + public UpdateResourceRequest setQueryParamBool(Boolean queryParamBool) { + this.queryParamBool = queryParamBool; + return this; + } + + public Boolean getQueryParamBool() { + return queryParamBool; + } + + public UpdateResourceRequest setQueryParamInt(Long queryParamInt) { + this.queryParamInt = queryParamInt; + return this; + } + + public Long getQueryParamInt() { + return queryParamInt; + } + + public UpdateResourceRequest setQueryParamString(String queryParamString) { + this.queryParamString = queryParamString; + return this; + } + + public String getQueryParamString() { + return queryParamString; + } + + public UpdateResourceRequest setRepeatedComplexQueryParam( + Collection<ComplexQueryParam> repeatedComplexQueryParam) { + this.repeatedComplexQueryParam = repeatedComplexQueryParam; + return this; + } + + public Collection<ComplexQueryParam> getRepeatedComplexQueryParam() { + return repeatedComplexQueryParam; + } + + public UpdateResourceRequest setRepeatedQueryParam(Collection<String> repeatedQueryParam) { + this.repeatedQueryParam = repeatedQueryParam; + return this; + } + + public Collection<String> getRepeatedQueryParam() { + return repeatedQueryParam; + } + + public UpdateResourceRequest setResource(Resource resource) { + this.resource = resource; + return this; + } + + public Resource getResource() { + return resource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateResourceRequest that = (UpdateResourceRequest) o; + return Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(nestedPathParamBool, that.nestedPathParamBool) + && Objects.equals(nestedPathParamInt, that.nestedPathParamInt) + && Objects.equals(nestedPathParamString, that.nestedPathParamString) + && Objects.equals(optionalComplexQueryParam, that.optionalComplexQueryParam) + && Objects.equals(queryParamBool, that.queryParamBool) + && Objects.equals(queryParamInt, that.queryParamInt) + && Objects.equals(queryParamString, that.queryParamString) + && Objects.equals(repeatedComplexQueryParam, that.repeatedComplexQueryParam) + && Objects.equals(repeatedQueryParam, that.repeatedQueryParam) + && Objects.equals(resource, that.resource); + } + + @Override + public int hashCode() { + return Objects.hash( + fieldMask, + nestedPathParamBool, + nestedPathParamInt, + nestedPathParamString, + optionalComplexQueryParam, + queryParamBool, + queryParamInt, + queryParamString, + repeatedComplexQueryParam, + repeatedQueryParam, + resource); + } + + @Override + public String toString() { + return new ToStringer(UpdateResourceRequest.class) + .add("fieldMask", fieldMask) + .add("nestedPathParamBool", nestedPathParamBool) + .add("nestedPathParamInt", nestedPathParamInt) + .add("nestedPathParamString", nestedPathParamString) + .add("optionalComplexQueryParam", optionalComplexQueryParam) + .add("queryParamBool", queryParamBool) + .add("queryParamInt", queryParamInt) + .add("queryParamString", queryParamString) + .add("repeatedComplexQueryParam", repeatedComplexQueryParam) + .add("repeatedQueryParam", repeatedQueryParam) + .add("resource", resource) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java new file mode 100755 index 000000000..07c08db6c --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
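A hedged sketch of the "new" style update pattern that UpdateResourceRequest above models: the nested path params address the resource, and only the inner `resource` message becomes the PATCH body. Values are invented; only setters generated in this diff are used.

import com.databricks.sdk.service.httpcallv2.Resource;
import com.databricks.sdk.service.httpcallv2.UpdateResourceRequest;

public class UpdateResourceSketch {
  public static void main(String[] args) {
    UpdateResourceRequest req =
        new UpdateResourceRequest()
            .setNestedPathParamString("res-1")
            .setNestedPathParamInt(42L)
            .setNestedPathParamBool(true)
            .setFieldMask("body_field") // comma-separated, no spaces, per the javadoc above
            .setResource(new Resource().setBodyField("new value"));
    // HttpCallV2Impl.updateResource() interpolates the three nested path params into the
    // URL and serializes only req.getResource() as the request body.
    System.out.println(req);
  }
}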
+ +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetResourceRequest { + /** */ + @JsonIgnore private String name; + + /** Description. */ + @JsonIgnore + @QueryParam("resource") + private Resource resource; + + public GetResourceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public GetResourceRequest setResource(Resource resource) { + this.resource = resource; + return this; + } + + public Resource getResource() { + return resource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetResourceRequest that = (GetResourceRequest) o; + return Objects.equals(name, that.name) && Objects.equals(resource, that.resource); + } + + @Override + public int hashCode() { + return Objects.hash(name, resource); + } + + @Override + public String toString() { + return new ToStringer(GetResourceRequest.class) + .add("name", name) + .add("resource", resource) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java new file mode 100755 index 000000000..d84b553cb --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java @@ -0,0 +1,37 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Lorem Ipsum */ +@Generated +public class JsonMarshallV2API { + private static final Logger LOG = LoggerFactory.getLogger(JsonMarshallV2API.class); + + private final JsonMarshallV2Service impl; + + /** Regular-use constructor */ + public JsonMarshallV2API(ApiClient apiClient) { + impl = new JsonMarshallV2Impl(apiClient); + } + + /** Constructor for mocks */ + public JsonMarshallV2API(JsonMarshallV2Service mock) { + impl = mock; + } + + public Resource getResource(String name, Resource resource) { + return getResource(new GetResourceRequest().setName(name).setResource(resource)); + } + + public Resource getResource(GetResourceRequest request) { + return impl.getResource(request); + } + + public JsonMarshallV2Service impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java new file mode 100755 index 000000000..e7ad2b273 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java @@ -0,0 +1,31 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
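The jsonmarshallv2 GetResourceRequest above is unusual in that a whole message rides along as a query parameter: `resource` is annotated `@QueryParam("resource")` and kept out of the JSON body with `@JsonIgnore`. A hedged sketch of the convenience overload, assuming the generated no-arg constructor on the jsonmarshallv2 Resource (not shown in this hunk); ApiClient construction is elided and the name is invented.

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.jsonmarshallv2.*;

public class QueryMessageSketch {
  static Resource fetch(ApiClient apiClient) {
    JsonMarshallV2API api = new JsonMarshallV2API(apiClient);
    // Forwards to getResource(GetResourceRequest); the Resource argument is expanded
    // into query parameters by ApiClient.setQuery rather than sent as a body.
    return api.getResource("my-resource", new Resource());
  }
}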
+package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of JsonMarshallV2 */ +@Generated +class JsonMarshallV2Impl implements JsonMarshallV2Service { + private final ApiClient apiClient; + + public JsonMarshallV2Impl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Resource getResource(GetResourceRequest request) { + String path = String.format("/api/2.0/json-marshall/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Resource.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java new file mode 100755 index 000000000..a88cba0f0 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; + +/** + * Lorem Ipsum + * + *

<p>This is the high-level interface, that contains generated methods. + * + * <p>

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface JsonMarshallV2Service { + + Resource getResource(GetResourceRequest getResourceRequest); +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java new file mode 100755 index 000000000..d045ceb25 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class NestedMessage { + /** */ + @JsonProperty("optional_duration") + @QueryParam("optional_duration") + private String optionalDuration; + + /** */ + @JsonProperty("optional_string") + @QueryParam("optional_string") + private String optionalString; + + /** */ + @JsonProperty("optional_timestamp") + @QueryParam("optional_timestamp") + private String optionalTimestamp; + + public NestedMessage setOptionalDuration(String optionalDuration) { + this.optionalDuration = optionalDuration; + return this; + } + + public String getOptionalDuration() { + return optionalDuration; + } + + public NestedMessage setOptionalString(String optionalString) { + this.optionalString = optionalString; + return this; + } + + public String getOptionalString() { + return optionalString; + } + + public NestedMessage setOptionalTimestamp(String optionalTimestamp) { + this.optionalTimestamp = optionalTimestamp; + return this; + } + + public String getOptionalTimestamp() { + return optionalTimestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NestedMessage that = (NestedMessage) o; + return Objects.equals(optionalDuration, that.optionalDuration) + && Objects.equals(optionalString, that.optionalString) + && Objects.equals(optionalTimestamp, that.optionalTimestamp); + } + + @Override + public int hashCode() { + return Objects.hash(optionalDuration, optionalString, optionalTimestamp); + } + + @Override + public String toString() { + return new ToStringer(NestedMessage.class) + .add("optionalDuration", optionalDuration) + .add("optionalString", optionalString) + .add("optionalTimestamp", optionalTimestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java new file mode 100755 index 000000000..19caee741 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java @@ -0,0 +1,316 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
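The NestedMessage model above carries the proto well-known types Duration and Timestamp as plain strings. A hedged sketch of populating it, assuming the standard proto3 JSON encodings (seconds suffixed with "s", RFC 3339 timestamps); the diff itself does not pin down the accepted formats.

import com.databricks.sdk.service.jsonmarshallv2.NestedMessage;

public class WellKnownTypesSketch {
  public static void main(String[] args) {
    NestedMessage msg =
        new NestedMessage()
            .setOptionalString("hello")
            .setOptionalDuration("3600s") // assumed proto3 JSON Duration form
            .setOptionalTimestamp("2024-01-01T00:00:00Z"); // assumed RFC 3339 form
    System.out.println(msg);
  }
}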
+ +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class OptionalFields { + /** */ + @JsonProperty("duration") + @QueryParam("duration") + private String duration; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + */ + @JsonProperty("field_mask") + @QueryParam("field_mask") + private String fieldMask; + + /** Legacy Well Known types */ + @JsonProperty("legacy_duration") + @QueryParam("legacy_duration") + private String legacyDuration; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + */ + @JsonProperty("legacy_field_mask") + @QueryParam("legacy_field_mask") + private String legacyFieldMask; + + /** */ + @JsonProperty("legacy_timestamp") + @QueryParam("legacy_timestamp") + private String legacyTimestamp; + + /** */ + @JsonProperty("list_value") + @QueryParam("list_value") + private Collection<Object /* MISSING TYPE */> listValue; + + /** Lint disable reason: This is a dummy field used to test SDK Generation logic. */ + @JsonProperty("map") + @QueryParam("map") + private Map<String, String> mapValue; + + /** */ + @JsonProperty("optional_bool") + @QueryParam("optional_bool") + private Boolean optionalBool; + + /** */ + @JsonProperty("optional_int32") + @QueryParam("optional_int32") + private Long optionalInt32; + + /** */ + @JsonProperty("optional_int64") + @QueryParam("optional_int64") + private Long optionalInt64; + + /** */ + @JsonProperty("optional_message") + @QueryParam("optional_message") + private NestedMessage optionalMessage; + + /** */ + @JsonProperty("optional_string") + @QueryParam("optional_string") + private String optionalString; + + /** */ + @JsonProperty("struct") + @QueryParam("struct") + private Map<String, Object /* MISSING TYPE */> structValue; + + /** */ + @JsonProperty("test_enum") + @QueryParam("test_enum") + private TestEnum testEnum; + + /** */ + @JsonProperty("timestamp") + @QueryParam("timestamp") + private String timestamp; + + /** */ + @JsonProperty("value") + @QueryParam("value") + private Object /* MISSING TYPE */ value; + + public OptionalFields setDuration(String duration) { + this.duration = duration; + return this; + } + + public String getDuration() { + return duration; + } + + public OptionalFields setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public OptionalFields setLegacyDuration(String legacyDuration) { + this.legacyDuration = legacyDuration; + return this; + } + + public String getLegacyDuration() { + return legacyDuration; + } + + public OptionalFields setLegacyFieldMask(String legacyFieldMask) { + this.legacyFieldMask = legacyFieldMask; + return this; + } + + public String getLegacyFieldMask() { + return legacyFieldMask; + } + + public OptionalFields setLegacyTimestamp(String legacyTimestamp) { + this.legacyTimestamp = legacyTimestamp; + return this; + } + + public String getLegacyTimestamp() { + return legacyTimestamp; + } + + public OptionalFields setListValue(Collection<Object /* MISSING TYPE */> listValue) { + this.listValue = listValue; + return this; + } + + public Collection<Object /* MISSING TYPE */> getListValue() { + return listValue; + } + + public OptionalFields setMap(Map<String, String> mapValue) { + this.mapValue = mapValue; + return this; + } + + public Map<String, String> getMap() { + return mapValue; + } + + public OptionalFields setOptionalBool(Boolean optionalBool) { + this.optionalBool = optionalBool; + return this; + } + + public Boolean getOptionalBool() { + return optionalBool; + } + + public OptionalFields setOptionalInt32(Long optionalInt32) { + this.optionalInt32 = optionalInt32; + return this; + } + + public Long getOptionalInt32() { + return optionalInt32; + } + + public OptionalFields setOptionalInt64(Long optionalInt64) { + this.optionalInt64 = optionalInt64; + return this; + } + + public Long getOptionalInt64() { + return optionalInt64; + } + + public OptionalFields setOptionalMessage(NestedMessage optionalMessage) { + this.optionalMessage = optionalMessage; + return this; + } + + public NestedMessage getOptionalMessage() { + return optionalMessage; + } + + public OptionalFields setOptionalString(String optionalString) { + this.optionalString = optionalString; + return this; + } + + public String getOptionalString() { + return optionalString; + } + + public OptionalFields setStruct(Map<String, Object /* MISSING TYPE */> structValue) { + this.structValue = structValue; + return this; + } + + public Map<String, Object /* MISSING TYPE */> getStruct() { + return structValue; + } + + public OptionalFields setTestEnum(TestEnum testEnum) { + this.testEnum = testEnum; + return this; + } + + public TestEnum getTestEnum() { + return
testEnum; + } + + public OptionalFields setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public OptionalFields setValue(Object /* MISSING TYPE */ value) { + this.value = value; + return this; + } + + public Object /* MISSING TYPE */ getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OptionalFields that = (OptionalFields) o; + return Objects.equals(duration, that.duration) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(legacyDuration, that.legacyDuration) + && Objects.equals(legacyFieldMask, that.legacyFieldMask) + && Objects.equals(legacyTimestamp, that.legacyTimestamp) + && Objects.equals(listValue, that.listValue) + && Objects.equals(mapValue, that.mapValue) + && Objects.equals(optionalBool, that.optionalBool) + && Objects.equals(optionalInt32, that.optionalInt32) + && Objects.equals(optionalInt64, that.optionalInt64) + && Objects.equals(optionalMessage, that.optionalMessage) + && Objects.equals(optionalString, that.optionalString) + && Objects.equals(structValue, that.structValue) + && Objects.equals(testEnum, that.testEnum) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + duration, + fieldMask, + legacyDuration, + legacyFieldMask, + legacyTimestamp, + listValue, + mapValue, + optionalBool, + optionalInt32, + optionalInt64, + optionalMessage, + optionalString, + structValue, + testEnum, + timestamp, + value); + } + + @Override + public String toString() { + return new ToStringer(OptionalFields.class) + .add("duration", duration) + .add("fieldMask", fieldMask) + .add("legacyDuration", legacyDuration) + .add("legacyFieldMask", legacyFieldMask) + .add("legacyTimestamp", legacyTimestamp) + .add("listValue", listValue) + .add("mapValue", mapValue) + .add("optionalBool", optionalBool) + .add("optionalInt32", optionalInt32) + .add("optionalInt64", optionalInt64) + .add("optionalMessage", optionalMessage) + .add("optionalString", optionalString) + .add("structValue", structValue) + .add("testEnum", testEnum) + .add("timestamp", timestamp) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java new file mode 100755 index 000000000..4595b4a73 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java @@ -0,0 +1,238 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
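The field mask convention documented on OptionalFields above is easy to get wrong; a hedged sketch of a conforming value, using only setters generated in this diff (the mask string itself is an invented example):

import com.databricks.sdk.service.jsonmarshallv2.OptionalFields;

public class FieldMaskSketch {
  public static void main(String[] args) {
    // Per the javadoc: comma-separated, no spaces, dots to reach sub-fields, and no
    // indexing into repeated or map fields (only whole collections may be named).
    OptionalFields fields =
        new OptionalFields()
            .setOptionalString("x")
            .setFieldMask("optional_string,optional_message.optional_string");
    System.out.println(fields.getFieldMask());
  }
}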
+ +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class RepeatedFields { + /** */ + @JsonProperty("repeated_bool") + @QueryParam("repeated_bool") + private Collection<Boolean> repeatedBool; + + /** */ + @JsonProperty("repeated_duration") + @QueryParam("repeated_duration") + private Collection<String> repeatedDuration; + + /** */ + @JsonProperty("repeated_field_mask") + @QueryParam("repeated_field_mask") + private Collection<String> repeatedFieldMask; + + /** */ + @JsonProperty("repeated_int32") + @QueryParam("repeated_int32") + private Collection<Long> repeatedInt32; + + /** */ + @JsonProperty("repeated_int64") + @QueryParam("repeated_int64") + private Collection<Long> repeatedInt64; + + /** */ + @JsonProperty("repeated_list_value") + @QueryParam("repeated_list_value") + private Collection<Collection<Object>> repeatedListValue; + + /** */ + @JsonProperty("repeated_message") + @QueryParam("repeated_message") + private Collection<NestedMessage> repeatedMessage; + + /** */ + @JsonProperty("repeated_string") + @QueryParam("repeated_string") + private Collection<String> repeatedString; + + /** */ + @JsonProperty("repeated_struct") + @QueryParam("repeated_struct") + private Collection<Map<String, Object>> repeatedStruct; + + /** */ + @JsonProperty("repeated_timestamp") + @QueryParam("repeated_timestamp") + private Collection<String> repeatedTimestamp; + + /** */ + @JsonProperty("repeated_value") + @QueryParam("repeated_value") + private Collection<Object> repeatedValue; + + /** */ + @JsonProperty("test_repeated_enum") + @QueryParam("test_repeated_enum") + private Collection<TestEnum> testRepeatedEnum; + + public RepeatedFields setRepeatedBool(Collection<Boolean> repeatedBool) { + this.repeatedBool = repeatedBool; + return this; + } + + public Collection<Boolean> getRepeatedBool() { + return repeatedBool; + } + + public RepeatedFields setRepeatedDuration(Collection<String> repeatedDuration) { + this.repeatedDuration = repeatedDuration; + return this; + } + + public Collection<String> getRepeatedDuration() { + return repeatedDuration; + } + + public RepeatedFields setRepeatedFieldMask(Collection<String> repeatedFieldMask) { + this.repeatedFieldMask = repeatedFieldMask; + return this; + } + + public Collection<String> getRepeatedFieldMask() { + return repeatedFieldMask; + } + + public RepeatedFields setRepeatedInt32(Collection<Long> repeatedInt32) { + this.repeatedInt32 = repeatedInt32; + return this; + } + + public Collection<Long> getRepeatedInt32() { + return repeatedInt32; + } + + public RepeatedFields setRepeatedInt64(Collection<Long> repeatedInt64) { + this.repeatedInt64 = repeatedInt64; + return this; + } + + public Collection<Long> getRepeatedInt64() { + return repeatedInt64; + } + + public RepeatedFields setRepeatedListValue( + Collection<Collection<Object>> repeatedListValue) { + this.repeatedListValue = repeatedListValue; + return this; + } + + public Collection<Collection<Object>> getRepeatedListValue() { + return repeatedListValue; + } + + public RepeatedFields setRepeatedMessage(Collection<NestedMessage> repeatedMessage) { + this.repeatedMessage = repeatedMessage; + return this; + } + + public Collection<NestedMessage> getRepeatedMessage() { + return repeatedMessage; + } + + public RepeatedFields setRepeatedString(Collection<String> repeatedString) { + this.repeatedString = repeatedString; + return this; + } + + public Collection<String> getRepeatedString() { + return repeatedString; + } + + public RepeatedFields setRepeatedStruct( + Collection<Map<String, Object>> repeatedStruct) { + this.repeatedStruct = repeatedStruct; + return this; + } + + public Collection<Map<String, Object>> getRepeatedStruct() { + return repeatedStruct; + } + + public RepeatedFields setRepeatedTimestamp(Collection<String> repeatedTimestamp) { + this.repeatedTimestamp = repeatedTimestamp; + return this; + } + + public Collection<String> getRepeatedTimestamp() { + return repeatedTimestamp; + } + + public RepeatedFields setRepeatedValue(Collection<Object> repeatedValue) { + this.repeatedValue = repeatedValue; + return this; + } + + public Collection<Object> getRepeatedValue() { + return repeatedValue; + } + + public RepeatedFields setTestRepeatedEnum(Collection<TestEnum> testRepeatedEnum) { + this.testRepeatedEnum = testRepeatedEnum; + return this; + } + + public Collection<TestEnum> getTestRepeatedEnum() { + return testRepeatedEnum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepeatedFields that = (RepeatedFields) o; + return Objects.equals(repeatedBool, that.repeatedBool) + && Objects.equals(repeatedDuration, that.repeatedDuration) + && Objects.equals(repeatedFieldMask, that.repeatedFieldMask) + && Objects.equals(repeatedInt32, that.repeatedInt32) + && Objects.equals(repeatedInt64, that.repeatedInt64) + && Objects.equals(repeatedListValue, that.repeatedListValue) + && Objects.equals(repeatedMessage, that.repeatedMessage) + && Objects.equals(repeatedString, that.repeatedString) + && Objects.equals(repeatedStruct, that.repeatedStruct) + && Objects.equals(repeatedTimestamp, that.repeatedTimestamp) + && Objects.equals(repeatedValue, that.repeatedValue) + && Objects.equals(testRepeatedEnum, that.testRepeatedEnum); + } + + @Override + public int hashCode() { + return Objects.hash( + repeatedBool, + repeatedDuration, + repeatedFieldMask, + repeatedInt32, + repeatedInt64, + repeatedListValue, + repeatedMessage, + repeatedString, + repeatedStruct, + repeatedTimestamp, + repeatedValue, + testRepeatedEnum); + } + + @Override + public String toString() { + return new ToStringer(RepeatedFields.class) + .add("repeatedBool", repeatedBool) + .add("repeatedDuration", repeatedDuration) + .add("repeatedFieldMask", repeatedFieldMask) + .add("repeatedInt32", repeatedInt32) + .add("repeatedInt64", repeatedInt64) + .add("repeatedListValue", repeatedListValue) + .add("repeatedMessage", repeatedMessage) + .add("repeatedString", repeatedString) + .add("repeatedStruct", repeatedStruct) + .add("repeatedTimestamp", repeatedTimestamp) + .add("repeatedValue", repeatedValue) + .add("testRepeatedEnum", testRepeatedEnum) + .toString(); + } +}
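// Editor's sketch (not part of the generated diff): for the RepeatedFields message defined
// above, every repeated proto field maps to a java.util.Collection and serializes as a JSON
// array; Arrays.asList is enough to populate one in a test (import java.util.Arrays assumed).
RepeatedFields repeated =
    new RepeatedFields()
        .setRepeatedBool(Arrays.asList(true, false))
        .setRepeatedInt64(Arrays.asList(1L, 2L, 3L))
        .setRepeatedDuration(Arrays.asList("1s", "2s")) // Durations stay strings, one per element
        .setTestRepeatedEnum(Arrays.asList(TestEnum.TEST_ENUM_ONE, TestEnum.TEST_ENUM_TWO));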
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java new file mode 100755 index 000000000..1caa4fa2b --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java @@ -0,0 +1,243 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class RequiredFields { + /** */ + @JsonProperty("required_bool") + @QueryParam("required_bool") + private Boolean requiredBool; + + /** */ + @JsonProperty("required_duration") + @QueryParam("required_duration") + private String requiredDuration; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + */ + @JsonProperty("required_field_mask") + @QueryParam("required_field_mask") + private String requiredFieldMask; + + /** */ + @JsonProperty("required_int32") + @QueryParam("required_int32") + private Long requiredInt32; + + /** */ + @JsonProperty("required_int64") + @QueryParam("required_int64") + private Long requiredInt64; + + /** */ + @JsonProperty("required_list_value") + @QueryParam("required_list_value") + private Collection<Object> requiredListValue; + + /** */ + @JsonProperty("required_message") + @QueryParam("required_message") + private NestedMessage requiredMessage; + + /** */ + @JsonProperty("required_string") + @QueryParam("required_string") + private String requiredString; + + /** */ + @JsonProperty("required_struct") + @QueryParam("required_struct") + private Map<String, Object> requiredStruct; + + /** */ + @JsonProperty("required_timestamp") + @QueryParam("required_timestamp") + private String requiredTimestamp; + + /** */ + @JsonProperty("required_value") + @QueryParam("required_value") + private Object /* MISSING TYPE */ requiredValue; + + /** */ + @JsonProperty("test_required_enum") + @QueryParam("test_required_enum") + private TestEnum testRequiredEnum; + + public RequiredFields setRequiredBool(Boolean requiredBool) { + this.requiredBool = requiredBool; + return this; + } + + public Boolean getRequiredBool() { + return requiredBool; + } + + public RequiredFields setRequiredDuration(String requiredDuration) { + this.requiredDuration = requiredDuration; + return this; + } + + public String getRequiredDuration() { + return requiredDuration; + } + + public RequiredFields setRequiredFieldMask(String requiredFieldMask) { + this.requiredFieldMask = requiredFieldMask; + return this; + } + + public String getRequiredFieldMask() { + return requiredFieldMask; + } + + public RequiredFields setRequiredInt32(Long requiredInt32) { + this.requiredInt32 = requiredInt32; + return this; + } + + public Long getRequiredInt32() { + return requiredInt32; + } + + public RequiredFields setRequiredInt64(Long requiredInt64) { + this.requiredInt64 = requiredInt64; + return this; + } + + public Long getRequiredInt64() { + return requiredInt64; + } + + public RequiredFields setRequiredListValue( + Collection<Object> requiredListValue) { + this.requiredListValue = requiredListValue; + return this; + } + + public Collection<Object> getRequiredListValue() { + return requiredListValue; + } + + public RequiredFields setRequiredMessage(NestedMessage requiredMessage) { + this.requiredMessage = requiredMessage; + return this; + } + + public NestedMessage getRequiredMessage() { + return requiredMessage; + } + + public RequiredFields setRequiredString(String requiredString) { + this.requiredString = requiredString; + return this; + } + + public String getRequiredString() { + return requiredString; + } + + public RequiredFields setRequiredStruct(Map<String, Object> requiredStruct) { + this.requiredStruct = requiredStruct; + return this; + } + + public Map<String, Object> getRequiredStruct() { + return requiredStruct; + } + + public RequiredFields setRequiredTimestamp(String requiredTimestamp) { + this.requiredTimestamp = requiredTimestamp; + return this; + } + + public String getRequiredTimestamp() { + return requiredTimestamp; + } + + public RequiredFields setRequiredValue(Object /* MISSING TYPE */ requiredValue) { + this.requiredValue = requiredValue; + return this; + } + + public Object /* MISSING TYPE */ getRequiredValue() { + return requiredValue; + } + + public RequiredFields setTestRequiredEnum(TestEnum testRequiredEnum) { + this.testRequiredEnum = testRequiredEnum; + return this; + } + + public TestEnum getTestRequiredEnum() { + return testRequiredEnum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RequiredFields that = (RequiredFields) o; + return Objects.equals(requiredBool, that.requiredBool) + && Objects.equals(requiredDuration, that.requiredDuration) + && Objects.equals(requiredFieldMask, that.requiredFieldMask) + && Objects.equals(requiredInt32, that.requiredInt32) + && Objects.equals(requiredInt64, that.requiredInt64) + && Objects.equals(requiredListValue, that.requiredListValue) + && Objects.equals(requiredMessage, that.requiredMessage) + && Objects.equals(requiredString, that.requiredString) + && Objects.equals(requiredStruct, that.requiredStruct) + && Objects.equals(requiredTimestamp, that.requiredTimestamp) + && Objects.equals(requiredValue, that.requiredValue) + && Objects.equals(testRequiredEnum, that.testRequiredEnum); + } + + @Override + public int hashCode() { + return Objects.hash( + requiredBool, + requiredDuration, + requiredFieldMask, + requiredInt32, + requiredInt64, + requiredListValue, + requiredMessage, + requiredString, + requiredStruct, + requiredTimestamp, + requiredValue, + testRequiredEnum); + } + + @Override + public String toString() { + return new ToStringer(RequiredFields.class) + .add("requiredBool", requiredBool) + .add("requiredDuration", requiredDuration) + .add("requiredFieldMask", requiredFieldMask) + .add("requiredInt32", requiredInt32) + .add("requiredInt64", requiredInt64) + .add("requiredListValue", requiredListValue) + .add("requiredMessage", requiredMessage) + .add("requiredString", requiredString) + .add("requiredStruct", requiredStruct) + .add("requiredTimestamp", requiredTimestamp) + .add("requiredValue", requiredValue) + .add("testRequiredEnum", testRequiredEnum) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java new file mode 100755 index 000000000..09141d80a --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
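// Editor's sketch (not part of the generated diff): mask strings that satisfy the
// required_field_mask contract documented on the RequiredFields message above. The sub-field
// name "name" is hypothetical; NestedMessage's fields are defined elsewhere in this package.
String ok = "required_string,required_message"; // whole fields, comma-separated, no spaces
String alsoOk = "required_message.name"; // a dot navigates into a sub-field
// Not allowed: selecting elements of a sequence or map, e.g. "required_list_value[0]" --
// only the entire collection field ("required_list_value") can be named.
RequiredFields withMask = new RequiredFields().setRequiredFieldMask(ok);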
+ +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * We separate this into 3 submessages to simplify test cases. E.g., any required top level field + * needs to be included in the expected json for each test case. + */ +@Generated +public class Resource { + /** */ + @JsonProperty("optional_fields") + @QueryParam("optional_fields") + private OptionalFields optionalFields; + + /** */ + @JsonProperty("repeated_fields") + @QueryParam("repeated_fields") + private RepeatedFields repeatedFields; + + /** */ + @JsonProperty("required_fields") + @QueryParam("required_fields") + private RequiredFields requiredFields; + + public Resource setOptionalFields(OptionalFields optionalFields) { + this.optionalFields = optionalFields; + return this; + } + + public OptionalFields getOptionalFields() { + return optionalFields; + } + + public Resource setRepeatedFields(RepeatedFields repeatedFields) { + this.repeatedFields = repeatedFields; + return this; + } + + public RepeatedFields getRepeatedFields() { + return repeatedFields; + } + + public Resource setRequiredFields(RequiredFields requiredFields) { + this.requiredFields = requiredFields; + return this; + } + + public RequiredFields getRequiredFields() { + return requiredFields; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Resource that = (Resource) o; + return Objects.equals(optionalFields, that.optionalFields) + && Objects.equals(repeatedFields, that.repeatedFields) + && Objects.equals(requiredFields, that.requiredFields); + } + + @Override + public int hashCode() { + return Objects.hash(optionalFields, repeatedFields, requiredFields); + } + + @Override + public String toString() { + return new ToStringer(Resource.class) + .add("optionalFields", optionalFields) + .add("repeatedFields", repeatedFields) + .add("requiredFields", requiredFields) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java new file mode 100755 index 000000000..48684e38a --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jsonmarshallv2; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum TestEnum { + TEST_ENUM_ONE, + TEST_ENUM_TWO, +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java new file mode 100755 index 000000000..41ead0280 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
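// Editor's sketch (not part of the generated diff): the Resource message defined above only
// groups the three submessages so each test case can exercise one family of fields at a time;
// composing it is plain fluent chaining.
Resource resource =
    new Resource()
        .setOptionalFields(new OptionalFields().setOptionalString("abc"))
        .setRequiredFields(new RequiredFields().setRequiredBool(true))
        .setRepeatedFields(new RepeatedFields());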
+ +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class CancelOperationRequest { + /** The name of the operation resource to be cancelled. */ + @JsonIgnore private String name; + + public CancelOperationRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelOperationRequest that = (CancelOperationRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(CancelOperationRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java new file mode 100755 index 000000000..b3e1f28ed --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running createTestResource operation. Provides methods to + * wait for completion, check status, cancel, and access metadata. + */ +@Generated +public class CreateTestResourceOperation { + private static final Logger LOG = LoggerFactory.getLogger(CreateTestResourceOperation.class); + private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(20); + + private final LroTestingService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public CreateTestResourceOperation(LroTestingService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete and return the resulting TestResource. Uses the default + * timeout of 20 minutes. + * + * @return the created TestResource + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public TestResource waitForCompletion() throws TimeoutException { + return waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete and return the resulting TestResource. 
+ * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @return the created TestResource + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public TestResource waitForCompletion(Optional<LroOptions> options) throws TimeoutException { + Duration timeout = options.flatMap(LroOptions::getTimeout).orElse(DEFAULT_TIMEOUT); + long deadline = System.currentTimeMillis() + timeout.toMillis(); + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); + return objectMapper.treeToValue(responseJson, TestResource.class); + } catch (JsonProcessingException e) { + throw new DatabricksException( + "Failed to unmarshal testResource response: " + e.getMessage(), e); + } + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + throw new TimeoutException( + String.format("Operation timed out after %s: %s", timeout, statusMessage)); + } + + /** + * Cancel the operation. + * + * @throws DatabricksException if the cancellation fails + */ + public void cancel() { + impl.cancelOperation(new CancelOperationRequest().setName(operation.getName())); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public TestResourceOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue(metadataJson, TestResourceOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking.
+ * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. */ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java new file mode 100755 index 000000000..428aca8f1 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateTestResourceRequest { + /** The resource to create */ + @JsonProperty("resource") + private TestResource resource; + + public CreateTestResourceRequest setResource(TestResource resource) { + this.resource = resource; + return this; + } + + public TestResource getResource() { + return resource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateTestResourceRequest that = (CreateTestResourceRequest) o; + return Objects.equals(resource, that.resource); + } + + @Override + public int hashCode() { + return Objects.hash(resource); + } + + @Override + public String toString() { + return new ToStringer(CreateTestResourceRequest.class).add("resource", resource).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java new file mode 100755 index 000000000..e2254a076 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Serialization format for DatabricksServiceException with error details. This message doesn't work + * for ScalaPB-04 as google.protobuf.Any is only available to ScalaPB-09. Note the definition of + * this message should be in sync with DatabricksServiceExceptionProto defined in + * /api-base/proto/legacy/databricks.proto except the later one doesn't have the error details field + * defined. 
+ */ +@Generated +public class DatabricksServiceExceptionWithDetailsProto { + /** + * @pbjson-skip + */ + @JsonProperty("details") + private Collection details; + + /** */ + @JsonProperty("error_code") + private ErrorCode errorCode; + + /** */ + @JsonProperty("message") + private String message; + + /** */ + @JsonProperty("stack_trace") + private String stackTrace; + + public DatabricksServiceExceptionWithDetailsProto setDetails(Collection details) { + this.details = details; + return this; + } + + public Collection getDetails() { + return details; + } + + public DatabricksServiceExceptionWithDetailsProto setErrorCode(ErrorCode errorCode) { + this.errorCode = errorCode; + return this; + } + + public ErrorCode getErrorCode() { + return errorCode; + } + + public DatabricksServiceExceptionWithDetailsProto setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public DatabricksServiceExceptionWithDetailsProto setStackTrace(String stackTrace) { + this.stackTrace = stackTrace; + return this; + } + + public String getStackTrace() { + return stackTrace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabricksServiceExceptionWithDetailsProto that = + (DatabricksServiceExceptionWithDetailsProto) o; + return Objects.equals(details, that.details) + && Objects.equals(errorCode, that.errorCode) + && Objects.equals(message, that.message) + && Objects.equals(stackTrace, that.stackTrace); + } + + @Override + public int hashCode() { + return Objects.hash(details, errorCode, message, stackTrace); + } + + @Override + public String toString() { + return new ToStringer(DatabricksServiceExceptionWithDetailsProto.class) + .add("details", details) + .add("errorCode", errorCode) + .add("message", message) + .add("stackTrace", stackTrace) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java new file mode 100755 index 000000000..5e05b357b --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; + +/** + * Legacy definition of the ErrorCode enum. Please keep in sync with api-base/proto/error_code.proto + * (except status code mapping annotations as this file doesn't have them). Will be removed + * eventually, pending the ScalaPB 0.4 cleanup. 
+ */ +@Generated +public enum ErrorCode { + ABORTED, + ALREADY_EXISTS, + BAD_REQUEST, + CANCELLED, + CATALOG_ALREADY_EXISTS, + CATALOG_DOES_NOT_EXIST, + CATALOG_NOT_EMPTY, + COULD_NOT_ACQUIRE_LOCK, + CUSTOMER_UNAUTHORIZED, + DAC_ALREADY_EXISTS, + DAC_DOES_NOT_EXIST, + DATA_LOSS, + DEADLINE_EXCEEDED, + DEPLOYMENT_TIMEOUT, + DIRECTORY_NOT_EMPTY, + DIRECTORY_PROTECTED, + DRY_RUN_FAILED, + ENDPOINT_NOT_FOUND, + EXTERNAL_LOCATION_ALREADY_EXISTS, + EXTERNAL_LOCATION_DOES_NOT_EXIST, + FEATURE_DISABLED, + GIT_CONFLICT, + GIT_REMOTE_ERROR, + GIT_SENSITIVE_TOKEN_DETECTED, + GIT_UNKNOWN_REF, + GIT_URL_NOT_ON_ALLOW_LIST, + INSECURE_PARTNER_RESPONSE, + INTERNAL_ERROR, + INVALID_PARAMETER_VALUE, + INVALID_STATE, + INVALID_STATE_TRANSITION, + IO_ERROR, + IPYNB_FILE_IN_REPO, + MALFORMED_PARTNER_RESPONSE, + MALFORMED_REQUEST, + MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST, + MAX_BLOCK_SIZE_EXCEEDED, + MAX_CHILD_NODE_SIZE_EXCEEDED, + MAX_LIST_SIZE_EXCEEDED, + MAX_NOTEBOOK_SIZE_EXCEEDED, + MAX_READ_SIZE_EXCEEDED, + METASTORE_ALREADY_EXISTS, + METASTORE_DOES_NOT_EXIST, + METASTORE_NOT_EMPTY, + NOT_FOUND, + NOT_IMPLEMENTED, + PARTIAL_DELETE, + PERMISSION_DENIED, + PERMISSION_NOT_PROPAGATED, + PRINCIPAL_DOES_NOT_EXIST, + PROJECTS_OPERATION_TIMEOUT, + PROVIDER_ALREADY_EXISTS, + PROVIDER_DOES_NOT_EXIST, + PROVIDER_SHARE_NOT_ACCESSIBLE, + QUOTA_EXCEEDED, + RECIPIENT_ALREADY_EXISTS, + RECIPIENT_DOES_NOT_EXIST, + REQUEST_LIMIT_EXCEEDED, + RESOURCE_ALREADY_EXISTS, + RESOURCE_CONFLICT, + RESOURCE_DOES_NOT_EXIST, + RESOURCE_EXHAUSTED, + RESOURCE_LIMIT_EXCEEDED, + SCHEMA_ALREADY_EXISTS, + SCHEMA_DOES_NOT_EXIST, + SCHEMA_NOT_EMPTY, + SEARCH_QUERY_TOO_LONG, + SEARCH_QUERY_TOO_SHORT, + SERVICE_UNDER_MAINTENANCE, + SHARE_ALREADY_EXISTS, + SHARE_DOES_NOT_EXIST, + STORAGE_CREDENTIAL_ALREADY_EXISTS, + STORAGE_CREDENTIAL_DOES_NOT_EXIST, + TABLE_ALREADY_EXISTS, + TABLE_DOES_NOT_EXIST, + TEMPORARILY_UNAVAILABLE, + UNAUTHENTICATED, + UNAVAILABLE, + UNKNOWN, + UNPARSEABLE_HTTP_ERROR, + WORKSPACE_TEMPORARILY_UNAVAILABLE, +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java new file mode 100755 index 000000000..0574de175 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetOperationRequest { + /** The name of the operation resource. 
*/ + @JsonIgnore private String name; + + public GetOperationRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOperationRequest that = (GetOperationRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetOperationRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java new file mode 100755 index 000000000..10e280619 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetTestResourceRequest { + /** Resource ID to get */ + @JsonIgnore private String resourceId; + + public GetTestResourceRequest setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTestResourceRequest that = (GetTestResourceRequest) o; + return Objects.equals(resourceId, that.resourceId); + } + + @Override + public int hashCode() { + return Objects.hash(resourceId); + } + + @Override + public String toString() { + return new ToStringer(GetTestResourceRequest.class).add("resourceId", resourceId).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java new file mode 100755 index 000000000..82cdda20e --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Test service for Long Running Operations */ +@Generated +public class LroTestingAPI { + private static final Logger LOG = LoggerFactory.getLogger(LroTestingAPI.class); + + private final LroTestingService impl; + + /** Regular-use constructor */ + public LroTestingAPI(ApiClient apiClient) { + impl = new LroTestingImpl(apiClient); + } + + /** Constructor for mocks */ + public LroTestingAPI(LroTestingService mock) { + impl = mock; + } + + public void cancelOperation(CancelOperationRequest request) { + impl.cancelOperation(request); + } + + /** Simple method to create test resource for LRO testing */ + public CreateTestResourceOperation createTestResource(CreateTestResourceRequest request) { + Operation operation = impl.createTestResource(request); + return new CreateTestResourceOperation(impl, operation); + } + + public Operation getOperation(String name) { + return getOperation(new GetOperationRequest().setName(name)); + } + + public Operation getOperation(GetOperationRequest request) { + return impl.getOperation(request); + } + + public TestResource getTestResource(String resourceId) { + return getTestResource(new GetTestResourceRequest().setResourceId(resourceId)); + } + + /** Simple method to get test resource */ + public TestResource getTestResource(GetTestResourceRequest request) { + return impl.getTestResource(request); + } + + public LroTestingService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java new file mode 100755 index 000000000..b1bd471df --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
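// Editor's sketch (not part of the generated diff): the happy path through the LroTestingAPI
// defined above. `apiClient` is an assumed, already-configured com.databricks.sdk.core.ApiClient;
// the fragment is assumed to run in a test method that declares `throws Exception`.
LroTestingAPI api = new LroTestingAPI(apiClient);
CreateTestResourceOperation op =
    api.createTestResource(
        new CreateTestResourceRequest().setResource(new TestResource().setName("demo")));
// Blocks with capped, jittered polling; throws TimeoutException after the 20-minute default.
TestResource created = op.waitForCompletion();
// To abandon the operation instead, op.cancel() issues the CancelOperationRequest server-side.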
+package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of LroTesting */ +@Generated +class LroTestingImpl implements LroTestingService { + private final ApiClient apiClient; + + public LroTestingImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public void cancelOperation(CancelOperationRequest request) { + String path = String.format("/api/2.0/lro-testing/operations/%s/cancel", request.getName()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation createTestResource(CreateTestResourceRequest request) { + String path = "/api/2.0/lro-testing/resources"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getResource())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation getOperation(GetOperationRequest request) { + String path = String.format("/api/2.0/lro-testing/operations/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public TestResource getTestResource(GetTestResourceRequest request) { + String path = String.format("/api/2.0/lro-testing/resources/%s", request.getResourceId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, TestResource.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java new file mode 100755 index 000000000..769d17f0c --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java @@ -0,0 +1,25 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; + +/** + * Test service for Long Running Operations + * + *
<p>This is the high-level interface, that contains generated methods. + + * <p>
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface LroTestingService { + + void cancelOperation(CancelOperationRequest cancelOperationRequest); + + /** Simple method to create test resource for LRO testing */ + Operation createTestResource(CreateTestResourceRequest createTestResourceRequest); + + Operation getOperation(GetOperationRequest getOperationRequest); + + /** Simple method to get test resource */ + TestResource getTestResource(GetTestResourceRequest getTestResourceRequest); +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java new file mode 100755 index 000000000..025034b47 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java @@ -0,0 +1,127 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** This resource represents a long-running operation that is the result of a network API call. */ +@Generated +public class Operation { + /** + * If the value is `false`, it means the operation is still in progress. If `true`, the operation + * is completed, and either `error` or `response` is available. + */ + @JsonProperty("done") + private Boolean done; + + /** The error result of the operation in case of failure or cancellation. */ + @JsonProperty("error") + private DatabricksServiceExceptionWithDetailsProto error; + + /** + * Service-specific metadata associated with the operation. It typically contains progress + * information and common metadata such as create time. Some services might not provide such + * metadata. Any method that returns a long-running operation should document the metadata type, + * if any. + */ + @JsonProperty("metadata") + private Object metadata; + + /** + * The server-assigned name, which is only unique within the same service that originally returns + * it. If you use the default HTTP mapping, the `name` should be a resource name ending with + * `operations/{unique_id}`. + * + *
<p>
Note: multi-segment resource names are not yet supported in the RPC framework and SDK/TF. + * Until that support is added, `name` must be string without internal `/` separators. + */ + @JsonProperty("name") + private String name; + + /** + * The normal, successful response of the operation. If the original method returns no data on + * success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is + * standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the + * response should have the type `XxxResponse`, where `Xxx` is the original method name. For + * example, if the original method name is `TakeSnapshot()`, the inferred response type is + * `TakeSnapshotResponse`. + */ + @JsonProperty("response") + private Object response; + + public Operation setDone(Boolean done) { + this.done = done; + return this; + } + + public Boolean getDone() { + return done; + } + + public Operation setError(DatabricksServiceExceptionWithDetailsProto error) { + this.error = error; + return this; + } + + public DatabricksServiceExceptionWithDetailsProto getError() { + return error; + } + + public Operation setMetadata(Object metadata) { + this.metadata = metadata; + return this; + } + + public Object getMetadata() { + return metadata; + } + + public Operation setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Operation setResponse(Object response) { + this.response = response; + return this; + } + + public Object getResponse() { + return response; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Operation that = (Operation) o; + return Objects.equals(done, that.done) + && Objects.equals(error, that.error) + && Objects.equals(metadata, that.metadata) + && Objects.equals(name, that.name) + && Objects.equals(response, that.response); + } + + @Override + public int hashCode() { + return Objects.hash(done, error, metadata, name, response); + } + + @Override + public String toString() { + return new ToStringer(Operation.class) + .add("done", done) + .add("error", error) + .add("metadata", metadata) + .add("name", name) + .add("response", response) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java new file mode 100755 index 000000000..d5d12a517 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
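// Editor's sketch (not part of the generated diff): the terminal-state contract of the
// Operation message defined above, checked by hand rather than through waitForCompletion.
// `api` is the LroTestingAPI instance from the earlier sketch.
Operation operation = api.getOperation("operations/abc123"); // name format per the javadoc above
if (Boolean.TRUE.equals(operation.getDone())) {
  if (operation.getError() != null) {
    // Once done is true, exactly one of error/response is meaningful.
    System.err.println(operation.getError().getErrorCode() + ": " + operation.getError().getMessage());
  } else {
    System.out.println(operation.getResponse()); // untyped here; the operation wrapper deserializes it
  }
}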
+ +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Test resource for LRO operations */ +@Generated +public class TestResource { + /** Unique identifier for the resource */ + @JsonProperty("id") + private String id; + + /** Name of the resource */ + @JsonProperty("name") + private String name; + + public TestResource setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public TestResource setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestResource that = (TestResource) o; + return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + @Override + public String toString() { + return new ToStringer(TestResource.class).add("id", id).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java new file mode 100755 index 000000000..486500f7e --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Metadata for test resource operations */ +@Generated +public class TestResourceOperationMetadata { + /** Progress percentage (0-100) */ + @JsonProperty("progress_percent") + private Long progressPercent; + + /** ID of the resource being operated on */ + @JsonProperty("resource_id") + private String resourceId; + + public TestResourceOperationMetadata setProgressPercent(Long progressPercent) { + this.progressPercent = progressPercent; + return this; + } + + public Long getProgressPercent() { + return progressPercent; + } + + public TestResourceOperationMetadata setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestResourceOperationMetadata that = (TestResourceOperationMetadata) o; + return Objects.equals(progressPercent, that.progressPercent) + && Objects.equals(resourceId, that.resourceId); + } + + @Override + public int hashCode() { + return Objects.hash(progressPercent, resourceId); + } + + @Override + public String toString() { + return new ToStringer(TestResourceOperationMetadata.class) + .add("progressPercent", progressPercent) + .add("resourceId", resourceId) + .toString(); + } +}
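// Editor's sketch (not part of the generated diff): surfacing progress from the
// TestResourceOperationMetadata message defined above while an operation from the earlier
// sketch is still running. isDone() re-polls the server on each call, and getMetadata()
// re-deserializes Operation.metadata on each call, so the loop always reports fresh numbers.
// Fragment assumed to run in a method that declares `throws InterruptedException`.
while (!op.isDone()) {
  TestResourceOperationMetadata meta = op.getMetadata();
  if (meta != null) {
    System.out.printf("resource %s: %d%% complete%n", meta.getResourceId(), meta.getProgressPercent());
  }
  Thread.sleep(1_000L);
}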