From 1dbc086e5a6d7dc69525bd02942b1281e4516c78 Mon Sep 17 00:00:00 2001
From: Andrew Nester
Date: Fri, 23 Feb 2024 11:41:42 +0100
Subject: [PATCH] Upgrade Terraform provider to 1.37.0 (#1235)

## Changes
Upgrade Terraform provider to 1.37.0.

We currently use version 1.36.2, which is built on Go SDK 0.30 and does not have U2M authentication enabled for all clouds. Upgrading to 1.37.0 allows the Terraform provider (and therefore DABs) to use U2M authentication.

Fixes #1231

---
 bundle/internal/tf/codegen/schema/version.go  |   2 +-
 .../internal/tf/schema/data_source_cluster.go |   2 +-
 bundle/internal/tf/schema/data_source_job.go  | 377 +++++++++++++++++-
 .../schema/data_source_storage_credential.go  |  57 +++
 .../schema/data_source_storage_credentials.go |   8 +
 bundle/internal/tf/schema/data_sources.go     |   4 +
 bundle/internal/tf/schema/resource_cluster.go |   2 +-
 bundle/internal/tf/schema/resource_file.go    |  14 +
 bundle/internal/tf/schema/resource_job.go     | 377 +++++++++++++++++-
 .../internal/tf/schema/resource_pipeline.go   |   2 +-
 .../schema/resource_vector_search_endpoint.go |  16 +
 bundle/internal/tf/schema/resource_volume.go  |   1 +
 bundle/internal/tf/schema/resources.go        |   4 +
 bundle/internal/tf/schema/root.go             |   2 +-
 14 files changed, 855 insertions(+), 13 deletions(-)
 create mode 100644 bundle/internal/tf/schema/data_source_storage_credential.go
 create mode 100644 bundle/internal/tf/schema/data_source_storage_credentials.go
 create mode 100644 bundle/internal/tf/schema/resource_file.go
 create mode 100644 bundle/internal/tf/schema/resource_vector_search_endpoint.go

diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go
index c79319eda..a41b62257 100644
--- a/bundle/internal/tf/codegen/schema/version.go
+++ b/bundle/internal/tf/codegen/schema/version.go
@@ -1,3 +1,3 @@
 package schema
-const ProviderVersion = "1.36.2"
+const ProviderVersion = "1.37.0"
diff --git a/bundle/internal/tf/schema/data_source_cluster.go b/bundle/internal/tf/schema/data_source_cluster.go
index d34d63a79..fff66dc93 100644
--- a/bundle/internal/tf/schema/data_source_cluster.go
+++ b/bundle/internal/tf/schema/data_source_cluster.go
@@ -122,7 +122,7 @@ type DataSourceClusterClusterInfoInitScriptsS3 struct {
 }
 type DataSourceClusterClusterInfoInitScriptsVolumes struct {
- Destination string `json:"destination,omitempty"`
+ Destination string `json:"destination"`
 }
 type DataSourceClusterClusterInfoInitScriptsWorkspace struct {
diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go
index f9a316d78..6e67b285f 100644
--- a/bundle/internal/tf/schema/data_source_job.go
+++ b/bundle/internal/tf/schema/data_source_job.go
@@ -21,6 +21,7 @@ type DataSourceJobJobSettingsSettingsDbtTask struct {
 ProfilesDirectory string `json:"profiles_directory,omitempty"`
 ProjectDirectory string `json:"project_directory,omitempty"`
 Schema string `json:"schema,omitempty"`
+ Source string `json:"source,omitempty"`
 WarehouseId string `json:"warehouse_id,omitempty"`
 }
@@ -160,7 +161,7 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 struct {
 }
 type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumes struct {
- Destination string `json:"destination,omitempty"`
+ Destination string `json:"destination"`
 }
 type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace struct {
@@ -347,7 +348,7 @@ type DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 struct {
 }
 type DataSourceJobJobSettingsSettingsNewClusterInitScriptsVolumes struct {
- Destination string
`json:"destination,omitempty"` + Destination string `json:"destination"` } type DataSourceJobJobSettingsSettingsNewClusterInitScriptsWorkspace struct { @@ -482,6 +483,7 @@ type DataSourceJobJobSettingsSettingsTaskDbtTask struct { ProfilesDirectory string `json:"profiles_directory,omitempty"` ProjectDirectory string `json:"project_directory,omitempty"` Schema string `json:"schema,omitempty"` + Source string `json:"source,omitempty"` WarehouseId string `json:"warehouse_id,omitempty"` } @@ -497,6 +499,371 @@ type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct { OnSuccess []string `json:"on_success,omitempty"` } +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskConditionTask struct { + Left string `json:"left,omitempty"` + Op string `json:"op,omitempty"` + Right string `json:"right,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskDbtTask struct { + Catalog string `json:"catalog,omitempty"` + Commands []string `json:"commands"` + ProfilesDirectory string `json:"profiles_directory,omitempty"` + ProjectDirectory string `json:"project_directory,omitempty"` + Schema string `json:"schema,omitempty"` + Source string `json:"source,omitempty"` + WarehouseId string `json:"warehouse_id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskDependsOn struct { + Outcome string `json:"outcome,omitempty"` + TaskKey string `json:"task_key"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskEmailNotifications struct { + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskHealth struct { + Rules []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskHealthRules `json:"rules,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryCran struct { + Package string `json:"package"` + Repo string `json:"repo,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryMaven struct { + Coordinates string `json:"coordinates"` + Exclusions []string `json:"exclusions,omitempty"` + Repo string `json:"repo,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryPypi struct { + Package string `json:"package"` + Repo string `json:"repo,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibrary struct { + Egg string `json:"egg,omitempty"` + Jar string `json:"jar,omitempty"` + Whl string `json:"whl,omitempty"` + Cran *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryCran `json:"cran,omitempty"` + Maven *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryMaven `json:"maven,omitempty"` + Pypi *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibraryPypi `json:"pypi,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAutoscale struct { + MaxWorkers int `json:"max_workers,omitempty"` + MinWorkers int `json:"min_workers,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAwsAttributes struct { + Availability string `json:"availability,omitempty"` + EbsVolumeCount int `json:"ebs_volume_count,omitempty"` + EbsVolumeSize int 
`json:"ebs_volume_size,omitempty"` + EbsVolumeType string `json:"ebs_volume_type,omitempty"` + FirstOnDemand int `json:"first_on_demand,omitempty"` + InstanceProfileArn string `json:"instance_profile_arn,omitempty"` + SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"` + ZoneId string `json:"zone_id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAzureAttributes struct { + Availability string `json:"availability,omitempty"` + FirstOnDemand int `json:"first_on_demand,omitempty"` + SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConfDbfs struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConfS3 struct { + CannedAcl string `json:"canned_acl,omitempty"` + Destination string `json:"destination"` + EnableEncryption bool `json:"enable_encryption,omitempty"` + EncryptionType string `json:"encryption_type,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + KmsKey string `json:"kms_key,omitempty"` + Region string `json:"region,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConf struct { + Dbfs *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"` + S3 *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConfS3 `json:"s3,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfo struct { + MountOptions string `json:"mount_options,omitempty"` + ServerAddress string `json:"server_address"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfo struct { + LocalMountDirPath string `json:"local_mount_dir_path"` + RemoteMountDirPath string `json:"remote_mount_dir_path,omitempty"` + NetworkFilesystemInfo *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfo `json:"network_filesystem_info,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImageBasicAuth struct { + Password string `json:"password"` + Username string `json:"username"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImage struct { + Url string `json:"url"` + BasicAuth *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterGcpAttributes struct { + Availability string `json:"availability,omitempty"` + BootDiskSize int `json:"boot_disk_size,omitempty"` + GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` + UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` + ZoneId string `json:"zone_id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsAbfss struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsDbfs struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsFile struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsGcs struct { + Destination string `json:"destination"` +} + +type 
DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsS3 struct { + CannedAcl string `json:"canned_acl,omitempty"` + Destination string `json:"destination"` + EnableEncryption bool `json:"enable_encryption,omitempty"` + EncryptionType string `json:"encryption_type,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + KmsKey string `json:"kms_key,omitempty"` + Region string `json:"region,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsVolumes struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsWorkspace struct { + Destination string `json:"destination"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScripts struct { + Abfss *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"` + Dbfs *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"` + File *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsFile `json:"file,omitempty"` + Gcs *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"` + S3 *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsS3 `json:"s3,omitempty"` + Volumes *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsVolumes `json:"volumes,omitempty"` + Workspace *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScriptsWorkspace `json:"workspace,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadTypeClients struct { + Jobs bool `json:"jobs,omitempty"` + Notebooks bool `json:"notebooks,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadType struct { + Clients *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadTypeClients `json:"clients,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewCluster struct { + ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"` + AutoterminationMinutes int `json:"autotermination_minutes,omitempty"` + ClusterId string `json:"cluster_id,omitempty"` + ClusterName string `json:"cluster_name,omitempty"` + CustomTags map[string]string `json:"custom_tags,omitempty"` + DataSecurityMode string `json:"data_security_mode,omitempty"` + DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"` + DriverNodeTypeId string `json:"driver_node_type_id,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"` + EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"` + IdempotencyToken string `json:"idempotency_token,omitempty"` + InstancePoolId string `json:"instance_pool_id,omitempty"` + NodeTypeId string `json:"node_type_id,omitempty"` + NumWorkers int `json:"num_workers"` + PolicyId string `json:"policy_id,omitempty"` + RuntimeEngine string `json:"runtime_engine,omitempty"` + SingleUserName string `json:"single_user_name,omitempty"` + SparkConf map[string]string `json:"spark_conf,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"` + SparkVersion string `json:"spark_version"` + SshPublicKeys []string `json:"ssh_public_keys,omitempty"` + Autoscale *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAutoscale `json:"autoscale,omitempty"` + AwsAttributes *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAwsAttributes 
`json:"aws_attributes,omitempty"` + AzureAttributes *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"` + ClusterLogConf *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"` + ClusterMountInfo []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfo `json:"cluster_mount_info,omitempty"` + DockerImage *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImage `json:"docker_image,omitempty"` + GcpAttributes *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterGcpAttributes `json:"gcp_attributes,omitempty"` + InitScripts []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterInitScripts `json:"init_scripts,omitempty"` + WorkloadType *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadType `json:"workload_type,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNotebookTask struct { + BaseParameters map[string]string `json:"base_parameters,omitempty"` + NotebookPath string `json:"notebook_path"` + Source string `json:"source,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskPipelineTask struct { + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask struct { + EntryPoint string `json:"entry_point,omitempty"` + NamedParameters map[string]string `json:"named_parameters,omitempty"` + PackageName string `json:"package_name,omitempty"` + Parameters []string `json:"parameters,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskRunJobTask struct { + JobId int `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkJarTask struct { + JarUri string `json:"jar_uri,omitempty"` + MainClassName string `json:"main_class_name,omitempty"` + Parameters []string `json:"parameters,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkPythonTask struct { + Parameters []string `json:"parameters,omitempty"` + PythonFile string `json:"python_file"` + Source string `json:"source,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkSubmitTask struct { + Parameters []string `json:"parameters,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskAlertSubscriptions struct { + DestinationId string `json:"destination_id,omitempty"` + UserName string `json:"user_name,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskAlert struct { + AlertId string `json:"alert_id"` + PauseSubscriptions bool `json:"pause_subscriptions,omitempty"` + Subscriptions []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskAlertSubscriptions `json:"subscriptions,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboardSubscriptions struct { + DestinationId string `json:"destination_id,omitempty"` + UserName string `json:"user_name,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboard struct { + 
CustomSubject string `json:"custom_subject,omitempty"` + DashboardId string `json:"dashboard_id"` + PauseSubscriptions bool `json:"pause_subscriptions,omitempty"` + Subscriptions []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboardSubscriptions `json:"subscriptions,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskFile struct { + Path string `json:"path"` + Source string `json:"source,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskQuery struct { + QueryId string `json:"query_id"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTask struct { + Parameters map[string]string `json:"parameters,omitempty"` + WarehouseId string `json:"warehouse_id,omitempty"` + Alert *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskAlert `json:"alert,omitempty"` + Dashboard *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboard `json:"dashboard,omitempty"` + File *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskFile `json:"file,omitempty"` + Query *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskQuery `json:"query,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure struct { + Id string `json:"id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart struct { + Id string `json:"id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess struct { + Id string `json:"id,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotifications struct { + OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` + OnStart []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTaskTask struct { + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskDbtTask `json:"dbt_task,omitempty"` + DependsOn []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskHealth `json:"health,omitempty"` + 
Library []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskLibrary `json:"library,omitempty"` + NewCluster *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNotebookTask `json:"notebook_task,omitempty"` + NotificationSettings *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskRunJobTask `json:"run_job_task,omitempty"` + SparkJarTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskSqlTask `json:"sql_task,omitempty"` + WebhookNotifications *DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotifications `json:"webhook_notifications,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskForEachTask struct { + Concurrency int `json:"concurrency,omitempty"` + Inputs string `json:"inputs"` + Task *DataSourceJobJobSettingsSettingsTaskForEachTaskTask `json:"task,omitempty"` +} + type DataSourceJobJobSettingsSettingsTaskHealthRules struct { Metric string `json:"metric,omitempty"` Op string `json:"op,omitempty"` @@ -630,7 +997,7 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 struct { } type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsVolumes struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace struct { @@ -758,7 +1125,8 @@ type DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard struct { } type DataSourceJobJobSettingsSettingsTaskSqlTaskFile struct { - Path string `json:"path"` + Path string `json:"path"` + Source string `json:"source,omitempty"` } type DataSourceJobJobSettingsSettingsTaskSqlTaskQuery struct { @@ -812,6 +1180,7 @@ type DataSourceJobJobSettingsSettingsTask struct { DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"` DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"` EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"` + ForEachTask *DataSourceJobJobSettingsSettingsTaskForEachTask `json:"for_each_task,omitempty"` Health *DataSourceJobJobSettingsSettingsTaskHealth `json:"health,omitempty"` Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"` NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"` diff --git a/bundle/internal/tf/schema/data_source_storage_credential.go b/bundle/internal/tf/schema/data_source_storage_credential.go new file mode 100644 index 000000000..c7045d445 --- /dev/null +++ b/bundle/internal/tf/schema/data_source_storage_credential.go @@ -0,0 +1,57 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type DataSourceStorageCredentialStorageCredentialInfoAwsIamRole struct { + ExternalId string `json:"external_id,omitempty"` + RoleArn string `json:"role_arn"` + UnityCatalogIamArn string `json:"unity_catalog_iam_arn,omitempty"` +} + +type DataSourceStorageCredentialStorageCredentialInfoAzureManagedIdentity struct { + AccessConnectorId string `json:"access_connector_id"` + CredentialId string `json:"credential_id,omitempty"` + ManagedIdentityId string `json:"managed_identity_id,omitempty"` +} + +type DataSourceStorageCredentialStorageCredentialInfoAzureServicePrincipal struct { + ApplicationId string `json:"application_id"` + ClientSecret string `json:"client_secret"` + DirectoryId string `json:"directory_id"` +} + +type DataSourceStorageCredentialStorageCredentialInfoCloudflareApiToken struct { + AccessKeyId string `json:"access_key_id"` + AccountId string `json:"account_id"` + SecretAccessKey string `json:"secret_access_key"` +} + +type DataSourceStorageCredentialStorageCredentialInfoDatabricksGcpServiceAccount struct { + CredentialId string `json:"credential_id,omitempty"` + Email string `json:"email,omitempty"` +} + +type DataSourceStorageCredentialStorageCredentialInfo struct { + Comment string `json:"comment,omitempty"` + CreatedAt int `json:"created_at,omitempty"` + CreatedBy string `json:"created_by,omitempty"` + Id string `json:"id,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name,omitempty"` + Owner string `json:"owner,omitempty"` + ReadOnly bool `json:"read_only,omitempty"` + UpdatedAt int `json:"updated_at,omitempty"` + UpdatedBy string `json:"updated_by,omitempty"` + UsedForManagedStorage bool `json:"used_for_managed_storage,omitempty"` + AwsIamRole *DataSourceStorageCredentialStorageCredentialInfoAwsIamRole `json:"aws_iam_role,omitempty"` + AzureManagedIdentity *DataSourceStorageCredentialStorageCredentialInfoAzureManagedIdentity `json:"azure_managed_identity,omitempty"` + AzureServicePrincipal *DataSourceStorageCredentialStorageCredentialInfoAzureServicePrincipal `json:"azure_service_principal,omitempty"` + CloudflareApiToken *DataSourceStorageCredentialStorageCredentialInfoCloudflareApiToken `json:"cloudflare_api_token,omitempty"` + DatabricksGcpServiceAccount *DataSourceStorageCredentialStorageCredentialInfoDatabricksGcpServiceAccount `json:"databricks_gcp_service_account,omitempty"` +} + +type DataSourceStorageCredential struct { + Id string `json:"id,omitempty"` + Name string `json:"name"` + StorageCredentialInfo *DataSourceStorageCredentialStorageCredentialInfo `json:"storage_credential_info,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_storage_credentials.go b/bundle/internal/tf/schema/data_source_storage_credentials.go new file mode 100644 index 000000000..153def357 --- /dev/null +++ b/bundle/internal/tf/schema/data_source_storage_credentials.go @@ -0,0 +1,8 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type DataSourceStorageCredentials struct { + Id string `json:"id,omitempty"` + Names []string `json:"names,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_sources.go b/bundle/internal/tf/schema/data_sources.go index a88fa2e2e..698cbec93 100644 --- a/bundle/internal/tf/schema/data_sources.go +++ b/bundle/internal/tf/schema/data_sources.go @@ -39,6 +39,8 @@ type DataSources struct { SparkVersion map[string]any `json:"databricks_spark_version,omitempty"` SqlWarehouse map[string]any `json:"databricks_sql_warehouse,omitempty"` SqlWarehouses map[string]any `json:"databricks_sql_warehouses,omitempty"` + StorageCredential map[string]any `json:"databricks_storage_credential,omitempty"` + StorageCredentials map[string]any `json:"databricks_storage_credentials,omitempty"` Tables map[string]any `json:"databricks_tables,omitempty"` User map[string]any `json:"databricks_user,omitempty"` Views map[string]any `json:"databricks_views,omitempty"` @@ -84,6 +86,8 @@ func NewDataSources() *DataSources { SparkVersion: make(map[string]any), SqlWarehouse: make(map[string]any), SqlWarehouses: make(map[string]any), + StorageCredential: make(map[string]any), + StorageCredentials: make(map[string]any), Tables: make(map[string]any), User: make(map[string]any), Views: make(map[string]any), diff --git a/bundle/internal/tf/schema/resource_cluster.go b/bundle/internal/tf/schema/resource_cluster.go index 1a73b35a4..111efe8d5 100644 --- a/bundle/internal/tf/schema/resource_cluster.go +++ b/bundle/internal/tf/schema/resource_cluster.go @@ -90,7 +90,7 @@ type ResourceClusterInitScriptsDbfs struct { } type ResourceClusterInitScriptsFile struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type ResourceClusterInitScriptsGcs struct { diff --git a/bundle/internal/tf/schema/resource_file.go b/bundle/internal/tf/schema/resource_file.go new file mode 100644 index 000000000..40a307c9b --- /dev/null +++ b/bundle/internal/tf/schema/resource_file.go @@ -0,0 +1,14 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type ResourceFile struct { + ContentBase64 string `json:"content_base64,omitempty"` + FileSize int `json:"file_size,omitempty"` + Id string `json:"id,omitempty"` + Md5 string `json:"md5,omitempty"` + ModificationTime string `json:"modification_time,omitempty"` + Path string `json:"path"` + RemoteFileModified bool `json:"remote_file_modified,omitempty"` + Source string `json:"source,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index 96c0c2970..f8d08aefa 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -21,6 +21,7 @@ type ResourceJobDbtTask struct { ProfilesDirectory string `json:"profiles_directory,omitempty"` ProjectDirectory string `json:"project_directory,omitempty"` Schema string `json:"schema,omitempty"` + Source string `json:"source,omitempty"` WarehouseId string `json:"warehouse_id,omitempty"` } @@ -160,7 +161,7 @@ type ResourceJobJobClusterNewClusterInitScriptsS3 struct { } type ResourceJobJobClusterNewClusterInitScriptsVolumes struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type ResourceJobJobClusterNewClusterInitScriptsWorkspace struct { @@ -347,7 +348,7 @@ type ResourceJobNewClusterInitScriptsS3 struct { } type ResourceJobNewClusterInitScriptsVolumes struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type ResourceJobNewClusterInitScriptsWorkspace struct { @@ -482,6 +483,7 @@ type ResourceJobTaskDbtTask struct { ProfilesDirectory string `json:"profiles_directory,omitempty"` ProjectDirectory string `json:"project_directory,omitempty"` Schema string `json:"schema,omitempty"` + Source string `json:"source,omitempty"` WarehouseId string `json:"warehouse_id,omitempty"` } @@ -497,6 +499,371 @@ type ResourceJobTaskEmailNotifications struct { OnSuccess []string `json:"on_success,omitempty"` } +type ResourceJobTaskForEachTaskTaskConditionTask struct { + Left string `json:"left,omitempty"` + Op string `json:"op,omitempty"` + Right string `json:"right,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskDbtTask struct { + Catalog string `json:"catalog,omitempty"` + Commands []string `json:"commands"` + ProfilesDirectory string `json:"profiles_directory,omitempty"` + ProjectDirectory string `json:"project_directory,omitempty"` + Schema string `json:"schema,omitempty"` + Source string `json:"source,omitempty"` + WarehouseId string `json:"warehouse_id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskDependsOn struct { + Outcome string `json:"outcome,omitempty"` + TaskKey string `json:"task_key"` +} + +type ResourceJobTaskForEachTaskTaskEmailNotifications struct { + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskHealth struct { + Rules []ResourceJobTaskForEachTaskTaskHealthRules `json:"rules,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskLibraryCran struct { + Package string `json:"package"` + Repo string `json:"repo,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskLibraryMaven struct { + Coordinates string 
`json:"coordinates"` + Exclusions []string `json:"exclusions,omitempty"` + Repo string `json:"repo,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskLibraryPypi struct { + Package string `json:"package"` + Repo string `json:"repo,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskLibrary struct { + Egg string `json:"egg,omitempty"` + Jar string `json:"jar,omitempty"` + Whl string `json:"whl,omitempty"` + Cran *ResourceJobTaskForEachTaskTaskLibraryCran `json:"cran,omitempty"` + Maven *ResourceJobTaskForEachTaskTaskLibraryMaven `json:"maven,omitempty"` + Pypi *ResourceJobTaskForEachTaskTaskLibraryPypi `json:"pypi,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterAutoscale struct { + MaxWorkers int `json:"max_workers,omitempty"` + MinWorkers int `json:"min_workers,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterAwsAttributes struct { + Availability string `json:"availability,omitempty"` + EbsVolumeCount int `json:"ebs_volume_count,omitempty"` + EbsVolumeSize int `json:"ebs_volume_size,omitempty"` + EbsVolumeType string `json:"ebs_volume_type,omitempty"` + FirstOnDemand int `json:"first_on_demand,omitempty"` + InstanceProfileArn string `json:"instance_profile_arn,omitempty"` + SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"` + ZoneId string `json:"zone_id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterAzureAttributes struct { + Availability string `json:"availability,omitempty"` + FirstOnDemand int `json:"first_on_demand,omitempty"` + SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterClusterLogConfDbfs struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterClusterLogConfS3 struct { + CannedAcl string `json:"canned_acl,omitempty"` + Destination string `json:"destination"` + EnableEncryption bool `json:"enable_encryption,omitempty"` + EncryptionType string `json:"encryption_type,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + KmsKey string `json:"kms_key,omitempty"` + Region string `json:"region,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterClusterLogConf struct { + Dbfs *ResourceJobTaskForEachTaskTaskNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"` + S3 *ResourceJobTaskForEachTaskTaskNewClusterClusterLogConfS3 `json:"s3,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfo struct { + MountOptions string `json:"mount_options,omitempty"` + ServerAddress string `json:"server_address"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterClusterMountInfo struct { + LocalMountDirPath string `json:"local_mount_dir_path"` + RemoteMountDirPath string `json:"remote_mount_dir_path,omitempty"` + NetworkFilesystemInfo *ResourceJobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfo `json:"network_filesystem_info,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterDockerImageBasicAuth struct { + Password string `json:"password"` + Username string `json:"username"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterDockerImage struct { + Url string `json:"url"` + BasicAuth *ResourceJobTaskForEachTaskTaskNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterGcpAttributes struct { + Availability string `json:"availability,omitempty"` + BootDiskSize int `json:"boot_disk_size,omitempty"` + GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int 
`json:"local_ssd_count,omitempty"` + UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` + ZoneId string `json:"zone_id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsAbfss struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsDbfs struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsFile struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsGcs struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsS3 struct { + CannedAcl string `json:"canned_acl,omitempty"` + Destination string `json:"destination"` + EnableEncryption bool `json:"enable_encryption,omitempty"` + EncryptionType string `json:"encryption_type,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + KmsKey string `json:"kms_key,omitempty"` + Region string `json:"region,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsVolumes struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScriptsWorkspace struct { + Destination string `json:"destination"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterInitScripts struct { + Abfss *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"` + Dbfs *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"` + File *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsFile `json:"file,omitempty"` + Gcs *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"` + S3 *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsS3 `json:"s3,omitempty"` + Volumes *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsVolumes `json:"volumes,omitempty"` + Workspace *ResourceJobTaskForEachTaskTaskNewClusterInitScriptsWorkspace `json:"workspace,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterWorkloadTypeClients struct { + Jobs bool `json:"jobs,omitempty"` + Notebooks bool `json:"notebooks,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewClusterWorkloadType struct { + Clients *ResourceJobTaskForEachTaskTaskNewClusterWorkloadTypeClients `json:"clients,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNewCluster struct { + ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"` + AutoterminationMinutes int `json:"autotermination_minutes,omitempty"` + ClusterId string `json:"cluster_id,omitempty"` + ClusterName string `json:"cluster_name,omitempty"` + CustomTags map[string]string `json:"custom_tags,omitempty"` + DataSecurityMode string `json:"data_security_mode,omitempty"` + DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"` + DriverNodeTypeId string `json:"driver_node_type_id,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"` + EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"` + IdempotencyToken string `json:"idempotency_token,omitempty"` + InstancePoolId string `json:"instance_pool_id,omitempty"` + NodeTypeId string `json:"node_type_id,omitempty"` + NumWorkers int `json:"num_workers"` + PolicyId string `json:"policy_id,omitempty"` + RuntimeEngine string `json:"runtime_engine,omitempty"` + SingleUserName string `json:"single_user_name,omitempty"` + SparkConf map[string]string `json:"spark_conf,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"` + 
SparkVersion string `json:"spark_version"` + SshPublicKeys []string `json:"ssh_public_keys,omitempty"` + Autoscale *ResourceJobTaskForEachTaskTaskNewClusterAutoscale `json:"autoscale,omitempty"` + AwsAttributes *ResourceJobTaskForEachTaskTaskNewClusterAwsAttributes `json:"aws_attributes,omitempty"` + AzureAttributes *ResourceJobTaskForEachTaskTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"` + ClusterLogConf *ResourceJobTaskForEachTaskTaskNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"` + ClusterMountInfo []ResourceJobTaskForEachTaskTaskNewClusterClusterMountInfo `json:"cluster_mount_info,omitempty"` + DockerImage *ResourceJobTaskForEachTaskTaskNewClusterDockerImage `json:"docker_image,omitempty"` + GcpAttributes *ResourceJobTaskForEachTaskTaskNewClusterGcpAttributes `json:"gcp_attributes,omitempty"` + InitScripts []ResourceJobTaskForEachTaskTaskNewClusterInitScripts `json:"init_scripts,omitempty"` + WorkloadType *ResourceJobTaskForEachTaskTaskNewClusterWorkloadType `json:"workload_type,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNotebookTask struct { + BaseParameters map[string]string `json:"base_parameters,omitempty"` + NotebookPath string `json:"notebook_path"` + Source string `json:"source,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskPipelineTask struct { + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` +} + +type ResourceJobTaskForEachTaskTaskPythonWheelTask struct { + EntryPoint string `json:"entry_point,omitempty"` + NamedParameters map[string]string `json:"named_parameters,omitempty"` + PackageName string `json:"package_name,omitempty"` + Parameters []string `json:"parameters,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskRunJobTask struct { + JobId int `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSparkJarTask struct { + JarUri string `json:"jar_uri,omitempty"` + MainClassName string `json:"main_class_name,omitempty"` + Parameters []string `json:"parameters,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSparkPythonTask struct { + Parameters []string `json:"parameters,omitempty"` + PythonFile string `json:"python_file"` + Source string `json:"source,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSparkSubmitTask struct { + Parameters []string `json:"parameters,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskAlertSubscriptions struct { + DestinationId string `json:"destination_id,omitempty"` + UserName string `json:"user_name,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskAlert struct { + AlertId string `json:"alert_id"` + PauseSubscriptions bool `json:"pause_subscriptions,omitempty"` + Subscriptions []ResourceJobTaskForEachTaskTaskSqlTaskAlertSubscriptions `json:"subscriptions,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskDashboardSubscriptions struct { + DestinationId string `json:"destination_id,omitempty"` + UserName string `json:"user_name,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskDashboard struct { + CustomSubject string `json:"custom_subject,omitempty"` + DashboardId string `json:"dashboard_id"` + PauseSubscriptions bool 
`json:"pause_subscriptions,omitempty"` + Subscriptions []ResourceJobTaskForEachTaskTaskSqlTaskDashboardSubscriptions `json:"subscriptions,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskFile struct { + Path string `json:"path"` + Source string `json:"source,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskSqlTaskQuery struct { + QueryId string `json:"query_id"` +} + +type ResourceJobTaskForEachTaskTaskSqlTask struct { + Parameters map[string]string `json:"parameters,omitempty"` + WarehouseId string `json:"warehouse_id,omitempty"` + Alert *ResourceJobTaskForEachTaskTaskSqlTaskAlert `json:"alert,omitempty"` + Dashboard *ResourceJobTaskForEachTaskTaskSqlTaskDashboard `json:"dashboard,omitempty"` + File *ResourceJobTaskForEachTaskTaskSqlTaskFile `json:"file,omitempty"` + Query *ResourceJobTaskForEachTaskTaskSqlTaskQuery `json:"query,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnFailure struct { + Id string `json:"id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart struct { + Id string `json:"id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess struct { + Id string `json:"id,omitempty"` +} + +type ResourceJobTaskForEachTaskTaskWebhookNotifications struct { + OnDurationWarningThresholdExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` + OnStart []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` +} + +type ResourceJobTaskForEachTaskTask struct { + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *ResourceJobTaskForEachTaskTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *ResourceJobTaskForEachTaskTaskDbtTask `json:"dbt_task,omitempty"` + DependsOn []ResourceJobTaskForEachTaskTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *ResourceJobTaskForEachTaskTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *ResourceJobTaskForEachTaskTaskHealth `json:"health,omitempty"` + Library []ResourceJobTaskForEachTaskTaskLibrary `json:"library,omitempty"` + NewCluster *ResourceJobTaskForEachTaskTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *ResourceJobTaskForEachTaskTaskNotebookTask `json:"notebook_task,omitempty"` + NotificationSettings *ResourceJobTaskForEachTaskTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *ResourceJobTaskForEachTaskTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *ResourceJobTaskForEachTaskTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask 
*ResourceJobTaskForEachTaskTaskRunJobTask `json:"run_job_task,omitempty"` + SparkJarTask *ResourceJobTaskForEachTaskTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *ResourceJobTaskForEachTaskTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *ResourceJobTaskForEachTaskTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *ResourceJobTaskForEachTaskTaskSqlTask `json:"sql_task,omitempty"` + WebhookNotifications *ResourceJobTaskForEachTaskTaskWebhookNotifications `json:"webhook_notifications,omitempty"` +} + +type ResourceJobTaskForEachTask struct { + Concurrency int `json:"concurrency,omitempty"` + Inputs string `json:"inputs"` + Task *ResourceJobTaskForEachTaskTask `json:"task,omitempty"` +} + type ResourceJobTaskHealthRules struct { Metric string `json:"metric,omitempty"` Op string `json:"op,omitempty"` @@ -630,7 +997,7 @@ type ResourceJobTaskNewClusterInitScriptsS3 struct { } type ResourceJobTaskNewClusterInitScriptsVolumes struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type ResourceJobTaskNewClusterInitScriptsWorkspace struct { @@ -758,7 +1125,8 @@ type ResourceJobTaskSqlTaskDashboard struct { } type ResourceJobTaskSqlTaskFile struct { - Path string `json:"path"` + Path string `json:"path"` + Source string `json:"source,omitempty"` } type ResourceJobTaskSqlTaskQuery struct { @@ -812,6 +1180,7 @@ type ResourceJobTask struct { DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"` DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"` EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"` + ForEachTask *ResourceJobTaskForEachTask `json:"for_each_task,omitempty"` Health *ResourceJobTaskHealth `json:"health,omitempty"` Library []ResourceJobTaskLibrary `json:"library,omitempty"` NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"` diff --git a/bundle/internal/tf/schema/resource_pipeline.go b/bundle/internal/tf/schema/resource_pipeline.go index 8737985c9..3cad9ac41 100644 --- a/bundle/internal/tf/schema/resource_pipeline.go +++ b/bundle/internal/tf/schema/resource_pipeline.go @@ -78,7 +78,7 @@ type ResourcePipelineClusterInitScriptsS3 struct { } type ResourcePipelineClusterInitScriptsVolumes struct { - Destination string `json:"destination,omitempty"` + Destination string `json:"destination"` } type ResourcePipelineClusterInitScriptsWorkspace struct { diff --git a/bundle/internal/tf/schema/resource_vector_search_endpoint.go b/bundle/internal/tf/schema/resource_vector_search_endpoint.go new file mode 100644 index 000000000..392c78611 --- /dev/null +++ b/bundle/internal/tf/schema/resource_vector_search_endpoint.go @@ -0,0 +1,16 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type ResourceVectorSearchEndpoint struct { + CreationTimestamp int `json:"creation_timestamp,omitempty"` + Creator string `json:"creator,omitempty"` + EndpointId string `json:"endpoint_id,omitempty"` + EndpointStatus []any `json:"endpoint_status,omitempty"` + EndpointType string `json:"endpoint_type"` + Id string `json:"id,omitempty"` + LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"` + LastUpdatedUser string `json:"last_updated_user,omitempty"` + Name string `json:"name"` + NumIndexes int `json:"num_indexes,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_volume.go b/bundle/internal/tf/schema/resource_volume.go index 77d499a69..4a82d8e85 100644 --- a/bundle/internal/tf/schema/resource_volume.go +++ b/bundle/internal/tf/schema/resource_volume.go @@ -10,5 +10,6 @@ type ResourceVolume struct { Owner string `json:"owner,omitempty"` SchemaName string `json:"schema_name"` StorageLocation string `json:"storage_location,omitempty"` + VolumePath string `json:"volume_path,omitempty"` VolumeType string `json:"volume_type"` } diff --git a/bundle/internal/tf/schema/resources.go b/bundle/internal/tf/schema/resources.go index 57f11d4b4..4cc81e7e7 100644 --- a/bundle/internal/tf/schema/resources.go +++ b/bundle/internal/tf/schema/resources.go @@ -19,6 +19,7 @@ type Resources struct { Directory map[string]any `json:"databricks_directory,omitempty"` Entitlements map[string]any `json:"databricks_entitlements,omitempty"` ExternalLocation map[string]any `json:"databricks_external_location,omitempty"` + File map[string]any `json:"databricks_file,omitempty"` GitCredential map[string]any `json:"databricks_git_credential,omitempty"` GlobalInitScript map[string]any `json:"databricks_global_init_script,omitempty"` Grant map[string]any `json:"databricks_grant,omitempty"` @@ -82,6 +83,7 @@ type Resources struct { User map[string]any `json:"databricks_user,omitempty"` UserInstanceProfile map[string]any `json:"databricks_user_instance_profile,omitempty"` UserRole map[string]any `json:"databricks_user_role,omitempty"` + VectorSearchEndpoint map[string]any `json:"databricks_vector_search_endpoint,omitempty"` Volume map[string]any `json:"databricks_volume,omitempty"` WorkspaceConf map[string]any `json:"databricks_workspace_conf,omitempty"` WorkspaceFile map[string]any `json:"databricks_workspace_file,omitempty"` @@ -105,6 +107,7 @@ func NewResources() *Resources { Directory: make(map[string]any), Entitlements: make(map[string]any), ExternalLocation: make(map[string]any), + File: make(map[string]any), GitCredential: make(map[string]any), GlobalInitScript: make(map[string]any), Grant: make(map[string]any), @@ -168,6 +171,7 @@ func NewResources() *Resources { User: make(map[string]any), UserInstanceProfile: make(map[string]any), UserRole: make(map[string]any), + VectorSearchEndpoint: make(map[string]any), Volume: make(map[string]any), WorkspaceConf: make(map[string]any), WorkspaceFile: make(map[string]any), diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 963ae1460..f0253c285 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -25,7 +25,7 @@ func NewRoot() *Root { "required_providers": map[string]interface{}{ "databricks": map[string]interface{}{ "source": "databricks/databricks", - "version": "1.36.2", + "version": "1.37.0", }, }, },
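
A minimal sketch of how the newly added `for_each_task` schema types are meant to be used, assuming the usual pattern for `bundle/internal/tf/schema`: the generated structs are plain JSON-tagged Go values that get marshaled into the Terraform JSON configuration a bundle deploys. The type definitions below are trimmed copies of the generated ones so the snippet is self-contained; the task key, notebook path, and inputs are made-up example values, not part of the patch.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copies of the generated types in resource_job.go, reproduced here
// only so the sketch compiles on its own.
type ResourceJobTaskForEachTaskTaskNotebookTask struct {
	BaseParameters map[string]string `json:"base_parameters,omitempty"`
	NotebookPath   string            `json:"notebook_path"`
	Source         string            `json:"source,omitempty"`
}

type ResourceJobTaskForEachTaskTask struct {
	TaskKey      string                                      `json:"task_key,omitempty"`
	NotebookTask *ResourceJobTaskForEachTaskTaskNotebookTask `json:"notebook_task,omitempty"`
}

type ResourceJobTaskForEachTask struct {
	Concurrency int                             `json:"concurrency,omitempty"`
	Inputs      string                          `json:"inputs"`
	Task        *ResourceJobTaskForEachTaskTask `json:"task,omitempty"`
}

func main() {
	// Fan a notebook task out over a JSON-encoded list of inputs, running at
	// most two iterations concurrently. All values are illustrative.
	forEach := ResourceJobTaskForEachTask{
		Concurrency: 2,
		Inputs:      `["us", "eu", "apac"]`,
		Task: &ResourceJobTaskForEachTaskTask{
			TaskKey: "process_region",
			NotebookTask: &ResourceJobTaskForEachTaskTaskNotebookTask{
				NotebookPath: "/Workspace/jobs/process_region",
			},
		},
	}

	out, err := json.MarshalIndent(forEach, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints the JSON fragment that would sit under a job task's
	// for_each_task key in the generated Terraform configuration.
	fmt.Println(string(out))
}
```

Running the sketch prints the nested `task` object together with `concurrency` and `inputs`, which is the shape the new `ForEachTask` field on `ResourceJobTask` serializes to.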
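
A second sketch, under the same assumptions, for the registration side shown in `resources.go`: each Terraform resource type is exposed as a `map[string]any` keyed by resource name, and the map's JSON tag supplies the Terraform type name (here `databricks_file`, one of the additions in this patch). `Resources`, `NewResources`, and `ResourceFile` are trimmed copies of the generated code; the resource name and paths are invented for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the generated ResourceFile type from resource_file.go.
type ResourceFile struct {
	Path   string `json:"path"`
	Source string `json:"source,omitempty"`
}

// Trimmed copy of the generated Resources type from resources.go, keeping
// only the databricks_file entry.
type Resources struct {
	File map[string]any `json:"databricks_file,omitempty"`
}

func NewResources() *Resources {
	return &Resources{
		File: make(map[string]any),
	}
}

func main() {
	res := NewResources()
	// Register one databricks_file resource under the name "init_script".
	res.File["init_script"] = ResourceFile{
		Path:   "/Volumes/main/default/scripts/init.sh",
		Source: "./scripts/init.sh",
	}

	out, err := json.MarshalIndent(res, "", "  ")
	if err != nil {
		panic(err)
	}
	// Produces {"databricks_file": {"init_script": {"path": ..., "source": ...}}},
	// the type -> name -> attributes nesting Terraform's JSON syntax uses
	// inside a resource block.
	fmt.Println(string(out))
}
```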