From 5ef341430aa7e165e10c611054e3af678571ce92 Mon Sep 17 00:00:00 2001 From: Vuong Date: Fri, 28 Jul 2023 18:05:44 +0100 Subject: [PATCH 01/22] first draft --- catalog/resource_connection.go | 100 ++++++++ catalog/resource_connection_test.go | 315 +++++++++++++++++++++++++ docs/resources/connection.md | 49 ++++ internal/acceptance/connection_test.go | 23 ++ provider/provider.go | 1 + 5 files changed, 488 insertions(+) create mode 100644 catalog/resource_connection.go create mode 100644 catalog/resource_connection_test.go create mode 100644 docs/resources/connection.md create mode 100644 internal/acceptance/connection_test.go diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go new file mode 100644 index 0000000000..0f59bf4947 --- /dev/null +++ b/catalog/resource_connection.go @@ -0,0 +1,100 @@ +package catalog + +import ( + "context" + + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// This structure contains the fields of catalog.UpdateConnection and catalog.CreateConnection +// We need to create this because we need Owner, FullNameArg, SchemaName and CatalogName which aren't present in a single of them. +// We also need to annotate tf:"computed" for the Owner field. +type ConnectionInfo struct { + // User-provided free-form text description. + Comment string `json:"comment,omitempty" tf:"force_new"` + // The type of connection. + ConnectionType string `json:"connection_type" tf:"force_new"` + // Name of the connection. + Name string `json:"name"` + // Name of the connection. + NameArg string `json:"-" url:"-"` + // A map of key-value properties attached to the securable. + OptionsKvpairs map[string]string `json:"options_kvpairs" tf:"alias:options,sensitive"` + // Username of current owner of the connection. + Owner string `json:"owner,omitempty" tf:"force_new"` + // An object containing map of key-value properties attached to the + // connection. + PropertiesKvpairs map[string]string `json:"properties_kvpairs,omitempty" tf:"alias:properties,force_new"` + // If the connection is read only. 
+	ReadOnly bool `json:"read_only,omitempty" tf:"force_new"`
+}
+
+func ResourceConnection() *schema.Resource {
+	s := common.StructToSchema(ConnectionInfo{},
+		func(m map[string]*schema.Schema) map[string]*schema.Schema {
+			return m
+		})
+	return common.Resource{
+		Schema: s,
+		Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
+			w, err := c.WorkspaceClient()
+			if err != nil {
+				return err
+			}
+			var createConnectionRequest catalog.CreateConnection
+			var alias ConnectionInfo
+			common.DataToStructPointer(d, s, &createConnectionRequest)
+			common.DataToStructPointer(d, s, &alias)
+			// workaround: tf:"alias" cannot be set on the Go SDK struct
+			createConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs
+			createConnectionRequest.PropertiesKvpairs = alias.PropertiesKvpairs
+			conn, err := w.Connections.Create(ctx, createConnectionRequest)
+			if err != nil {
+				return err
+			}
+			d.SetId(conn.Name)
+			return nil
+		},
+		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
+			w, err := c.WorkspaceClient()
+			if err != nil {
+				return err
+			}
+			conn, err := w.Connections.GetByNameArg(ctx, d.Id())
+			if err != nil {
+				return err
+			}
+			return common.StructToData(conn, s, d)
+		},
+		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
+			w, err := c.WorkspaceClient()
+			if err != nil {
+				return err
+			}
+			var updateConnectionRequest catalog.UpdateConnection
+			var alias ConnectionInfo
+			common.DataToStructPointer(d, s, &updateConnectionRequest)
+			common.DataToStructPointer(d, s, &alias)
+			// workaround: tf:"alias" cannot be set on the Go SDK struct
+			updateConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs
+			updateConnectionRequest.NameArg = d.Id()
+			conn, err := w.Connections.Update(ctx, updateConnectionRequest)
+			if err != nil {
+				return err
+			}
+			// We need to update the resource Id because Name is updatable and the resource Id is the Name,
+			// so if we don't update it, subsequent requests would be made with the old name, which no longer exists.
+ d.SetId(conn.Name) + return nil + }, + Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + return w.Connections.DeleteByNameArg(ctx, d.Id()) + }, + }.ToResource() +} diff --git a/catalog/resource_connection_test.go b/catalog/resource_connection_test.go new file mode 100644 index 0000000000..082c8e36c9 --- /dev/null +++ b/catalog/resource_connection_test.go @@ -0,0 +1,315 @@ +package catalog + +import ( + "net/http" + "testing" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/terraform-provider-databricks/qa" + "github.com/stretchr/testify/assert" +) + +func TestConnectionsCornerCases(t *testing.T) { + qa.ResourceCornerCases(t, ResourceExternalLocation()) +} + +func TestConnectionsCreate(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodPost, + Resource: "/api/2.1/unity-catalog/connections", + ExpectedRequest: catalog.CreateConnection{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "This is a test comment.", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + PropertiesKvpairs: map[string]string{ + "purpose": "testing", + }, + Owner: "InitialOwner", + }, + Response: catalog.ConnectionInfo{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "This is a test comment.", + FullName: "testConnectionName", + Owner: "InitialOwner", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + PropertiesKvpairs: map[string]string{ + "purpose": "testing", + }, + }, + }, + { + Method: http.MethodGet, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + Response: catalog.ConnectionInfo{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "This is a test comment.", + FullName: "testConnectionName", + Owner: "InitialOwner", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + PropertiesKvpairs: map[string]string{ + "purpose": "testing", + }, + }, + }, + }, + Resource: ResourceConnection(), + Create: true, + HCL: ` + name = "testConnectionName" + connection_type = "testConnectionType" + options = { + host = "test.com" + } + properties = { + purpose = "testing" + } + comment = "This is a test comment." 
+ owner = "InitialOwner" + `, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "testConnectionName", d.Get("name")) + assert.Equal(t, "testConnectionType", d.Get("connection_type")) + assert.Equal(t, "This is a test comment.", d.Get("comment")) + assert.Equal(t, map[string]interface{}{"host": "test.com"}, d.Get("options")) + assert.Equal(t, map[string]interface{}{"purpose": "testing"}, d.Get("properties")) +} + +func TestConnectionsCreate_Error(t *testing.T) { + _, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodPost, + Resource: "/api/2.1/unity-catalog/connections", + ExpectedRequest: catalog.CreateConnection{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "This is a test comment.", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + Owner: "testOwner", + }, + Response: apierr.APIErrorBody{ + ErrorCode: "SERVER_ERROR", + Message: "Something unexpected happened", + }, + Status: 500, + }, + }, + Resource: ResourceConnection(), + Create: true, + HCL: ` + name = "testConnectionName" + owner = "testOwner" + connection_type = "testConnectionType" + options = { + host = "test.com" + } + comment = "This is a test comment." + `, + }.Apply(t) + qa.AssertErrorStartsWith(t, err, "Something unexpected") +} + +func TestConnectionsRead(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodGet, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + Response: catalog.ConnectionInfo{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "This is a test comment.", + FullName: "testConnectionName", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + }, + }, + }, + Resource: ResourceConnection(), + Read: true, + ID: "testConnectionName", + HCL: ` + name = "testConnectionName" + connection_type = "testConnectionType" + options = { + host = "test.com" + } + comment = "This is a test comment." 
+ `, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "testConnectionName", d.Get("name")) + assert.Equal(t, "testConnectionType", d.Get("connection_type")) + assert.Equal(t, "This is a test comment.", d.Get("comment")) + assert.Equal(t, map[string]interface{}{"host": "test.com"}, d.Get("options")) +} + +func TestResourceConnectionRead_Error(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + Response: apierr.APIErrorBody{ + ErrorCode: "INVALID_REQUEST", + Message: "Internal error happened", + }, + Status: 400, + }, + }, + Resource: ResourceConnection(), + Read: true, + ID: "testConnectionName", + }.Apply(t) + qa.AssertErrorStartsWith(t, err, "Internal error happened") + assert.Equal(t, "testConnectionName", d.Id(), "Id should not be empty for error reads") +} + +func TestConnectionsUpdate(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodGet, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + Response: catalog.ConnectionInfo{ + Name: "testConnectionName", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "testComment", + }, + }, + { + Method: http.MethodPatch, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName", + ExpectedRequest: catalog.UpdateConnection{ + Name: "testConnectionNameNew", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + }, + Response: catalog.ConnectionInfo{ + Name: "testConnectionNameNew", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "testComment", + }, + }, + { + Method: http.MethodGet, + Resource: "/api/2.1/unity-catalog/connections/testConnectionNameNew?", + Response: catalog.ConnectionInfo{ + Name: "testConnectionNameNew", + ConnectionType: catalog.ConnectionType("testConnectionType"), + Comment: "testComment", + }, + }, + }, + Resource: ResourceConnection(), + Update: true, + ID: "testConnectionName", + InstanceState: map[string]string{ + "connection_type": "testConnectionType", + "comment": "testComment", + }, + HCL: ` + name = "testConnectionNameNew" + connection_type = "testConnectionType" + comment = "testComment" + options = { + host = "test.com" + } + `, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "testConnectionNameNew", d.Get("name")) + assert.Equal(t, "testConnectionType", d.Get("connection_type")) + assert.Equal(t, "testComment", d.Get("comment")) +} + +func TestConnectionUpdate_Error(t *testing.T) { + _, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodPatch, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName", + ExpectedRequest: catalog.UpdateConnection{ + Name: "testConnectionNameNew", + OptionsKvpairs: map[string]string{ + "host": "test.com", + }, + }, + Response: apierr.APIErrorBody{ + ErrorCode: "SERVER_ERROR", + Message: "Something unexpected happened", + }, + Status: 500, + }, + }, + Resource: ResourceConnection(), + Update: true, + ID: "testConnectionName", + InstanceState: map[string]string{ + "connection_type": "testConnectionType", + "comment": "testComment", + }, + HCL: ` + name = "testConnectionNameNew" + connection_type = "testConnectionType" + options = { + host = "test.com" + } + comment = "testComment" + `, + }.Apply(t) + qa.AssertErrorStartsWith(t, err, "Something unexpected") +} + +func TestConnectionDelete(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + 
Method: http.MethodDelete, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + }, + }, + Resource: ResourceConnection(), + Delete: true, + ID: "testConnectionName", + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "testConnectionName", d.Id()) +} + +func TestConnectionDelete_Error(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: http.MethodDelete, + Resource: "/api/2.1/unity-catalog/connections/testConnectionName?", + Response: apierr.APIErrorBody{ + ErrorCode: "INVALID_STATE", + Message: "Something went wrong", + }, + Status: 400, + }, + }, + Resource: ResourceConnection(), + Delete: true, + Removed: true, + ID: "testConnectionName", + }.ExpectError(t, "Something went wrong") +} diff --git a/docs/resources/connection.md b/docs/resources/connection.md new file mode 100644 index 0000000000..80f59880b0 --- /dev/null +++ b/docs/resources/connection.md @@ -0,0 +1,49 @@ +--- +subcategory: "Unity Catalog" +--- +# databricks_connection (Resource) + +Lakehouse Federation is the query federation platform for Databricks. Databricks uses Unity Catalog to manage query federation. To make a dataset available for read-only querying using Lakehouse Federation, you create the following: + +- A connection, a securable object in Unity Catalog that specifies a path and credentials for accessing an external database system. +- A foreign [catalog](catalog.md) + +This resource manages connections in Unity Catalog + +## Example Usage + +```hcl +resource "databricks_connection" "mysql" { + name = "mysql_connection" + connection_type = "MYSQL" + comment = "this is a connection to mysql db" + options = { + host = "test.mysql.database.azure.com" + port = "3306" + user = "user" + password = "password" + } + properties = { + purpose = "testing" + } +} +``` + +## Argument Reference + +The following arguments are supported: + +- `name` - Name of the Connection +- `connection_type` - Connection type. `MYSQL` `POSTGRESQL` `SNOWFLAKE` `REDSHIFT` `SQLDW` `SQLSERVER` or `DATABRICKS`. +- `options` - The key value of options required by the connection, e.g. `host`, `port`, `user` and `password`. +- `owner` - (Optional) Name of the connection owner. +- `properties` - (Optional) Free-form connection properties. +- `comment` - (Optional) Free-form text. 
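As noted in the introduction, a connection is typically paired with a foreign catalog. A minimal sketch of that pairing, assuming the `connection_name` attribute that a later commit in this series adds to `databricks_catalog`; any additional options a foreign catalog may require (such as the source database name) are not covered by this excerpt:

```hcl
resource "databricks_connection" "mysql" {
  name            = "mysql_connection"
  connection_type = "MYSQL"
  comment         = "this is a connection to mysql db"
  options = {
    host     = "test.mysql.database.azure.com"
    port     = "3306"
    user     = "user"
    password = "password"
  }
}

resource "databricks_catalog" "foreign" {
  name            = "mysql_catalog"
  connection_name = databricks_connection.mysql.name
  comment         = "foreign catalog backed by the MySQL connection above"
}
```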
+ +## Import + +This resource can be imported by `name` + +```bash +terraform import databricks_connection.this +``` diff --git a/internal/acceptance/connection_test.go b/internal/acceptance/connection_test.go new file mode 100644 index 0000000000..85299253e2 --- /dev/null +++ b/internal/acceptance/connection_test.go @@ -0,0 +1,23 @@ +package acceptance + +import ( + "testing" +) + +func TestUcAccConnectionsResourceFullLifecycle(t *testing.T) { + unityWorkspaceLevel(t, step{ + Template: ` + resource "databricks_volume" "this" { + name = "name-{var.STICKY_RANDOM}" + comment = "comment-{var.STICKY_RANDOM}" + connection_type = "MYSQL" + comment = "this is a connection to mysql db" + options = { + host = "test.mysql.database.azure.com" + port = "3306" + user = "user" + password = "password" + } + }`, + }) +} diff --git a/provider/provider.go b/provider/provider.go index 0fd95cd5ad..105787ed9c 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -91,6 +91,7 @@ func DatabricksProvider() *schema.Provider { "databricks_azure_blob_mount": storage.ResourceAzureBlobMount(), "databricks_catalog": catalog.ResourceCatalog(), "databricks_catalog_workspace_binding": catalog.ResourceCatalogWorkspaceBinding(), + "databricks_connection": catalog.ResourceConnection(), "databricks_cluster": clusters.ResourceCluster(), "databricks_cluster_policy": policies.ResourceClusterPolicy(), "databricks_dbfs_file": storage.ResourceDbfsFile(), From a007f4a2d2e221b147e2928f2c20ca8d61873198 Mon Sep 17 00:00:00 2001 From: Vuong Date: Fri, 28 Jul 2023 18:09:01 +0100 Subject: [PATCH 02/22] add foreign catalog --- catalog/resource_catalog.go | 19 ++++++++++--------- docs/resources/catalog.md | 1 + go.mod | 2 +- go.sum | 2 ++ 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/catalog/resource_catalog.go b/catalog/resource_catalog.go index 9a89352a57..7353c0bafa 100644 --- a/catalog/resource_catalog.go +++ b/catalog/resource_catalog.go @@ -27,15 +27,16 @@ func ucDirectoryPathSlashAndEmptySuppressDiff(k, old, new string, d *schema.Reso } type CatalogInfo struct { - Name string `json:"name"` - Comment string `json:"comment,omitempty"` - StorageRoot string `json:"storage_root,omitempty" tf:"force_new"` - ProviderName string `json:"provider_name,omitempty" tf:"force_new,conflicts:storage_root"` - ShareName string `json:"share_name,omitempty" tf:"force_new,conflicts:storage_root"` - Properties map[string]string `json:"properties,omitempty"` - Owner string `json:"owner,omitempty" tf:"computed"` - IsolationMode string `json:"isolation_mode,omitempty" tf:"computed"` - MetastoreID string `json:"metastore_id,omitempty" tf:"computed"` + Name string `json:"name"` + Comment string `json:"comment,omitempty"` + StorageRoot string `json:"storage_root,omitempty" tf:"force_new"` + ProviderName string `json:"provider_name,omitempty" tf:"force_new,conflicts:storage_root"` + ShareName string `json:"share_name,omitempty" tf:"force_new,conflicts:storage_root"` + ConnectionName string `json:"connection_name,omitempty" tf:"force_new,conflicts:storage_root"` + Properties map[string]string `json:"properties,omitempty"` + Owner string `json:"owner,omitempty" tf:"computed"` + IsolationMode string `json:"isolation_mode,omitempty" tf:"computed"` + MetastoreID string `json:"metastore_id,omitempty" tf:"computed"` } func ResourceCatalog() *schema.Resource { diff --git a/docs/resources/catalog.md b/docs/resources/catalog.md index b070fa36bc..3e0434edc1 100644 --- a/docs/resources/catalog.md +++ b/docs/resources/catalog.md @@ -28,6 +28,7 @@ 
The following arguments are required: * `storage_root` - (Optional) Managed location of the catalog. Location in cloud storage where data for managed tables will be stored. If not specified, the location will default to the metastore root location. Change forces creation of a new resource. * `provider_name` - (Optional) For Delta Sharing Catalogs: the name of the delta sharing provider. Change forces creation of a new resource. * `share_name` - (Optional) For Delta Sharing Catalogs: the name of the share under the share provider. Change forces creation of a new resource. +* `connection_name` - (Optional) For Foreign Catalogs: the name of the connection to an external data source. Changes forces creation of a new resource. * `owner` - (Optional) Username/groupname/sp application_id of the catalog owner. * `isolation_mode` - (Optional) Whether the catalog is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATED` or `OPEN`. Setting the catalog to `ISOLATED` will automatically allow access from the current workspace. * `comment` - (Optional) User-supplied free-form text. diff --git a/go.mod b/go.mod index b8261a0306..f3107f5d3c 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/databricks/terraform-provider-databricks go 1.19 require ( - github.com/databricks/databricks-sdk-go v0.13.0 + github.com/databricks/databricks-sdk-go v0.14.1 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 github.com/hashicorp/hcl v1.0.0 diff --git a/go.sum b/go.sum index 53257ae08e..4167dc6a54 100644 --- a/go.sum +++ b/go.sum @@ -51,6 +51,8 @@ github.com/databricks/databricks-sdk-go v0.12.0 h1:VgMJpvEiyRRrJ0mQx22Rkc73zjxUe github.com/databricks/databricks-sdk-go v0.12.0/go.mod h1:h/oWnnfWcJQAotAhZS/GMnlcaE/8WhuZ5Vj7el/6Gn8= github.com/databricks/databricks-sdk-go v0.13.0 h1:Npi4laUUmcOPDPdJf2ZMGFUtybpf4LK6n5NQY56Ya2Q= github.com/databricks/databricks-sdk-go v0.13.0/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= +github.com/databricks/databricks-sdk-go v0.14.1 h1:s9x18c2i6XbJxem6zKdTrrwEUXQX/Nzn0iVM+qGlRus= +github.com/databricks/databricks-sdk-go v0.14.1/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= From 207ec02da7a445a685ab7c73db1744ab05591da3 Mon Sep 17 00:00:00 2001 From: Vuong Date: Fri, 28 Jul 2023 20:22:43 +0100 Subject: [PATCH 03/22] update doc --- docs/resources/connection.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/resources/connection.md b/docs/resources/connection.md index 80f59880b0..b19f31780e 100644 --- a/docs/resources/connection.md +++ b/docs/resources/connection.md @@ -33,8 +33,8 @@ resource "databricks_connection" "mysql" { The following arguments are supported: -- `name` - Name of the Connection -- `connection_type` - Connection type. `MYSQL` `POSTGRESQL` `SNOWFLAKE` `REDSHIFT` `SQLDW` `SQLSERVER` or `DATABRICKS`. +- `name` - Name of the Connection. +- `connection_type` - Connection type. `MYSQL` `POSTGRESQL` `SNOWFLAKE` `REDSHIFT` `SQLDW` `SQLSERVER` or `DATABRICKS` are supported. [Up-to-date list of connection type supported](https://docs.databricks.com/query-federation/index.html#supported-data-sources) - `options` - The key value of options required by the connection, e.g. 
`host`, `port`, `user` and `password`. - `owner` - (Optional) Name of the connection owner. - `properties` - (Optional) Free-form connection properties. From bacb45cd39026462e5215560fe13fa718ff614b3 Mon Sep 17 00:00:00 2001 From: marekbrysa <53767523+marekbrysa@users.noreply.github.com> Date: Sat, 29 Jul 2023 12:41:58 +0200 Subject: [PATCH 04/22] Fixed `databricks_job` resource to clear instance-specific attributes when `instance_pool_id` is specified (#2507) NodeTypeID cannot be set in jobsAPI.Update() if InstancePoolID is specified. If both are specified, assume InstancePoolID takes precedence and NodeTypeID is only computed. Closes #2502. Closes #2141. --- clusters/clusters_api.go | 2 +- clusters/resource_cluster_test.go | 2 +- jobs/resource_job.go | 19 ++++++ jobs/resource_job_test.go | 107 ++++++++++++++++++++++++++++++ 4 files changed, 128 insertions(+), 2 deletions(-) diff --git a/clusters/clusters_api.go b/clusters/clusters_api.go index c97f44dbd6..9576ad1b08 100644 --- a/clusters/clusters_api.go +++ b/clusters/clusters_api.go @@ -458,7 +458,7 @@ func (cluster *Cluster) ModifyRequestOnInstancePool() { cluster.AwsAttributes = &awsAttributes } if cluster.AzureAttributes != nil { - cluster.AzureAttributes = nil + cluster.AzureAttributes = &AzureAttributes{} } if cluster.GcpAttributes != nil { gcpAttributes := GcpAttributes{ diff --git a/clusters/resource_cluster_test.go b/clusters/resource_cluster_test.go index 1a254e0f8d..71b5a868d8 100644 --- a/clusters/resource_cluster_test.go +++ b/clusters/resource_cluster_test.go @@ -1567,7 +1567,7 @@ func TestModifyClusterRequestAzure(t *testing.T) { DriverNodeTypeID: "e", } c.ModifyRequestOnInstancePool() - assert.Nil(t, c.AzureAttributes) + assert.Equal(t, &AzureAttributes{}, c.AzureAttributes) assert.Equal(t, "", c.NodeTypeID) assert.Equal(t, "", c.DriverNodeTypeID) assert.Equal(t, false, c.EnableElasticDisk) diff --git a/jobs/resource_job.go b/jobs/resource_job.go index c62a790860..81b6c94d85 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -747,6 +747,22 @@ func (c controlRunStateLifecycleManager) OnUpdate(ctx context.Context) error { return api.StopActiveRun(jobID, c.d.Timeout(schema.TimeoutUpdate)) } +func prepareJobSettingsForUpdate(js JobSettings) { + if js.NewCluster != nil { + js.NewCluster.ModifyRequestOnInstancePool() + } + for _, task := range js.Tasks { + if task.NewCluster != nil { + task.NewCluster.ModifyRequestOnInstancePool() + } + } + for _, jc := range js.JobClusters { + if jc.NewCluster != nil { + jc.NewCluster.ModifyRequestOnInstancePool() + } + } +} + func ResourceJob() *schema.Resource { getReadCtx := func(ctx context.Context, d *schema.ResourceData) context.Context { var js JobSettings @@ -823,6 +839,9 @@ func ResourceJob() *schema.Resource { if js.isMultiTask() { ctx = context.WithValue(ctx, common.Api, common.API_2_1) } + + prepareJobSettingsForUpdate(js) + jobsAPI := NewJobsAPI(ctx, c) err := jobsAPI.Update(d.Id(), js) if err != nil { diff --git a/jobs/resource_job_test.go b/jobs/resource_job_test.go index b86ebeb8d6..3ddc416c7f 100644 --- a/jobs/resource_job_test.go +++ b/jobs/resource_job_test.go @@ -1419,6 +1419,113 @@ func TestResourceJobUpdate(t *testing.T) { assert.Equal(t, "Featurizer New", d.Get("name")) } +func TestResourceJobUpdate_NodeTypeToInstancePool(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "POST", + Resource: "/api/2.1/jobs/reset", + ExpectedRequest: UpdateJobRequest{ + JobID: 789, + NewSettings: &JobSettings{ + NewCluster: 
&clusters.Cluster{ + InstancePoolID: "instance-pool-worker", + DriverInstancePoolID: "instance-pool-driver", + SparkVersion: "spark-1", + NumWorkers: 1, + }, + Tasks: []JobTaskSettings{ + { + NewCluster: &clusters.Cluster{ + InstancePoolID: "instance-pool-worker-task", + DriverInstancePoolID: "instance-pool-driver-task", + SparkVersion: "spark-2", + NumWorkers: 2, + }, + }, + }, + JobClusters: []JobCluster{ + { + NewCluster: &clusters.Cluster{ + InstancePoolID: "instance-pool-worker-job", + DriverInstancePoolID: "instance-pool-driver-job", + SparkVersion: "spark-3", + NumWorkers: 3, + }, + }, + }, + Name: "Featurizer New", + MaxRetries: 3, + MinRetryIntervalMillis: 5000, + RetryOnTimeout: true, + MaxConcurrentRuns: 1, + }, + }, + }, + { + Method: "GET", + Resource: "/api/2.1/jobs/get?job_id=789", + Response: Job{ + JobID: 789, + Settings: &JobSettings{ + NewCluster: &clusters.Cluster{ + NodeTypeID: "node-type-id", + DriverNodeTypeID: "driver-node-type-id", + }, + Name: "Featurizer New", + MaxRetries: 3, + MinRetryIntervalMillis: 5000, + RetryOnTimeout: true, + MaxConcurrentRuns: 1, + }, + }, + }, + }, + ID: "789", + Update: true, + Resource: ResourceJob(), + InstanceState: map[string]string{ + "new_cluster.0.node_type_id": "node-type-id-worker", + "new_cluster.0.driver_node_type_id": "node-type-id-driver", + "task.0.new_cluster.0.node_type_id": "node-type-id-worker-task", + "task.0.new_cluster.0.driver_node_type_id": "node-type-id-driver-task", + "job_cluster.0.new_cluster.0.node_type_id": "node-type-id-worker-job", + "job_cluster.0.new_cluster.0.driver_node_type_id": "node-type-id-driver-job", + }, + HCL: ` + new_cluster = { + instance_pool_id = "instance-pool-worker" + driver_instance_pool_id = "instance-pool-driver" + spark_version = "spark-1" + num_workers = 1 + } + task = { + new_cluster = { + instance_pool_id = "instance-pool-worker-task" + driver_instance_pool_id = "instance-pool-driver-task" + spark_version = "spark-2" + num_workers = 2 + } + } + job_cluster = { + new_cluster = { + instance_pool_id = "instance-pool-worker-job" + driver_instance_pool_id = "instance-pool-driver-job" + spark_version = "spark-3" + num_workers = 3 + } + } + max_concurrent_runs = 1 + max_retries = 3 + min_retry_interval_millis = 5000 + name = "Featurizer New" + retry_on_timeout = true`, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "789", d.Id(), "Id should be the same as in reading") + assert.Equal(t, "Featurizer New", d.Get("name")) +} + func TestResourceJobUpdate_Tasks(t *testing.T) { qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ From 9bb853f8fb4e965d8da183f4af89f5bda6a736a4 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Mon, 31 Jul 2023 10:36:13 +0200 Subject: [PATCH 05/22] Added `full_refresh` attribute to the `pipeline_task` in `databricks_job` (#2444) This allows to force full refresh of the pipeline from the job. This fixes #2362 --- docs/resources/job.md | 1 + jobs/resource_job.go | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/resources/job.md b/docs/resources/job.md index c242216107..56027c1133 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -233,6 +233,7 @@ You can invoke Spark submit tasks only on new clusters. **In the `new_cluster` s ### pipeline_task Configuration Block * `pipeline_id` - (Required) The pipeline's unique ID. +* `full_refresh` - (Optional) (Bool) Specifies if there should be full refresh of the pipeline. 
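For illustration, a sketch of the new flag used inside a job's `pipeline_task` block; the pipeline reference and the `task_key` are placeholders following the job resource's conventions rather than values taken from this patch:

```hcl
resource "databricks_job" "dlt_full_refresh" {
  name = "trigger-full-refresh"

  task {
    task_key = "refresh"

    pipeline_task {
      # placeholder reference to an existing databricks_pipeline resource
      pipeline_id  = databricks_pipeline.this.id
      full_refresh = true
    }
  }
}
```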
-> **Note** The following configuration blocks are only supported inside a `task` block diff --git a/jobs/resource_job.go b/jobs/resource_job.go index 81b6c94d85..85d2800229 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -60,7 +60,8 @@ type PythonWheelTask struct { // PipelineTask contains the information for pipeline jobs type PipelineTask struct { - PipelineID string `json:"pipeline_id"` + PipelineID string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` } type SqlQueryTask struct { From 1087e85ac6e5a6cac313b3c7c77816352752a2b0 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Mon, 31 Jul 2023 10:51:51 +0200 Subject: [PATCH 06/22] Configured merge queue for the provider (#2533) --- .github/workflows/push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 6efaa5fc26..7b656109e5 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -3,8 +3,8 @@ name: build on: pull_request: types: [opened, synchronize] - push: - branches: [master] + merge_group: + types: [checks_requested] jobs: tests: From 60b98d15b5f0f932c02cf27eedd7affb859bd1eb Mon Sep 17 00:00:00 2001 From: vuong-nguyen <44292934+nkvuong@users.noreply.github.com> Date: Mon, 31 Jul 2023 15:56:29 +0100 Subject: [PATCH 07/22] misc doc updates (#2516) --- docs/data-sources/aws_bucket_policy.md | 14 ++++++---- docs/guides/aws-workspace.md | 29 +++++++++++--------- docs/guides/unity-catalog.md | 22 ++++++++++----- docs/resources/mws_log_delivery.md | 10 +++++-- docs/resources/mws_networks.md | 4 +-- docs/resources/mws_storage_configurations.md | 8 ++++-- docs/resources/mws_workspaces.md | 10 +++++-- docs/resources/volume.md | 3 ++ 8 files changed, 64 insertions(+), 36 deletions(-) diff --git a/docs/data-sources/aws_bucket_policy.md b/docs/data-sources/aws_bucket_policy.md index 2d49b78bf2..10efc0f8e8 100644 --- a/docs/data-sources/aws_bucket_policy.md +++ b/docs/data-sources/aws_bucket_policy.md @@ -3,7 +3,7 @@ subcategory: "Deployment" --- # databricks_aws_bucket_policy Data Source -This datasource configures a simple access policy for AWS S3 buckets, so that Databricks can access data in it. +This datasource configures a simple access policy for AWS S3 buckets, so that Databricks can access data in it. ## Example Usage @@ -30,15 +30,19 @@ Bucket policy with full access: resource "aws_s3_bucket" "ds" { bucket = "${var.prefix}-ds" acl = "private" - versioning { - enabled = false - } force_destroy = true tags = merge(var.tags, { Name = "${var.prefix}-ds" }) } +resource "aws_s3_bucket_versioning" "ds_versioning" { + bucket = aws_s3_bucket.ds.id + versioning_configuration { + status = "Disabled" + } +} + data "aws_iam_policy_document" "assume_role_for_ec2" { statement { effect = "Allow" @@ -74,7 +78,7 @@ resource "aws_s3_bucket_policy" "ds" { * `bucket` - (Required) AWS S3 Bucket name for which to generate the policy document. * `full_access_role` - (Optional) Data access role that can have full access for this bucket -* `databricks_e2_account_id` - (Optional) Your Databricks E2 account ID. Used to generate restrictive IAM policies that will increase the security of your root bucket +* `databricks_e2_account_id` - (Optional) Your Databricks E2 account ID. 
Used to generate restrictive IAM policies that will increase the security of your root bucket ## Attribute Reference diff --git a/docs/guides/aws-workspace.md b/docs/guides/aws-workspace.md index a1ae58bfe4..370bd43909 100644 --- a/docs/guides/aws-workspace.md +++ b/docs/guides/aws-workspace.md @@ -41,11 +41,12 @@ locals { ``` Before [managing workspace](workspace-management.md), you have to create: - - [VPC](#vpc) - - [Root bucket](#root-bucket) - - [Cross-account role](#cross-account-iam-role) - - [Databricks E2 workspace](#databricks-e2-workspace) - - [Host and Token outputs](#provider-configuration) + +- [VPC](#vpc) +- [Root bucket](#root-bucket) +- [Cross-account role](#cross-account-iam-role) +- [Databricks E2 workspace](#databricks-e2-workspace) +- [Host and Token outputs](#provider-configuration) > Initialize provider with `alias = "mws"` and use `provider = databricks.mws` for all `databricks_mws_*` resources. We require all `databricks_mws_*` resources to be created within its own dedicated terraform module of your environment. Usually this module creates VPC and IAM roles as well. @@ -203,9 +204,6 @@ Once [VPC](#vpc) is ready, create AWS S3 bucket for DBFS workspace storage, whic resource "aws_s3_bucket" "root_storage_bucket" { bucket = "${local.prefix}-rootbucket" acl = "private" - versioning { - enabled = false - } force_destroy = true tags = merge(var.tags, { Name = "${local.prefix}-rootbucket" @@ -241,6 +239,13 @@ resource "aws_s3_bucket_policy" "root_bucket_policy" { depends_on = [aws_s3_bucket_public_access_block.root_storage_bucket] } +resource "aws_s3_bucket_versioning" "root_bucket_versioning" { + bucket = aws_s3_bucket.root_storage_bucket.id + versioning_configuration { + status = "Disabled" + } +} + resource "databricks_mws_storage_configurations" "this" { provider = databricks.mws account_id = var.databricks_account_id @@ -303,14 +308,14 @@ provider "databricks" { token = module.e2.token_value } ``` -We assume that you have a terraform module in your project that creats a workspace (using [Databricks E2 Workspace](#databricks-e2-workspace) section) and you named it as `e2` while calling it in the **main.tf** file of your terraform project. And `workspace_url` and `token_value` are the output attributes of that module. This provider configuration will allow you to use the generated token during workspace creation to authenticate to the created workspace. +We assume that you have a terraform module in your project that creats a workspace (using [Databricks E2 Workspace](#databricks-e2-workspace) section) and you named it as `e2` while calling it in the **main.tf** file of your terraform project. And `workspace_url` and `token_value` are the output attributes of that module. This provider configuration will allow you to use the generated token during workspace creation to authenticate to the created workspace. ### Credentials validation checks errors Due to a bug in the Terraform AWS provider (spotted in v3.28) the Databricks AWS cross-account policy creation and attachment to the IAM role takes longer than the AWS request confirmation to Terraform. As Terraform continues creating the Workspace, validation checks for the credentials are failing, as the policy doesn't get applied quick enough. 
Showing the error: -``` +```sh Error: MALFORMED_REQUEST: Failed credentials validation checks: Spot Cancellation, Create Placement Group, Delete Tags, Describe Availability Zones, Describe instances, Describe Instance Status, Describe Placement Group, Describe Route Tables, Describe Security Groups, Describe Spot Instances, Describe Spot Price History, Describe Subnets, Describe Volumes, Describe Vpcs, Request Spot Instances (400 on /api/2.0/accounts/{UUID}/workspaces) ``` @@ -329,7 +334,7 @@ resource "time_sleep" "wait" { If you notice below error: -``` +```sh Error: MALFORMED_REQUEST: Failed credentials validation checks: Spot Cancellation, Create Placement Group, Delete Tags, Describe Availability Zones, Describe instances, Describe Instance Status, Describe Placement Group, Describe Route Tables, Describe Security Groups, Describe Spot Instances, Describe Spot Price History, Describe Subnets, Describe Volumes, Describe Vpcs, Request Spot Instances ``` @@ -337,8 +342,6 @@ Error: MALFORMED_REQUEST: Failed credentials validation checks: Spot Cancellatio ![create_workspace_error](https://github.com/databricks/terraform-provider-databricks/raw/master/docs/images/create_workspace_error.png) - - Verify if the role and policy exists (assume role should allow external id) ![iam_role_trust_error](https://github.com/databricks/terraform-provider-databricks/raw/master/docs/images/iam_role_trust_error.png) - diff --git a/docs/guides/unity-catalog.md b/docs/guides/unity-catalog.md index 0a5ef61b88..6dc7c98610 100644 --- a/docs/guides/unity-catalog.md +++ b/docs/guides/unity-catalog.md @@ -132,15 +132,12 @@ The first step is to create the required AWS objects: - An S3 bucket, which is the default storage location for managed tables in Unity Catalog. Please use a dedicated bucket for each metastore. - An IAM policy that provides Unity Catalog permissions to access and manage data in the bucket. Note that `` is *optional*. If encryption is enabled, provide the name of the KMS key that encrypts the S3 bucket contents. *If encryption is disabled, remove the entire KMS section of the IAM policy.* -- An IAM role that is associated with the IAM policy and will be assumed by Unity Catalog. +- An IAM role that is associated with the IAM policy and will be assumed by Unity Catalog. ```hcl resource "aws_s3_bucket" "metastore" { bucket = "${local.prefix}-metastore" acl = "private" - versioning { - enabled = false - } force_destroy = true tags = merge(local.tags, { Name = "${local.prefix}-metastore" @@ -156,6 +153,13 @@ resource "aws_s3_bucket_public_access_block" "metastore" { depends_on = [aws_s3_bucket.metastore] } +resource "aws_s3_bucket_versioning" "metastore_versioning" { + bucket = aws_s3_bucket.metastore.id + versioning_configuration { + status = "Disabled" + } +} + data "aws_iam_policy_document" "passrole_for_uc" { statement { effect = "Allow" @@ -391,9 +395,6 @@ First, create the required objects in AWS. 
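# The resources sketched below back a Unity Catalog external location: a dedicated
# S3 bucket, its public-access block, and — as updated in this patch — bucket
# versioning configured through a separate aws_s3_bucket_versioning resource
# with status "Disabled" instead of the deprecated inline versioning block.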
resource "aws_s3_bucket" "external" { bucket = "${local.prefix}-external" acl = "private" - versioning { - enabled = false - } // destroy all objects with bucket destroy force_destroy = true tags = merge(local.tags, { @@ -401,6 +402,13 @@ resource "aws_s3_bucket" "external" { }) } +resource "aws_s3_bucket_versioning" "external_versioning" { + bucket = aws_s3_bucket.external.id + versioning_configuration { + status = "Disabled" + } +} + resource "aws_s3_bucket_public_access_block" "external" { bucket = aws_s3_bucket.external.id ignore_public_acls = true diff --git a/docs/resources/mws_log_delivery.md b/docs/resources/mws_log_delivery.md index 1da8264e5c..c3b9f870b1 100644 --- a/docs/resources/mws_log_delivery.md +++ b/docs/resources/mws_log_delivery.md @@ -23,9 +23,6 @@ variable "databricks_account_id" { resource "aws_s3_bucket" "logdelivery" { bucket = "${var.prefix}-logdelivery" acl = "private" - versioning { - enabled = false - } force_destroy = true tags = merge(var.tags, { Name = "${var.prefix}-logdelivery" @@ -42,6 +39,13 @@ data "databricks_aws_assume_role_policy" "logdelivery" { for_log_delivery = true } +resource "aws_s3_bucket_versioning" "logdelivery_versioning" { + bucket = aws_s3_bucket.logdelivery.id + versioning_configuration { + status = "Disabled" + } +} + resource "aws_iam_role" "logdelivery" { name = "${var.prefix}-logdelivery" description = "(${var.prefix}) UsageDelivery role" diff --git a/docs/resources/mws_networks.md b/docs/resources/mws_networks.md index e7436034bb..ea97a8a369 100644 --- a/docs/resources/mws_networks.md +++ b/docs/resources/mws_networks.md @@ -104,8 +104,6 @@ resource "databricks_mws_networks" "this" { ### Creating a Databricks on GCP workspace --> **Public Preview** This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html) on GCP. - ```hcl variable "databricks_account_id" { description = "Account Id that could be found in the bottom left corner of https://accounts.cloud.databricks.com/" @@ -231,5 +229,5 @@ The following resources are used in the same context: * [Provisioning Databricks on GCP](../guides/gcp-workspace.md) guide. * [Provisioning Databricks workspaces on GCP with Private Service Connect](../guides/gcp-private-service-connect-workspace.md) guide. * [databricks_mws_vpc_endpoint](mws_vpc_endpoint.md) to register [aws_vpc_endpoint](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/vpc_endpoint) resources with Databricks such that they can be used as part of a [databricks_mws_networks](mws_networks.md) configuration. -* [databricks_mws_private_access_settings](mws_private_access_settings.md) to create a Private Access Setting that can be used as part of a [databricks_mws_workspaces](mws_workspaces.md) resource to create a [Databricks Workspace that leverages AWS PrivateLink](https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html) or [GCP Private Service Connect] (https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/private-service-connect.html). 
+* [databricks_mws_private_access_settings](mws_private_access_settings.md) to create a Private Access Setting that can be used as part of a [databricks_mws_workspaces](mws_workspaces.md) resource to create a [Databricks Workspace that leverages AWS PrivateLink](https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html) or [GCP Private Service Connect](https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/private-service-connect.html). * [databricks_mws_workspaces](mws_workspaces.md) to set up [workspaces in E2 architecture on AWS](https://docs.databricks.com/getting-started/overview.html#e2-architecture-1). diff --git a/docs/resources/mws_storage_configurations.md b/docs/resources/mws_storage_configurations.md index 0d0073e9a0..6c9efab874 100644 --- a/docs/resources/mws_storage_configurations.md +++ b/docs/resources/mws_storage_configurations.md @@ -23,8 +23,12 @@ variable "databricks_account_id" { resource "aws_s3_bucket" "root_storage_bucket" { bucket = "${var.prefix}-rootbucket" acl = "private" - versioning { - enabled = false +} + +resource "aws_s3_bucket_versioning" "root_versioning" { + bucket = aws_s3_bucket.root_storage_bucket.id + versioning_configuration { + status = "Disabled" } } diff --git a/docs/resources/mws_workspaces.md b/docs/resources/mws_workspaces.md index 1e4a3ad007..1a233c9605 100644 --- a/docs/resources/mws_workspaces.md +++ b/docs/resources/mws_workspaces.md @@ -137,13 +137,17 @@ resource "databricks_mws_credentials" "this" { resource "aws_s3_bucket" "root_storage_bucket" { bucket = "${local.prefix}-rootbucket" acl = "private" - versioning { - enabled = false - } force_destroy = true tags = var.tags } +resource "aws_s3_bucket_versioning" "root_versioning" { + bucket = aws_s3_bucket.root_storage_bucket.id + versioning_configuration { + status = "Disabled" + } +} + resource "aws_s3_bucket_server_side_encryption_configuration" "root_storage_bucket" { bucket = aws_s3_bucket.root_storage_bucket.bucket diff --git a/docs/resources/volume.md b/docs/resources/volume.md index ed4989af04..076c4bfab9 100644 --- a/docs/resources/volume.md +++ b/docs/resources/volume.md @@ -3,6 +3,8 @@ subcategory: "Unity Catalog" --- # databricks_volume (Resource) +-> **Public Preview** This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html). + Volumes are Unity Catalog objects representing a logical volume of storage in a cloud object storage location. Volumes provide capabilities for accessing, storing, governing, and organizing files. While tables provide governance over tabular datasets, volumes add governance over non-tabular datasets. You can use volumes to store and access files in any format, including structured, semi-structured, and unstructured data. A volume resides in the third layer of Unity Catalog’s three-level namespace. Volumes are siblings to tables, views, and other objects organized under a schema in Unity Catalog. @@ -14,6 +16,7 @@ A **managed volume** is a Unity Catalog-governed storage volume created within t An **external volume** is a Unity Catalog-governed storage volume registered against a directory within an external location. A volume can be referenced using its identifier: ```..```, where: + * ``````: The name of the catalog containing the Volume. * ``````: The name of the schema containing the Volume. * ``````: The name of the Volume. It identifies the volume object. 
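The empty inline-code placeholders in the list above stand for the catalog, schema, and volume names, so a volume is addressed as `catalog_name.schema_name.volume_name`. A minimal sketch under that namespace; the attribute names (`catalog_name`, `schema_name`, `volume_type`) follow the resource's argument reference, which is outside this excerpt, so treat them as assumptions:

```hcl
resource "databricks_volume" "this" {
  name         = "my_volume" # third level of the identifier
  catalog_name = "main"      # first level
  schema_name  = "default"   # second level
  volume_type  = "MANAGED"   # assumed: a managed volume, no external location
  comment      = "managed volume referenced as main.default.my_volume"
}
```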
From 5ce7ad664b7fe020632852bcf93e4d992931c9f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Jul 2023 14:57:35 +0000 Subject: [PATCH 08/22] Bump github.com/databricks/databricks-sdk-go from 0.13.0 to 0.14.1 (#2523) Bumps [github.com/databricks/databricks-sdk-go](https://github.com/databricks/databricks-sdk-go) from 0.13.0 to 0.14.1. - [Release notes](https://github.com/databricks/databricks-sdk-go/releases) - [Changelog](https://github.com/databricks/databricks-sdk-go/blob/main/CHANGELOG.md) - [Commits](https://github.com/databricks/databricks-sdk-go/compare/v0.13.0...v0.14.1) --- updated-dependencies: - dependency-name: github.com/databricks/databricks-sdk-go dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Miles Yucht --- go.mod | 2 - go.sum | 173 ++------------------------------------------------------- 2 files changed, 5 insertions(+), 170 deletions(-) diff --git a/go.mod b/go.mod index f3107f5d3c..00d6cca22d 100644 --- a/go.mod +++ b/go.mod @@ -57,9 +57,7 @@ require ( github.com/oklog/run v1.1.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect - github.com/vmihailenco/msgpack/v4 v4.3.12 // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect - github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect go.opencensus.io v0.24.0 // indirect golang.org/x/crypto v0.11.0 // indirect diff --git a/go.sum b/go.sum index 4167dc6a54..ad6ac76be8 100644 --- a/go.sum +++ b/go.sum @@ -1,39 +1,20 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/compute v1.19.3 h1:DcTwsFgGev/wV5+q8o2fzgcHOaac+DKGC91ZlvpsQds= -cloud.google.com/go/compute v1.19.3/go.mod h1:qxvISKp/gYnXkSAD1ppcSOveRAmzxicEv/JlizULFrI= cloud.google.com/go/compute v1.22.0 h1:cB8R6FtUtT1TYGl5R3xuxnW6OUIc/DrT2aiR16TTG7Y= cloud.google.com/go/compute v1.22.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/sprig/v3 v3.2.1/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= -github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= -github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk= -github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= -github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 h1:YoJbenK9C67SkzkDfmQuVln04ygHj3vjZfd9FL+GmQQ= -github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo= github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 
h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs= github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= -github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk= -github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= -github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= -github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -46,18 +27,11 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/databricks/databricks-sdk-go v0.12.0 h1:VgMJpvEiyRRrJ0mQx22Rkc73zjxUe125Ou9c5C99phM= -github.com/databricks/databricks-sdk-go v0.12.0/go.mod h1:h/oWnnfWcJQAotAhZS/GMnlcaE/8WhuZ5Vj7el/6Gn8= -github.com/databricks/databricks-sdk-go v0.13.0 h1:Npi4laUUmcOPDPdJf2ZMGFUtybpf4LK6n5NQY56Ya2Q= -github.com/databricks/databricks-sdk-go v0.13.0/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= github.com/databricks/databricks-sdk-go v0.14.1 h1:s9x18c2i6XbJxem6zKdTrrwEUXQX/Nzn0iVM+qGlRus= github.com/databricks/databricks-sdk-go v0.14.1/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= -github.com/emirpasic/gods v1.12.0/go.mod 
h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -65,23 +39,12 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= -github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34= -github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= github.com/go-git/go-billy/v5 v5.4.1 h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4= -github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0= -github.com/go-git/go-git/v5 v5.4.2 h1:BXyZu9t0VkbiHtqrsvdq39UDhGJTl1h55VW6CSC4aY4= -github.com/go-git/go-git/v5 v5.4.2/go.mod h1:gQ1kArt6d+n+BGd+/B/I74HwRTLhth2+zti4ihgckDc= github.com/go-git/go-git/v5 v5.6.1 h1:q4ZRqQl4pR/ZJHc1L5CFjGA1a10u76aV1iC+nh+bHsk= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= @@ -96,7 +59,6 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -123,45 +85,33 @@ github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= 
github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM= github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w= -github.com/googleapis/gax-go/v2 v2.11.0 h1:9V9PWXEsWnPpQhu/PeQIkS4eGzMlTLGgt80cUUI8Ki4= +github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= -github.com/hashicorp/go-hclog v1.4.0 h1:ctuWFGrhFha8BnnzxqeRGidlEcQkDyL5u8J8t5eA11I= -github.com/hashicorp/go-hclog v1.4.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.4.8 h1:CHGwpxYDOttQOY7HOWgETU9dyVjOXzniXDqJcYJE1zM= -github.com/hashicorp/go-plugin v1.4.8/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.6.0 
h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.5.0 h1:D9bl4KayIYKEeJ4vUDe9L5huqxZXczKaykSRcmQ0xY0= -github.com/hashicorp/hc-install v0.5.0/go.mod h1:JyzMfbzfSBSjoDCRPna1vi/24BEDxFaCPfdHtM5SCdo= github.com/hashicorp/hc-install v0.5.2 h1:SfwMFnEXVVirpwkDuSF5kymUOhrUxrTq3udEseZdOD0= github.com/hashicorp/hc-install v0.5.2/go.mod h1:9QISwe6newMWIfEiXpzuu1k9HAGtQYgnSH8H9T8wmoI= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= @@ -172,113 +122,61 @@ github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.18.1 h1:LAbfDvNQU1l0NOQlTuudjczVhHj061fNX5H8XZxHlH4= github.com/hashicorp/terraform-exec v0.18.1/go.mod h1:58wg4IeuAJ6LVsLUeD2DWZZoc/bYi6dzhLHzxM41980= -github.com/hashicorp/terraform-json v0.16.0 h1:UKkeWRWb23do5LNAFlh/K3N0ymn1qTOO8c+85Albo3s= -github.com/hashicorp/terraform-json v0.16.0/go.mod h1:v0Ufk9jJnk6tcIZvScHvetlKfiNTC+WS21mnXIlc0B0= github.com/hashicorp/terraform-json v0.17.1 h1:eMfvh/uWggKmY7Pmb3T85u86E2EQg6EQHgyRwf3RkyA= github.com/hashicorp/terraform-json v0.17.1/go.mod h1:Huy6zt6euxaY9knPAFKjUITn8QxUFIe9VuSzb4zn/0o= -github.com/hashicorp/terraform-plugin-go v0.14.3 h1:nlnJ1GXKdMwsC8g1Nh05tK2wsC3+3BL/DBBxFEki+j0= -github.com/hashicorp/terraform-plugin-go v0.14.3/go.mod h1:7ees7DMZ263q8wQ6E4RdIdR6nHHJtrdt4ogX5lPkX1A= github.com/hashicorp/terraform-plugin-go v0.18.0 h1:IwTkOS9cOW1ehLd/rG0y+u/TGLK9y6fGoBjXVUquzpE= github.com/hashicorp/terraform-plugin-go v0.18.0/go.mod h1:l7VK+2u5Kf2y+A+742GX0ouLut3gttudmvMgN0PA74Y= -github.com/hashicorp/terraform-plugin-log v0.8.0 h1:pX2VQ/TGKu+UU1rCay0OlzosNKe4Nz1pepLXj95oyy0= -github.com/hashicorp/terraform-plugin-log v0.8.0/go.mod h1:1myFrhVsBLeylQzYYEV17VVjtG8oYPRFdaZs7xdW2xs= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.26.1 h1:G9WAfb8LHeCxu7Ae8nc1agZlQOSCUWsb610iAogBhCs= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.26.1/go.mod h1:xcOSYlRVdPLmDUoqPhO9fiO/YCN/l6MGYeTzGt5jgkQ= github.com/hashicorp/terraform-plugin-sdk/v2 v2.27.0 h1:I8efBnjuDrgPjNF1MEypHy48VgcTIUY4X6rOFunrR3Y= github.com/hashicorp/terraform-plugin-sdk/v2 v2.27.0/go.mod h1:cUEP4ly/nxlHy5HzD6YRrHydtlheGvGRJDhiWqqVik4= -github.com/hashicorp/terraform-registry-address v0.1.0 h1:W6JkV9wbum+m516rCl5/NjKxCyTVaaUBbzYcMzBDO3U= -github.com/hashicorp/terraform-registry-address v0.1.0/go.mod h1:EnyO2jYO6j29DTHbJcm00E5nQTFeTtyZH3H5ycydQ5A= github.com/hashicorp/terraform-registry-address v0.2.1 h1:QuTf6oJ1+WSflJw6WYOHhLgwUiQ0FrROpHPYFtwTYWM= github.com/hashicorp/terraform-registry-address v0.2.1/go.mod h1:BSE9fIFzp0qWsJUUyGquo4ldV9k2n+psif6NYkBRS3Y= -github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734 h1:HKLsbzeOsfXmKNpr3GiT18XAblV0BjCbzL8KQAMZGa0= -github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= -github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d 
h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ= -github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= -github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= -github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= -github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck= -github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty 
v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/cli v1.1.5/go.mod h1:v8+iFts2sPIKUV1ltktPXMCC8fumSKFItNcD2cLtRR4= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= -github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= -github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/sebdah/goldie v1.0.0/go.mod h1:jXP4hmWywNEwZzhMuv2ccnqTSFpuq8iyQhtQdkkZBH4= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/spf13/cast 
v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/skeema/knownhosts v1.1.0 h1:Wvr9V0MxhjRbl3f9nMnKnFfiWTJmtECJ9Njkea3ysW0= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -291,58 +189,36 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/vmihailenco/msgpack/v4 v4.3.12 h1:07s4sz9IReOgdikxLTKNbBdqDMLsjPKXwvCazn8G65U= -github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU= github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= -github.com/vmihailenco/tagparser v0.1.1 h1:quXMXlA39OCbd2wAdTsGDlK9RkOk6Wuw+x37wVyIuWY= -github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= -github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc= -github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= -github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= -github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM= -golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA= -golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA= golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw= golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -350,28 +226,18 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
-golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= -golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.9.0 h1:BPpt2kU7oMRq3kCHAA1tbSEshXRw1LpG2ztgDwrzuAs= -golang.org/x/oauth2 v0.9.0/go.mod h1:qYgFZaFiu6Wg24azG8bdV52QJXJGbZzIIsRCdVKzbLw= golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8= golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -382,21 +248,14 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -406,20 +265,16 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s= -golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28= +golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -427,11 +282,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= 
-golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58= -golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= @@ -447,21 +299,16 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.129.0 h1:2XbdjjNfFPXQyufzQVwPf1RRnHH8Den2pfNE2jw7L8w= -google.golang.org/api v0.129.0/go.mod h1:dFjiXlanKwWE3612X97llhsoI36FAoIiRj3aTl5b/zE= google.golang.org/api v0.132.0 h1:8t2/+qZ26kAOGSmOiHwVycqVaDg7q3JDILrNi/Z6rvc= google.golang.org/api v0.132.0/go.mod h1:AeTBC6GpJnJSRJjktDcPX0QwtS8pGYZOV6MSuSCusw0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230717213848-3f92550aa753 h1:XUODHrpzJEUeWmVo/jfNTLj0YyVveOo28oE6vkFbkO4= google.golang.org/genproto/googleapis/rpc v0.0.0-20230717213848-3f92550aa753/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= @@ -472,8 +319,6 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -492,20 +337,12 @@ google.golang.org/protobuf v1.31.0/go.mod 
h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= -gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= From 05998516df3a252d211eda576c31d77111830df4 Mon Sep 17 00:00:00 2001 From: vuong-nguyen <44292934+nkvuong@users.noreply.github.com> Date: Mon, 31 Jul 2023 15:59:33 +0100 Subject: [PATCH 09/22] Fix IP ACL read (#2515) --- access/resource_ip_access_list.go | 2 +- access/resource_ip_access_list_test.go | 7 +------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/access/resource_ip_access_list.go b/access/resource_ip_access_list.go index c973d6a279..07c10d30b7 100644 --- a/access/resource_ip_access_list.go +++ b/access/resource_ip_access_list.go @@ -53,7 +53,7 @@ func ResourceIPAccessList() *schema.Resource { if err != nil { return err } - common.StructToData(status, s, d) + common.StructToData(status.IpAccessList, s, d) return nil }, Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { diff --git a/access/resource_ip_access_list_test.go b/access/resource_ip_access_list_test.go index 6398ee4a85..98e2344d3d 100644 --- a/access/resource_ip_access_list_test.go +++ b/access/resource_ip_access_list_test.go @@ -205,7 +205,7 @@ func TestIPACLRead(t *testing.T) { { Method: http.MethodGet, Resource: "/api/2.0/ip-access-lists/" + TestingId + "?", - Response: settings.CreateIpAccessListResponse{ + Response: settings.FetchIpAccessListResponse{ IpAccessList: &settings.IpAccessListInfo{ ListId: TestingId, Label: TestingLabel, @@ -221,11 +221,6 @@ func TestIPACLRead(t *testing.T) { }, }, }, - State: map[string]any{ - "label": TestingLabel, - "list_type": TestingListTypeString, - "ip_addresses": TestingIpAddressesState, - }, Resource: ResourceIPAccessList(), Read: true, New: true, From 
345184c38be4dcdfb3adc35cba0015c6cfa61646 Mon Sep 17 00:00:00 2001 From: bvdboom Date: Mon, 31 Jul 2023 17:01:47 +0200 Subject: [PATCH 10/22] Add support for `USE_MARKETPLACE_ASSETS` privilege to metastore (#2505) * Update docs to include USE_MARKETPLACE_ASSETS privilege * Add USE_MARKETPLACE_ASSETS to metastore privileges --- catalog/resource_grants.go | 1 + docs/resources/grants.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/catalog/resource_grants.go b/catalog/resource_grants.go index 9e71c98af5..425f657c1f 100644 --- a/catalog/resource_grants.go +++ b/catalog/resource_grants.go @@ -246,6 +246,7 @@ var mapping = securableMapping{ "USE_PROVIDER": true, "USE_SHARE": true, "USE_RECIPIENT": true, + "USE_MARKETPLACE_ASSETS": true, "SET_SHARE_PERMISSION": true, }, "function": { diff --git a/docs/resources/grants.md b/docs/resources/grants.md index ce4ea1f7d8..c7c53b29e7 100644 --- a/docs/resources/grants.md +++ b/docs/resources/grants.md @@ -38,7 +38,7 @@ Unlike the [SQL specification](https://docs.databricks.com/sql/language-manual/s ## Metastore grants -You can grant `CREATE_CATALOG`, `CREATE_CONNECTION`, `CREATE_EXTERNAL_LOCATION`, `CREATE_PROVIDER`, `CREATE_RECIPIENT`, `CREATE_SHARE`, `SET_SHARE_PERMISSION`, `USE_CONNECTION`, `USE_PROVIDER`, `USE_RECIPIENT` and `USE_SHARE` privileges to [databricks_metastore](metastore.md) id specified in `metastore` attribute. +You can grant `CREATE_CATALOG`, `CREATE_CONNECTION`, `CREATE_EXTERNAL_LOCATION`, `CREATE_PROVIDER`, `CREATE_RECIPIENT`, `CREATE_SHARE`, `SET_SHARE_PERMISSION`, `USE_MARKETPLACE_ASSETS`, `USE_CONNECTION`, `USE_PROVIDER`, `USE_RECIPIENT` and `USE_SHARE` privileges to [databricks_metastore](metastore.md) id specified in `metastore` attribute. ```hcl resource "databricks_grants" "sandbox" { From 52af856c912ab51e09004e7631cdb400d8835911 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 1 Aug 2023 14:28:26 +0200 Subject: [PATCH 11/22] Add git job_source to job resource (#2538) * Add git job_source to job resource * lint * fix test * Use go sdk type --- jobs/resource_job.go | 11 ++++++----- jobs/resource_job_test.go | 13 ++++++++++++- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/jobs/resource_job.go b/jobs/resource_job.go index 85d2800229..3dd2a421f7 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -161,11 +161,12 @@ type CronSchedule struct { // BEGIN Jobs + Repo integration preview type GitSource struct { - Url string `json:"git_url" tf:"alias:url"` - Provider string `json:"git_provider,omitempty" tf:"alias:provider"` - Branch string `json:"git_branch,omitempty" tf:"alias:branch"` - Tag string `json:"git_tag,omitempty" tf:"alias:tag"` - Commit string `json:"git_commit,omitempty" tf:"alias:commit"` + Url string `json:"git_url" tf:"alias:url"` + Provider string `json:"git_provider,omitempty" tf:"alias:provider"` + Branch string `json:"git_branch,omitempty" tf:"alias:branch"` + Tag string `json:"git_tag,omitempty" tf:"alias:tag"` + Commit string `json:"git_commit,omitempty" tf:"alias:commit"` + JobSource *jobs.JobSource `json:"job_source,omitempty"` } // End Jobs + Repo integration preview diff --git a/jobs/resource_job_test.go b/jobs/resource_job_test.go index 3ddc416c7f..617a63c54b 100644 --- a/jobs/resource_job_test.go +++ b/jobs/resource_job_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/compute" + 
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/terraform-provider-databricks/clusters" "github.com/databricks/terraform-provider-databricks/common" "github.com/databricks/terraform-provider-databricks/libraries" @@ -1081,6 +1082,11 @@ func TestResourceJobCreateFromGitSource(t *testing.T) { Url: "https://github.com/databricks/terraform-provider-databricks", Tag: "0.4.8", Provider: "gitHub", + JobSource: &jobs.JobSource{ + JobConfigPath: "a/b/c/databricks.yml", + ImportFromGitBranch: "main", + DirtyState: "NOT_SYNCED", + }, }, }, Response: Job{ @@ -1115,6 +1121,11 @@ func TestResourceJobCreateFromGitSource(t *testing.T) { git_source { url = "https://github.com/databricks/terraform-provider-databricks" tag = "0.4.8" + job_source { + job_config_path = "a/b/c/databricks.yml" + import_from_git_branch = "main" + dirty_state = "NOT_SYNCED" + } } task { @@ -1204,7 +1215,7 @@ func TestResourceJobCreateFromGitSourceWithoutProviderFail(t *testing.T) { } } `, - }.ExpectError(t, "git source is not empty but Git Provider is not specified and cannot be guessed by url &{Url:https://custom.git.hosting.com/databricks/terraform-provider-databricks Provider: Branch: Tag:0.4.8 Commit:}") + }.ExpectError(t, "git source is not empty but Git Provider is not specified and cannot be guessed by url &{Url:https://custom.git.hosting.com/databricks/terraform-provider-databricks Provider: Branch: Tag:0.4.8 Commit: JobSource:}") } func TestResourceJobCreateSingleNode_Fail(t *testing.T) { From 33a88d7aa7a4375b2a52ac41ce5a00f946dffdc1 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Tue, 1 Aug 2023 19:35:50 +0200 Subject: [PATCH 12/22] Allow search SQL Warehouses by name in `databricks_sql_warehouse` data source (#2458) * Allow search SQL Warehouses by name in `databricks_sql_warehouse` data source Right now it's possible to search only by the warehouse ID, but it's not always convenient although it's possible by using `databricks_sql_warehouses` data source + explicit filtering. This PR adds a capability to search by either SQL warehouse name or ID. This fixes #2443 * Update docs/data-sources/sql_warehouse.md Co-authored-by: Miles Yucht * Address review comments also change documentation a bit to better match the data source - it was copied from the resource as-is. * More fixes from review * code review comments --------- Co-authored-by: Miles Yucht --- docs/data-sources/sql_warehouse.md | 23 ++++-- sql/data_sql_warehouse.go | 37 ++++++++-- sql/data_sql_warehouse_test.go | 110 +++++++++++++++++++++++++++++ sql/resource_sql_endpoint.go | 7 ++ 4 files changed, 166 insertions(+), 11 deletions(-) diff --git a/docs/data-sources/sql_warehouse.md b/docs/data-sources/sql_warehouse.md index e6f3e3f620..d32be78bfa 100644 --- a/docs/data-sources/sql_warehouse.md +++ b/docs/data-sources/sql_warehouse.md @@ -9,7 +9,7 @@ Retrieves information about a [databricks_sql_warehouse](../resources/sql_wareho ## Example usage -Retrieve attributes of each SQL warehouses in a workspace +* Retrieve attributes of each SQL warehouses in a workspace: ```hcl data "databricks_sql_warehouses" "all" { @@ -19,32 +19,41 @@ data "databricks_sql_warehouse" "all" { for_each = data.databricks_sql.warehouses.ids id = each.value } +``` + +* Search for a specific SQL Warehouse by name: +```hcl +data "databricks_sql_warehouse" "all" { + name = "Starter Warehouse" +} ``` ## Argument reference -* `id` - (Required) The ID of the SQL warehouse +* `id` - (Required, if `name` isn't specified) The ID of the SQL warehouse. 
+* `name` - (Required, if `id` isn't specified) Name of the SQL warehouse to search (case-sensitive). ## Attribute reference This data source exports the following attributes: -* `name` - Name of the SQL warehouse. Must be unique. +* `id` - The ID of the SQL warehouse. +* `name` - Name of the SQL warehouse. * `cluster_size` - The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large". * `min_num_clusters` - Minimum number of clusters available when a SQL warehouse is running. * `max_num_clusters` - Maximum number of clusters available when a SQL warehouse is running. * `auto_stop_mins` - Time in minutes until an idle SQL warehouse terminates all clusters and stops. -* `tags` - Databricks tags all warehouse resources with these tags. +* `tags` - tags used for SQL warehouse resources. * `spot_instance_policy` - The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`. -* `enable_photon` - Whether to enable [Photon](https://databricks.com/product/delta-engine). -* `enable_serverless_compute` - Whether this SQL warehouse is a serverless SQL warehouse. If this value is `true`, `warehouse_type` must be `PRO`. +* `enable_photon` - Whether [Photon](https://databricks.com/product/delta-engine) is enabled. +* `enable_serverless_compute` - Whether this SQL warehouse is a serverless SQL warehouse. - **For AWS**: If your account needs updated [terms of use](https://docs.databricks.com/sql/admin/serverless.html#accept-terms), workspace admins are prompted in the Databricks SQL UI. A workspace must meet the [requirements](https://docs.databricks.com/sql/admin/serverless.html#requirements) and might require an update its instance profile role to [add a trust relationship](https://docs.databricks.com/sql/admin/serverless.html#aws-instance-profile-setup). - **For Azure**, you must [enable your workspace for serverless SQL warehouse](https://learn.microsoft.com/azure/databricks/sql/admin/serverless). -* `warehouse_type` - SQL warehouse type. See for [AWS](https://docs.databricks.com/sql/index.html#warehouse-types) or [Azure](https://learn.microsoft.com/azure/databricks/sql/#warehouse-types). Set to `PRO` or `CLASSIC`. If the field `enable_serverless_compute` has the value `true`, this needs to be set to `PRO`. +* `warehouse_type` - SQL warehouse type. See for [AWS](https://docs.databricks.com/sql/index.html#warehouse-types) or [Azure](https://learn.microsoft.com/azure/databricks/sql/#warehouse-types). * `channel` block, consisting of following fields: * `name` - Name of the Databricks SQL release channel. Possible values are: `CHANNEL_NAME_PREVIEW` and `CHANNEL_NAME_CURRENT`. Default is `CHANNEL_NAME_CURRENT`. * `jdbc_url` - JDBC connection string. 
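
For illustration, a minimal HCL sketch of how the by-name lookup documented above might be consumed; the data source label `by_name`, the warehouse name, and the output names are assumptions for the example, not taken from this patch:

```hcl
# Illustrative only: warehouse name and labels below are assumed, not part of this change.
data "databricks_sql_warehouse" "by_name" {
  name = "Starter Warehouse" # case-sensitive lookup by name, added by this patch
}

# Expose the resolved attributes documented in the attribute reference above.
output "warehouse_id" {
  value = data.databricks_sql_warehouse.by_name.id
}

output "warehouse_jdbc_url" {
  value = data.databricks_sql_warehouse.by_name.jdbc_url
}
```
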
diff --git a/sql/data_sql_warehouse.go b/sql/data_sql_warehouse.go index 28c88bbf16..32e76397f7 100644 --- a/sql/data_sql_warehouse.go +++ b/sql/data_sql_warehouse.go @@ -10,7 +10,7 @@ import ( func DataSourceWarehouse() *schema.Resource { type SQLWarehouseInfo struct { - ID string `json:"id"` + ID string `json:"id,omitempty" tf:"computed"` Name string `json:"name,omitempty" tf:"computed"` ClusterSize string `json:"cluster_size,omitempty" tf:"computed"` AutoStopMinutes int `json:"auto_stop_mins,omitempty" tf:"computed"` @@ -31,15 +31,44 @@ func DataSourceWarehouse() *schema.Resource { return common.DataResource(SQLWarehouseInfo{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error { data := e.(*SQLWarehouseInfo) - err := c.Get(ctx, fmt.Sprintf("/sql/warehouses/%s", data.ID), nil, data) + var id string + if data.ID == "" && data.Name == "" { + return fmt.Errorf("either 'id' or 'name' should be provided") + } + endpointsAPI := NewSQLEndpointsAPI(ctx, c) + selected := []DataSource{} + dataSources, err := endpointsAPI.listDataSources() if err != nil { return err } - endpointsAPI := NewSQLEndpointsAPI(ctx, c) - data.DataSourceID, err = endpointsAPI.ResolveDataSourceID(data.ID) + for _, source := range dataSources { + if data.Name != "" && source.Name == data.Name { + selected = append(selected, source) + } else if data.ID != "" && source.EndpointID == data.ID { + selected = append(selected, source) + break + } + } + if len(selected) == 0 { + if data.Name != "" { + return fmt.Errorf("can't find SQL warehouse with the name '%s'", data.Name) + } else { + return fmt.Errorf("can't find SQL warehouse with the ID '%s'", data.ID) + } + } + if len(selected) > 1 { + if data.Name != "" { + return fmt.Errorf("there are multiple SQL warehouses with the name '%s'", data.Name) + } else { + return fmt.Errorf("there are multiple SQL warehouses with the ID '%s'", data.ID) + } + } + id = selected[0].EndpointID + err = c.Get(ctx, fmt.Sprintf("/sql/warehouses/%s", id), nil, data) if err != nil { return err } + data.DataSourceID = selected[0].ID return nil }) } diff --git a/sql/data_sql_warehouse_test.go b/sql/data_sql_warehouse_test.go index 254d2f26c8..466d492b05 100644 --- a/sql/data_sql_warehouse_test.go +++ b/sql/data_sql_warehouse_test.go @@ -46,3 +46,113 @@ func TestWarehouseData_Error(t *testing.T) { ID: "_", }.ExpectError(t, "I'm a teapot") } + +func TestWarehouseDataByName_ListError(t *testing.T) { + qa.ResourceFixture{ + Fixtures: qa.HTTPFailures, + Resource: DataSourceWarehouse(), + Read: true, + NonWritable: true, + HCL: `name = "abc"`, + ID: "_", + }.ExpectError(t, "I'm a teapot") +} + +func TestWarehouseDataByName_NotFoundError(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/sql/data_sources", + Response: []DataSource{ + { + ID: "d7c9d05c-7496-4c69-b089-48823edad401", + EndpointID: "def", + Name: "test", + }, + { + ID: "d7c9d05c-7496-4c69-b089-48823edad40c", + EndpointID: "abc", + Name: "abc2", + }, + }, + }, + }, + Resource: DataSourceWarehouse(), + Read: true, + NonWritable: true, + HCL: `name = "abc"`, + ID: "_", + }.ExpectError(t, "can't find SQL warehouse with the name 'abc'") +} + +func TestWarehouseDataByName_DuplicatesError(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/sql/data_sources", + Response: []DataSource{ + { + ID: "d7c9d05c-7496-4c69-b089-48823edad401", + EndpointID: "def", + Name: "abc", + }, + { + ID: 
"d7c9d05c-7496-4c69-b089-48823edad40c", + EndpointID: "abc", + Name: "abc", + }, + }, + }, + }, + Resource: DataSourceWarehouse(), + Read: true, + NonWritable: true, + HCL: `name = "abc"`, + ID: "_", + }.ExpectError(t, "there are multiple SQL warehouses with the name 'abc'") +} + +func TestWarehouseDataByName(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/sql/data_sources", + Response: []DataSource{ + { + ID: "d7c9d05c-7496-4c69-b089-48823edad401", + EndpointID: "def", + Name: "abc", + }, + { + ID: "d7c9d05c-7496-4c69-b089-48823edad40c", + EndpointID: "abc", + Name: "test", + }, + }, + }, + { + Method: "GET", + Resource: "/api/2.0/sql/warehouses/abc", + ReuseRequest: true, + Response: SQLEndpoint{ + Name: "test", + ClusterSize: "Small", + ID: "abc", + State: "RUNNING", + }, + }, + }, + Resource: DataSourceWarehouse(), + Read: true, + NonWritable: true, + HCL: `name = "test"`, + ID: "_", + }.Apply(t) + require.NoError(t, err) + assert.Equal(t, "abc", d.Id()) + assert.Equal(t, "RUNNING", d.Get("state")) + assert.Equal(t, "d7c9d05c-7496-4c69-b089-48823edad40c", d.Get("data_source_id")) +} diff --git a/sql/resource_sql_endpoint.go b/sql/resource_sql_endpoint.go index ccb2716ed8..22a288a416 100644 --- a/sql/resource_sql_endpoint.go +++ b/sql/resource_sql_endpoint.go @@ -80,6 +80,7 @@ type Tag struct { type DataSource struct { ID string `json:"id"` EndpointID string `json:"endpoint_id"` + Name string `json:"name"` } // endpointList ... @@ -138,6 +139,12 @@ func (a SQLEndpointsAPI) Create(se *SQLEndpoint, timeout time.Duration) error { return a.waitForRunning(se.ID, timeout) } +func (a SQLEndpointsAPI) listDataSources() ([]DataSource, error) { + var dss []DataSource + err := a.client.Get(a.context, "/preview/sql/data_sources", nil, &dss) + return dss, err +} + // ResolveDataSourceID ... func (a SQLEndpointsAPI) ResolveDataSourceID(endpointID string) (dataSourceID string, err error) { var dss []DataSource From 86a2e5cd1be4747ab540281e3250a336be3db169 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Tue, 1 Aug 2023 19:43:04 +0200 Subject: [PATCH 13/22] Late jobs support (aka health conditions) in `databricks_job` resource (#2496) * Late jobs support (aka health conditions) in `databricks_job` resource Added support for `health` block that is used to detect late jobs. 
Also, this PR includes following changes: * Added `on_duration_warning_threshold_exceeded` attribute to email & webhook notifications (needed for late jobs support) * Added `notification_settings` on a task level & use jobs & task notification structs from Go SDK * Reorganized documentation for task block as it's getting more & more attributes * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * Update docs/resources/job.md Co-authored-by: Gabor Ratky * address review comments * add list of tasks * more review chanes --------- Co-authored-by: Gabor Ratky Co-authored-by: Miles Yucht --- docs/resources/job.md | 62 ++++++++++++++++++++++++++++++++---- jobs/resource_job.go | 67 ++++++++++++++++++++++----------------- jobs/resource_job_test.go | 42 ++++++++++++++++++++++-- 3 files changed, 132 insertions(+), 39 deletions(-) diff --git a/docs/resources/job.md b/docs/resources/job.md index 56027c1133..4a3d8498ae 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -10,7 +10,7 @@ The `databricks_job` resource allows you to manage [Databricks Jobs](https://doc -> **Note** In Terraform configuration, it is recommended to define tasks in alphabetical order of their `task_key` arguments, so that you get consistent and readable diff. Whenever tasks are added or removed, or `task_key` is renamed, you'll observe a change in the majority of tasks. It's related to the fact that the current version of the provider treats `task` blocks as an ordered list. Alternatively, `task` block could have been an unordered set, though end-users would see the entire block replaced upon a change in single property of the task. -It is possible to create [a Databricks job](https://docs.databricks.com/data-engineering/jobs/jobs-user-guide.html) using `task` blocks. Single task is defined with the `task` block containing one of the `*_task` block, `task_key`, `libraries`, `email_notifications`, `timeout_seconds`, `max_retries`, `min_retry_interval_millis`, `retry_on_timeout` attributes and `depends_on` blocks to define cross-task dependencies. +It is possible to create [a Databricks job](https://docs.databricks.com/data-engineering/jobs/jobs-user-guide.html) using `task` blocks. Single task is defined with the `task` block containing one of the `*_task` block, `task_key`, and additional arguments described below. ```hcl resource "databricks_job" "this" { @@ -88,13 +88,44 @@ The resource supports the following arguments: ``` * `library` - (Optional) (Set) An optional list of libraries to be installed on the cluster that will execute the job. Please consult [libraries section](cluster.md#libraries) for [databricks_cluster](cluster.md) resource. * `retry_on_timeout` - (Optional) (Bool) An optional policy to specify whether to retry a job when it times out. The default behavior is to not retry on timeout. -* `max_retries` - (Optional) (Integer) An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with a FAILED or INTERNAL_ERROR lifecycle state. 
The value -1 means to retry indefinitely and the value 0 means to never retry. The default behavior is to never retry. A run can have the following lifecycle state: PENDING, RUNNING, TERMINATING, TERMINATED, SKIPPED or INTERNAL_ERROR +* `max_retries` - (Optional) (Integer) An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with a `FAILED` or `INTERNAL_ERROR` lifecycle state. The value -1 means to retry indefinitely and the value 0 means to never retry. The default behavior is to never retry. * `timeout_seconds` - (Optional) (Integer) An optional timeout applied to each run of this job. The default behavior is to have no timeout. * `min_retry_interval_millis` - (Optional) (Integer) An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. The default behavior is that unsuccessful runs are immediately retried. * `max_concurrent_runs` - (Optional) (Integer) An optional maximum allowed number of concurrent runs of the job. Defaults to *1*. -* `email_notifications` - (Optional) (List) An optional set of email addresses notified when runs of this job begins, completes and fails. The default behavior is to not send any emails. This field is a block and is documented below. +* `email_notifications` - (Optional) (List) An optional set of email addresses notified when runs of this job begins, completes and fails. The default behavior is to not send any emails. This field is a block and is [documented below](#email_notifications-configuration-block). * `webhook_notifications` - (Optional) (List) An optional set of system destinations (for example, webhook destinations or Slack) to be notified when runs of this job begins, completes and fails. The default behavior is to not send any notifications. This field is a block and is documented below. +* `notification_settings` - (Optional) An optional block controlling the notification settings on the job level (described below). * `schedule` - (Optional) (List) An optional periodic schedule for this job. The default behavior is that the job runs when triggered by clicking Run Now in the Jobs UI or sending an API request to runNow. This field is a block and is documented below. +* `health` - (Optional) An optional block that specifies the health conditions for the job (described below). + +### task Configuration Block + +This block describes individual tasks: + +* `task_key` - (Required) string specifying an unique key for a given task. +* `*_task` - (Required) one of the specific task blocks described below: + * `dbt_task` + * `notebook_task` + * `pipeline_task` + * `python_wheel_task` + * `spark_jar_task` + * `spark_python_task` + * `spark_submit_task` + * `sql_task` +* `library` - (Optional) (Set) An optional list of libraries to be installed on the cluster that will execute the job. Please consult [libraries section](cluster.md#libraries) for [databricks_cluster](cluster.md) resource. +* `depends_on` - (Optional) block specifying dependency(-ies) for a given task. +* `retry_on_timeout` - (Optional) (Bool) An optional policy to specify whether to retry a job when it times out. The default behavior is to not retry on timeout. +* `max_retries` - (Optional) (Integer) An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with a `FAILED` or `INTERNAL_ERROR` lifecycle state. The value -1 means to retry indefinitely and the value 0 means to never retry. 
The default behavior is to never retry. A run can have the following lifecycle state: `PENDING`, `RUNNING`, `TERMINATING`, `TERMINATED`, `SKIPPED` or `INTERNAL_ERROR`. +* `timeout_seconds` - (Optional) (Integer) An optional timeout applied to each run of this job. The default behavior is to have no timeout. +* `min_retry_interval_millis` - (Optional) (Integer) An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. The default behavior is that unsuccessful runs are immediately retried. +* `email_notifications` - (Optional) (List) An optional set of email addresses notified when runs of this job begins, completes and fails. The default behavior is to not send any emails. This field is a block and is [documented below](#email_notifications-configuration-block). +* `health` - (Optional) block described below that specifies health conditions for a given task. + +### depends_on Configuration Block + +This block describes dependencies of a given task: + +* `task_key` - (Required) The name of the task this task depends on. ### tags Configuration Map `tags` - (Optional) (Map) An optional map of the tags associated with the job. Specified tags will be used as cluster tags for job clusters. @@ -130,8 +161,6 @@ resource "databricks_job" "this" { } ``` - - ### job_cluster Configuration Block [Shared job cluster](https://docs.databricks.com/jobs.html#use-shared-job-clusters) specification. Allows multiple tasks in the same job run to reuse the cluster. @@ -172,6 +201,7 @@ This block is used to specify Git repository information & branch/tag/commit tha * `on_start` - (Optional) (List) list of emails to notify when the run starts. * `on_success` - (Optional) (List) list of emails to notify when the run completes successfully. * `on_failure` - (Optional) (List) list of emails to notify when the run fails. +* `on_duration_warning_threshold_exceeded` - (Optional) (List) list of emails to notify when the duration of a run exceeds the threshold specified by the `RUN_DURATION_SECONDS` metric in the `health` block. * `no_alert_for_skipped_runs` - (Optional) (Bool) don't send alert for skipped runs. (It's recommended to use the corresponding setting in the `notification_settings` configuration block). ### webhook_notifications Configuration Block @@ -181,6 +211,7 @@ Each entry in `webhook_notification` block takes a list `webhook` blocks. The fi * `on_start` - (Optional) (List) list of notification IDs to call when the run starts. A maximum of 3 destinations can be specified. * `on_success` - (Optional) (List) list of notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified. * `on_failure` - (Optional) (List) list of notification IDs to call when the run fails. A maximum of 3 destinations can be specified. +* `on_duration_warning_threshold_exceeded` - (Optional) (List) list of notification IDs to call when the duration of a run exceeds the threshold specified by the `RUN_DURATION_SECONDS` metric in the `health` block. Note that the `id` is not to be confused with the name of the alert destination. 
The `id` can be retrieved through the API or the URL of Databricks UI `https:///sql/destinations/?o=` @@ -200,13 +231,30 @@ webhook_notifications { -> **Note** The following configuration blocks can be standalone or nested inside a `task` block -### notification_settings Configuration Block +### notification_settings Configuration Block (Job Level) -This block controls notification settings for both email & webhook notifications: +This block controls notification settings for both email & webhook notifications on a job level: * `no_alert_for_skipped_runs` - (Optional) (Bool) don't send alert for skipped runs. * `no_alert_for_canceled_runs` - (Optional) (Bool) don't send alert for cancelled runs. +### notification_settings Configuration Block (Task Level) + +This block controls notification settings for both email & webhook notifications on a task level: + +* `no_alert_for_skipped_runs` - (Optional) (Bool) don't send alert for skipped runs. +* `no_alert_for_canceled_runs` - (Optional) (Bool) don't send alert for cancelled runs. +* `alert_on_last_attempt` - (Optional) (Bool) do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run. + +### health Configuration Block + +This block describes health conditions for a given job or an individual task. It consists of the following attributes: + +* `rules` - (List) list of rules that are represented as objects with the following attributes: + * `metric` - (Optional) string specifying the metric to check. The only supported metric is `RUN_DURATION_SECONDS` (check [Jobs REST API documentation](https://docs.databricks.com/api/workspace/jobs/create) for the latest information). + * `op` - (Optional) string specifying the operation used to evaluate the given metric. The only supported operation is `GREATER_THAN`. + * `value` - (Optional) integer value used to compare to the given metric. + ### spark_jar_task Configuration Block * `parameters` - (Optional) (List) Parameters passed to the main method. diff --git a/jobs/resource_job.go b/jobs/resource_job.go index 3dd2a421f7..5d36755da2 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -114,24 +114,20 @@ type DbtTask struct { // EmailNotifications contains the information for email notifications after job or task run start or completion type EmailNotifications struct { - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` } // WebhookNotifications contains the information for webhook notifications sent after job start or completion. 
type WebhookNotifications struct { - OnStart []Webhook `json:"on_start,omitempty"` - OnSuccess []Webhook `json:"on_success,omitempty"` - OnFailure []Webhook `json:"on_failure,omitempty"` -} - -// NotificationSettings control the notification settings for a job -type NotificationSettings struct { - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + OnStart []Webhook `json:"on_start,omitempty"` + OnSuccess []Webhook `json:"on_success,omitempty"` + OnFailure []Webhook `json:"on_failure,omitempty"` + OnDurationWarningThresholdExceeded []Webhook `json:"on_duration_warning_threshold_exceeded,omitempty"` } func (wn *WebhookNotifications) Sort() { @@ -171,6 +167,16 @@ type GitSource struct { // End Jobs + Repo integration preview +type JobHealthRule struct { + Metric string `json:"metric,omitempty"` + Operation string `json:"op,omitempty"` + Value int32 `json:"value,omitempty"` +} + +type JobHealth struct { + Rules []JobHealthRule `json:"rules"` +} + type JobTaskSettings struct { TaskKey string `json:"task_key,omitempty"` Description string `json:"description,omitempty"` @@ -198,11 +204,13 @@ type JobTaskSettings struct { // ConditionTask is in private preview ConditionTask *jobs.ConditionTask `json:"condition_task,omitempty" tf:"group:task_type"` - EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` - TimeoutSeconds int32 `json:"timeout_seconds,omitempty"` - MaxRetries int32 `json:"max_retries,omitempty"` - MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty" tf:"computed"` + EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` + NotificationSettings *jobs.TaskNotificationSettings `json:"notification_settings,omitempty"` + TimeoutSeconds int32 `json:"timeout_seconds,omitempty"` + MaxRetries int32 `json:"max_retries,omitempty"` + MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty" tf:"computed"` + Health *JobHealth `json:"health,omitempty"` } type JobCluster struct { @@ -270,16 +278,17 @@ type JobSettings struct { GitSource *GitSource `json:"git_source,omitempty"` // END Jobs + Repo integration preview - Schedule *CronSchedule `json:"schedule,omitempty"` - Continuous *ContinuousConf `json:"continuous,omitempty"` - Trigger *Trigger `json:"trigger,omitempty"` - MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"` - EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` - WebhookNotifications *WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"` - NotificationSettings *NotificationSettings `json:"notification_settings,omitempty"` - Tags map[string]string `json:"tags,omitempty"` - Queue *Queue `json:"queue,omitempty"` - RunAs *JobRunAs `json:"run_as,omitempty"` + Schedule *CronSchedule `json:"schedule,omitempty"` + Continuous *ContinuousConf `json:"continuous,omitempty"` + Trigger *Trigger `json:"trigger,omitempty"` + MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"` + EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` + WebhookNotifications *WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"` + NotificationSettings *jobs.JobNotificationSettings `json:"notification_settings,omitempty"` + Tags 
map[string]string `json:"tags,omitempty"` + Queue *Queue `json:"queue,omitempty"` + RunAs *JobRunAs `json:"run_as,omitempty"` + Health *JobHealth `json:"health,omitempty"` } func (js *JobSettings) isMultiTask() bool { diff --git a/jobs/resource_job_test.go b/jobs/resource_job_test.go index 617a63c54b..583fc02e87 100644 --- a/jobs/resource_job_test.go +++ b/jobs/resource_job_test.go @@ -14,6 +14,7 @@ import ( "github.com/databricks/terraform-provider-databricks/common" "github.com/databricks/terraform-provider-databricks/libraries" "github.com/databricks/terraform-provider-databricks/qa" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -141,6 +142,15 @@ func TestResourceJobCreate_MultiTask(t *testing.T) { SparkJarTask: &SparkJarTask{ MainClassName: "com.labs.BarMain", }, + Health: &JobHealth{ + Rules: []JobHealthRule{ + { + Metric: "RUN_DURATION_SECONDS", + Operation: "GREATER_THAN", + Value: 3600, + }, + }, + }, }, { TaskKey: "b", @@ -158,6 +168,15 @@ func TestResourceJobCreate_MultiTask(t *testing.T) { }, }, MaxConcurrentRuns: 1, + Health: &JobHealth{ + Rules: []JobHealthRule{ + { + Metric: "RUN_DURATION_SECONDS", + Operation: "GREATER_THAN", + Value: 3600, + }, + }, + }, }, Response: Job{ JobID: 789, @@ -185,7 +204,15 @@ func TestResourceJobCreate_MultiTask(t *testing.T) { Resource: ResourceJob(), HCL: ` name = "Featurizer" - + + health { + rules { + metric = "RUN_DURATION_SECONDS" + op = "GREATER_THAN" + value = 3600 + } + } + task { task_key = "a" @@ -198,6 +225,15 @@ func TestResourceJobCreate_MultiTask(t *testing.T) { library { jar = "dbfs://aa/bb/cc.jar" } + + health { + rules { + metric = "RUN_DURATION_SECONDS" + op = "GREATER_THAN" + value = 3600 + } + } + } task { @@ -983,7 +1019,7 @@ func TestResourceJobCreateWithWebhooks(t *testing.T) { OnSuccess: []Webhook{{ID: "id2"}}, OnFailure: []Webhook{{ID: "id3"}}, }, - NotificationSettings: &NotificationSettings{ + NotificationSettings: &jobs.JobNotificationSettings{ NoAlertForSkippedRuns: true, NoAlertForCanceledRuns: true, }, @@ -1014,7 +1050,7 @@ func TestResourceJobCreateWithWebhooks(t *testing.T) { OnSuccess: []Webhook{{ID: "id2"}}, OnFailure: []Webhook{{ID: "id3"}}, }, - NotificationSettings: &NotificationSettings{ + NotificationSettings: &jobs.JobNotificationSettings{ NoAlertForSkippedRuns: true, NoAlertForCanceledRuns: true, }, From 9761051fc0285c770c97c807922ae5951f9e082d Mon Sep 17 00:00:00 2001 From: Vuong Date: Wed, 2 Aug 2023 10:56:12 +0100 Subject: [PATCH 14/22] feedback --- catalog/resource_connection.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index 0f59bf4947..2fbbf0fdef 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -46,8 +46,8 @@ func ResourceConnection() *schema.Resource { var createConnectionRequest catalog.CreateConnection var alias ConnectionInfo common.DataToStructPointer(d, s, &createConnectionRequest) - common.DataToStructPointer(d, s, &alias) //workaround as cannot set tf:"alias" for the Go SDK struct + common.DataToStructPointer(d, s, &alias) createConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs createConnectionRequest.PropertiesKvpairs = alias.PropertiesKvpairs conn, err := w.Connections.Create(ctx, createConnectionRequest) @@ -76,8 +76,8 @@ func ResourceConnection() *schema.Resource { var updateConnectionRequest catalog.UpdateConnection var alias ConnectionInfo common.DataToStructPointer(d, s, &updateConnectionRequest) - 
common.DataToStructPointer(d, s, &alias) //workaround as cannot set tf:"alias" for the Go SDK struct + common.DataToStructPointer(d, s, &alias) updateConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs updateConnectionRequest.NameArg = d.Id() conn, err := w.Connections.Update(ctx, updateConnectionRequest) From 87c4c27f0ac3b22291ead588622f216a81837e81 Mon Sep 17 00:00:00 2001 From: Vuong Date: Mon, 14 Aug 2023 12:19:11 +0100 Subject: [PATCH 15/22] update struct --- catalog/resource_connection.go | 13 ++----------- catalog/resource_connection_test.go | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index 2fbbf0fdef..a76ee90c97 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -21,12 +21,12 @@ type ConnectionInfo struct { // Name of the connection. NameArg string `json:"-" url:"-"` // A map of key-value properties attached to the securable. - OptionsKvpairs map[string]string `json:"options_kvpairs" tf:"alias:options,sensitive"` + Options map[string]string `json:"options" tf:"sensitive"` // Username of current owner of the connection. Owner string `json:"owner,omitempty" tf:"force_new"` // An object containing map of key-value properties attached to the // connection. - PropertiesKvpairs map[string]string `json:"properties_kvpairs,omitempty" tf:"alias:properties,force_new"` + Properties map[string]string `json:"properties,omitempty" tf:"force_new"` // If the connection is read only. ReadOnly bool `json:"read_only,omitempty" tf:"force_new"` } @@ -44,12 +44,7 @@ func ResourceConnection() *schema.Resource { return err } var createConnectionRequest catalog.CreateConnection - var alias ConnectionInfo common.DataToStructPointer(d, s, &createConnectionRequest) - //workaround as cannot set tf:"alias" for the Go SDK struct - common.DataToStructPointer(d, s, &alias) - createConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs - createConnectionRequest.PropertiesKvpairs = alias.PropertiesKvpairs conn, err := w.Connections.Create(ctx, createConnectionRequest) if err != nil { return err @@ -74,11 +69,7 @@ func ResourceConnection() *schema.Resource { return err } var updateConnectionRequest catalog.UpdateConnection - var alias ConnectionInfo common.DataToStructPointer(d, s, &updateConnectionRequest) - //workaround as cannot set tf:"alias" for the Go SDK struct - common.DataToStructPointer(d, s, &alias) - updateConnectionRequest.OptionsKvpairs = alias.OptionsKvpairs updateConnectionRequest.NameArg = d.Id() conn, err := w.Connections.Update(ctx, updateConnectionRequest) if err != nil { diff --git a/catalog/resource_connection_test.go b/catalog/resource_connection_test.go index 082c8e36c9..3c04f4f4c3 100644 --- a/catalog/resource_connection_test.go +++ b/catalog/resource_connection_test.go @@ -24,10 +24,10 @@ func TestConnectionsCreate(t *testing.T) { Name: "testConnectionName", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "This is a test comment.", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, - PropertiesKvpairs: map[string]string{ + Properties: map[string]string{ "purpose": "testing", }, Owner: "InitialOwner", @@ -38,10 +38,10 @@ func TestConnectionsCreate(t *testing.T) { Comment: "This is a test comment.", FullName: "testConnectionName", Owner: "InitialOwner", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, - PropertiesKvpairs: map[string]string{ + 
Properties: map[string]string{ "purpose": "testing", }, }, @@ -55,10 +55,10 @@ func TestConnectionsCreate(t *testing.T) { Comment: "This is a test comment.", FullName: "testConnectionName", Owner: "InitialOwner", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, - PropertiesKvpairs: map[string]string{ + Properties: map[string]string{ "purpose": "testing", }, }, @@ -97,7 +97,7 @@ func TestConnectionsCreate_Error(t *testing.T) { Name: "testConnectionName", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "This is a test comment.", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, Owner: "testOwner", @@ -135,7 +135,7 @@ func TestConnectionsRead(t *testing.T) { ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "This is a test comment.", FullName: "testConnectionName", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, }, @@ -198,7 +198,7 @@ func TestConnectionsUpdate(t *testing.T) { Resource: "/api/2.1/unity-catalog/connections/testConnectionName", ExpectedRequest: catalog.UpdateConnection{ Name: "testConnectionNameNew", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, }, @@ -248,7 +248,7 @@ func TestConnectionUpdate_Error(t *testing.T) { Resource: "/api/2.1/unity-catalog/connections/testConnectionName", ExpectedRequest: catalog.UpdateConnection{ Name: "testConnectionNameNew", - OptionsKvpairs: map[string]string{ + Options: map[string]string{ "host": "test.com", }, }, From 49898662e42e2991e93d7020e51140d3b84f7ab8 Mon Sep 17 00:00:00 2001 From: Vuong Date: Mon, 14 Aug 2023 13:03:01 +0100 Subject: [PATCH 16/22] add suppress diff --- catalog/resource_connection.go | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index a76ee90c97..8548a244f8 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -23,17 +23,24 @@ type ConnectionInfo struct { // A map of key-value properties attached to the securable. Options map[string]string `json:"options" tf:"sensitive"` // Username of current owner of the connection. - Owner string `json:"owner,omitempty" tf:"force_new"` + Owner string `json:"owner,omitempty" tf:"force_new,suppress_diff"` // An object containing map of key-value properties attached to the // connection. Properties map[string]string `json:"properties,omitempty" tf:"force_new"` // If the connection is read only. 
- ReadOnly bool `json:"read_only,omitempty" tf:"force_new"` + ReadOnly bool `json:"read_only,omitempty" tf:"force_new,default:true"` +} + +func suppressSensitiveOptions(k, old, new string, d *schema.ResourceData) bool { + //ignore changes in user & password + // this list will need to be extended + return !d.HasChanges("options.0.user", "options.0.password") } func ResourceConnection() *schema.Resource { s := common.StructToSchema(ConnectionInfo{}, func(m map[string]*schema.Schema) map[string]*schema.Schema { + m["options"].DiffSuppressFunc = suppressSensitiveOptions return m }) return common.Resource{ From 9f9de751e7bbdae469ad085396a70a505c8a8ec4 Mon Sep 17 00:00:00 2001 From: Vuong Date: Mon, 14 Aug 2023 14:45:07 +0100 Subject: [PATCH 17/22] fix suppress diff --- catalog/resource_connection.go | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index 8548a244f8..cb3bf7bdb1 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/terraform-provider-databricks/common" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "golang.org/x/exp/slices" ) // This structure contains the fields of catalog.UpdateConnection and catalog.CreateConnection @@ -28,13 +29,26 @@ type ConnectionInfo struct { // connection. Properties map[string]string `json:"properties,omitempty" tf:"force_new"` // If the connection is read only. - ReadOnly bool `json:"read_only,omitempty" tf:"force_new,default:true"` + ReadOnly bool `json:"read_only,omitempty" tf:"force_new,computed"` } +// suppress diff for sensitive options, which are not returned by the server func suppressSensitiveOptions(k, old, new string, d *schema.ResourceData) bool { - //ignore changes in user & password - // this list will need to be extended - return !d.HasChanges("options.0.user", "options.0.password") + //this list will expand as other auth may have different sensitive options + sensitiveOptions := []string{"user", "password"} + o, n := d.GetChange("options") + oldOpt := o.(map[string]any) + newOpt := n.(map[string]any) + //loop through the map and ignore diff for sensitive options + for key, element := range newOpt { + if slices.Contains(sensitiveOptions, key) { + continue + } + if oldOpt[key] != element { + return false + } + } + return true } func ResourceConnection() *schema.Resource { From 22b9e8ac82eb75f5a9f1c326d18a3c70c813328c Mon Sep 17 00:00:00 2001 From: Vuong Date: Mon, 14 Aug 2023 15:10:06 +0100 Subject: [PATCH 18/22] fix acceptance tests --- internal/acceptance/connection_test.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/acceptance/connection_test.go b/internal/acceptance/connection_test.go index 85299253e2..320b64a4c2 100644 --- a/internal/acceptance/connection_test.go +++ b/internal/acceptance/connection_test.go @@ -7,9 +7,8 @@ import ( func TestUcAccConnectionsResourceFullLifecycle(t *testing.T) { unityWorkspaceLevel(t, step{ Template: ` - resource "databricks_volume" "this" { + resource "databricks_connection" "this" { name = "name-{var.STICKY_RANDOM}" - comment = "comment-{var.STICKY_RANDOM}" connection_type = "MYSQL" comment = "this is a connection to mysql db" options = { From de261414701161434a9c8201b2657236001110b7 Mon Sep 17 00:00:00 2001 From: Vuong Date: Tue, 15 Aug 2023 16:57:58 +0100 Subject: [PATCH 19/22] test feedback --- 
catalog/resource_connection_test.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/catalog/resource_connection_test.go b/catalog/resource_connection_test.go index 3c04f4f4c3..c9d1e75d8b 100644 --- a/catalog/resource_connection_test.go +++ b/catalog/resource_connection_test.go @@ -206,6 +206,9 @@ func TestConnectionsUpdate(t *testing.T) { Name: "testConnectionNameNew", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "testComment", + Options: map[string]string{ + "host": "test.com", + }, }, }, { @@ -215,6 +218,9 @@ func TestConnectionsUpdate(t *testing.T) { Name: "testConnectionNameNew", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "testComment", + Options: map[string]string{ + "host": "test.com", + }, }, }, }, From 6e82b4effb0a03c11274d402afc6ea2d801c871e Mon Sep 17 00:00:00 2001 From: Vuong Date: Tue, 15 Aug 2023 21:10:35 +0100 Subject: [PATCH 20/22] make id a pair --- catalog/resource_connection.go | 33 +++++++++++++++++++++++------ catalog/resource_connection_test.go | 24 +++++++++++++-------- 2 files changed, 41 insertions(+), 16 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index cb3bf7bdb1..43b04d7452 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -17,6 +17,8 @@ type ConnectionInfo struct { Comment string `json:"comment,omitempty" tf:"force_new"` // The type of connection. ConnectionType string `json:"connection_type" tf:"force_new"` + // Unique identifier of parent metastore. + MetastoreId string `json:"metastore_id,omitempty" tf:"computed"` // Name of the connection. Name string `json:"name"` // Name of the connection. @@ -57,6 +59,10 @@ func ResourceConnection() *schema.Resource { m["options"].DiffSuppressFunc = suppressSensitiveOptions return m }) + pi := common.NewPairID("name", "metastore_id").Schema( + func(m map[string]*schema.Schema) map[string]*schema.Schema { + return s + }) return common.Resource{ Schema: s, Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { @@ -70,7 +76,8 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } - d.SetId(conn.Name) + d.Set("metastore_id", conn.MetastoreId) + pi.Pack(d) return nil }, Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { @@ -78,7 +85,11 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } - conn, err := w.Connections.GetByNameArg(ctx, d.Id()) + connName, _, err := pi.Unpack(d) + if err != nil { + return err + } + conn, err := w.Connections.GetByNameArg(ctx, connName) if err != nil { return err } @@ -91,14 +102,18 @@ func ResourceConnection() *schema.Resource { } var updateConnectionRequest catalog.UpdateConnection common.DataToStructPointer(d, s, &updateConnectionRequest) - updateConnectionRequest.NameArg = d.Id() + connName, _, err := pi.Unpack(d) + updateConnectionRequest.NameArg = connName + if err != nil { + return err + } conn, err := w.Connections.Update(ctx, updateConnectionRequest) if err != nil { return err } - // We need to update the resource Id because Name is updatable and FullName consists of Name, - // So if we don't update the field then the requests would be made to old FullName which doesn't exists. 
- d.SetId(conn.Name) + // We need to repack the Id as the name may have changed + d.Set("name", conn.Name) + pi.Pack(d) return nil }, Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { @@ -106,7 +121,11 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } - return w.Connections.DeleteByNameArg(ctx, d.Id()) + connName, _, err := pi.Unpack(d) + if err != nil { + return err + } + return w.Connections.DeleteByNameArg(ctx, connName) }, }.ToResource() } diff --git a/catalog/resource_connection_test.go b/catalog/resource_connection_test.go index c9d1e75d8b..8d6ef71a92 100644 --- a/catalog/resource_connection_test.go +++ b/catalog/resource_connection_test.go @@ -37,6 +37,7 @@ func TestConnectionsCreate(t *testing.T) { ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "This is a test comment.", FullName: "testConnectionName", + MetastoreId: "abc", Owner: "InitialOwner", Options: map[string]string{ "host": "test.com", @@ -55,6 +56,7 @@ func TestConnectionsCreate(t *testing.T) { Comment: "This is a test comment.", FullName: "testConnectionName", Owner: "InitialOwner", + MetastoreId: "abc", Options: map[string]string{ "host": "test.com", }, @@ -135,6 +137,7 @@ func TestConnectionsRead(t *testing.T) { ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "This is a test comment.", FullName: "testConnectionName", + MetastoreId: "abc", Options: map[string]string{ "host": "test.com", }, @@ -143,7 +146,7 @@ func TestConnectionsRead(t *testing.T) { }, Resource: ResourceConnection(), Read: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", HCL: ` name = "testConnectionName" connection_type = "testConnectionType" @@ -160,7 +163,7 @@ func TestConnectionsRead(t *testing.T) { assert.Equal(t, map[string]interface{}{"host": "test.com"}, d.Get("options")) } -func TestResourceConnectionRead_Error(t *testing.T) { +func TestConnectionRead_Error(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { @@ -175,10 +178,10 @@ func TestResourceConnectionRead_Error(t *testing.T) { }, Resource: ResourceConnection(), Read: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", }.Apply(t) qa.AssertErrorStartsWith(t, err, "Internal error happened") - assert.Equal(t, "testConnectionName", d.Id(), "Id should not be empty for error reads") + assert.Equal(t, "testConnectionName|abc", d.Id(), "Id should not be empty for error reads") } func TestConnectionsUpdate(t *testing.T) { @@ -190,6 +193,7 @@ func TestConnectionsUpdate(t *testing.T) { Response: catalog.ConnectionInfo{ Name: "testConnectionName", ConnectionType: catalog.ConnectionType("testConnectionType"), + MetastoreId: "abc", Comment: "testComment", }, }, @@ -206,6 +210,7 @@ func TestConnectionsUpdate(t *testing.T) { Name: "testConnectionNameNew", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "testComment", + MetastoreId: "abc", Options: map[string]string{ "host": "test.com", }, @@ -218,6 +223,7 @@ func TestConnectionsUpdate(t *testing.T) { Name: "testConnectionNameNew", ConnectionType: catalog.ConnectionType("testConnectionType"), Comment: "testComment", + MetastoreId: "abc", Options: map[string]string{ "host": "test.com", }, @@ -226,7 +232,7 @@ func TestConnectionsUpdate(t *testing.T) { }, Resource: ResourceConnection(), Update: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", InstanceState: map[string]string{ "connection_type": "testConnectionType", "comment": "testComment", @@ -267,7 
+273,7 @@ func TestConnectionUpdate_Error(t *testing.T) { }, Resource: ResourceConnection(), Update: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", InstanceState: map[string]string{ "connection_type": "testConnectionType", "comment": "testComment", @@ -294,10 +300,10 @@ func TestConnectionDelete(t *testing.T) { }, Resource: ResourceConnection(), Delete: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", }.Apply(t) assert.NoError(t, err) - assert.Equal(t, "testConnectionName", d.Id()) + assert.Equal(t, "testConnectionName|abc", d.Id()) } func TestConnectionDelete_Error(t *testing.T) { @@ -316,6 +322,6 @@ func TestConnectionDelete_Error(t *testing.T) { Resource: ResourceConnection(), Delete: true, Removed: true, - ID: "testConnectionName", + ID: "testConnectionName|abc", }.ExpectError(t, "Something went wrong") } From baa34419a2dad79f9e3f08897f110a5d50b30367 Mon Sep 17 00:00:00 2001 From: Vuong Date: Wed, 16 Aug 2023 12:12:09 +0100 Subject: [PATCH 21/22] better sensitive options handling --- catalog/resource_connection.go | 28 +++++++++------------------- 1 file changed, 9 insertions(+), 19 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index 43b04d7452..9e76baa4f9 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -34,29 +34,11 @@ type ConnectionInfo struct { ReadOnly bool `json:"read_only,omitempty" tf:"force_new,computed"` } -// suppress diff for sensitive options, which are not returned by the server -func suppressSensitiveOptions(k, old, new string, d *schema.ResourceData) bool { - //this list will expand as other auth may have different sensitive options - sensitiveOptions := []string{"user", "password"} - o, n := d.GetChange("options") - oldOpt := o.(map[string]any) - newOpt := n.(map[string]any) - //loop through the map and ignore diff for sensitive options - for key, element := range newOpt { - if slices.Contains(sensitiveOptions, key) { - continue - } - if oldOpt[key] != element { - return false - } - } - return true -} +var sensitiveOptions = []string{"user", "password", "personalAccessToken", "access_token", "client_secret", "OAuthPvtKey"} func ResourceConnection() *schema.Resource { s := common.StructToSchema(ConnectionInfo{}, func(m map[string]*schema.Schema) map[string]*schema.Schema { - m["options"].DiffSuppressFunc = suppressSensitiveOptions return m }) pi := common.NewPairID("name", "metastore_id").Schema( @@ -93,6 +75,14 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } + // We need to preserve original sensitive options as API doesn't return them + var cOrig catalog.CreateConnection + common.DataToStructPointer(d, s, &cOrig) + for key, element := range cOrig.Options { + if slices.Contains(sensitiveOptions, key) { + conn.Options[key] = element + } + } return common.StructToData(conn, s, d) }, Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { From 74acee9376eb79b86c5a76109a7c1db5a111b364 Mon Sep 17 00:00:00 2001 From: Vuong Date: Tue, 22 Aug 2023 15:13:31 +0100 Subject: [PATCH 22/22] reorder id pair --- catalog/resource_connection.go | 8 ++++---- catalog/resource_connection_test.go | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/catalog/resource_connection.go b/catalog/resource_connection.go index 9e76baa4f9..1e6a8a1f10 100644 --- a/catalog/resource_connection.go +++ b/catalog/resource_connection.go @@ -41,7 +41,7 @@ func ResourceConnection() *schema.Resource 
{ func(m map[string]*schema.Schema) map[string]*schema.Schema { return m }) - pi := common.NewPairID("name", "metastore_id").Schema( + pi := common.NewPairID("metastore_id", "name").Schema( func(m map[string]*schema.Schema) map[string]*schema.Schema { return s }) @@ -67,7 +67,7 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } - connName, _, err := pi.Unpack(d) + _, connName, err := pi.Unpack(d) if err != nil { return err } @@ -92,7 +92,7 @@ func ResourceConnection() *schema.Resource { } var updateConnectionRequest catalog.UpdateConnection common.DataToStructPointer(d, s, &updateConnectionRequest) - connName, _, err := pi.Unpack(d) + _, connName, err := pi.Unpack(d) updateConnectionRequest.NameArg = connName if err != nil { return err @@ -111,7 +111,7 @@ func ResourceConnection() *schema.Resource { if err != nil { return err } - connName, _, err := pi.Unpack(d) + _, connName, err := pi.Unpack(d) if err != nil { return err } diff --git a/catalog/resource_connection_test.go b/catalog/resource_connection_test.go index 8d6ef71a92..9ade5fdd7f 100644 --- a/catalog/resource_connection_test.go +++ b/catalog/resource_connection_test.go @@ -146,7 +146,7 @@ func TestConnectionsRead(t *testing.T) { }, Resource: ResourceConnection(), Read: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", HCL: ` name = "testConnectionName" connection_type = "testConnectionType" @@ -178,10 +178,10 @@ func TestConnectionRead_Error(t *testing.T) { }, Resource: ResourceConnection(), Read: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", }.Apply(t) qa.AssertErrorStartsWith(t, err, "Internal error happened") - assert.Equal(t, "testConnectionName|abc", d.Id(), "Id should not be empty for error reads") + assert.Equal(t, "abc|testConnectionName", d.Id(), "Id should not be empty for error reads") } func TestConnectionsUpdate(t *testing.T) { @@ -232,7 +232,7 @@ func TestConnectionsUpdate(t *testing.T) { }, Resource: ResourceConnection(), Update: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", InstanceState: map[string]string{ "connection_type": "testConnectionType", "comment": "testComment", @@ -273,7 +273,7 @@ func TestConnectionUpdate_Error(t *testing.T) { }, Resource: ResourceConnection(), Update: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", InstanceState: map[string]string{ "connection_type": "testConnectionType", "comment": "testComment", @@ -300,10 +300,10 @@ func TestConnectionDelete(t *testing.T) { }, Resource: ResourceConnection(), Delete: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", }.Apply(t) assert.NoError(t, err) - assert.Equal(t, "testConnectionName|abc", d.Id()) + assert.Equal(t, "abc|testConnectionName", d.Id()) } func TestConnectionDelete_Error(t *testing.T) { @@ -322,6 +322,6 @@ func TestConnectionDelete_Error(t *testing.T) { Resource: ResourceConnection(), Delete: true, Removed: true, - ID: "testConnectionName|abc", + ID: "abc|testConnectionName", }.ExpectError(t, "Something went wrong") }
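For readers trying out the new resource, the configuration below is a minimal sketch of how `databricks_connection` could be declared once this patch series is applied. It mirrors the shape exercised in the acceptance and unit tests above (`name`, `connection_type`, `comment`, sensitive `options`, optional `properties`); the resource label, host, port, user, and password values are illustrative placeholders, and the exact option keys accepted depend on the connection type, so treat it as an example rather than authoritative documentation.

```hcl
resource "databricks_connection" "mysql" {
  name            = "mysql_connection"
  connection_type = "MYSQL"
  comment         = "this is a connection to mysql db"
  options = {
    host     = "test.mysql.example.com" # placeholder host
    port     = "3306"                   # placeholder port
    user     = "user"                   # sensitive: the provider preserves the configured value, since the API does not return it
    password = "password"               # sensitive: the provider preserves the configured value, since the API does not return it
  }
  properties = {
    purpose = "testing"
  }
}
```

Note that after the final patch the Terraform resource ID is packed as a `<metastore_id>|<name>` pair (for example `abc|testConnectionName` in the unit tests above), so any direct reference to the ID should use that form.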