-
Notifications
You must be signed in to change notification settings - Fork 366
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Add databricks_connection
resource to support Lakehouse Federation
#2528
Changes from all commits
5ef3414
a007f4a
207ec02
bacb45c
9bb853f
1087e85
60b98d1
5ce7ad6
0599851
345184c
52af856
33a88d7
86a2e5c
9761051
f91d380
270a4c9
87c4c27
4989866
9f9de75
22b9e8a
de26141
6e82b4e
baa3441
74acee9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
package catalog | ||
|
||
import ( | ||
"context" | ||
|
||
"github.com/databricks/databricks-sdk-go/service/catalog" | ||
"github.com/databricks/terraform-provider-databricks/common" | ||
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" | ||
"golang.org/x/exp/slices" | ||
) | ||
|
||
// ConnectionInfo combines the fields of catalog.UpdateConnection and
// catalog.CreateConnection.
//
// We need this custom struct because the fields we rely on (notably Owner,
// NameArg, and MetastoreId) are not all present in a single one of those SDK
// structs, and the Owner field additionally needs the tf:"computed" annotation.
type ConnectionInfo struct {
	// User-provided free-form text description.
	Comment string `json:"comment,omitempty" tf:"force_new"`
	// The type of connection.
	ConnectionType string `json:"connection_type" tf:"force_new"`
	// Unique identifier of parent metastore.
	MetastoreId string `json:"metastore_id,omitempty" tf:"computed"`
	// Name of the connection.
	Name string `json:"name"`
	// Name of the connection used as the request path argument; excluded from
	// both JSON and URL-query serialization.
	NameArg string `json:"-" url:"-"`
	// A map of key-value properties attached to the securable. Marked
	// sensitive because it may carry credentials (see sensitiveOptions).
	Options map[string]string `json:"options" tf:"sensitive"`
	// Username of current owner of the connection.
	Owner string `json:"owner,omitempty" tf:"force_new,suppress_diff"`
	// An object containing map of key-value properties attached to the
	// connection.
	Properties map[string]string `json:"properties,omitempty" tf:"force_new"`
	// If the connection is read only.
	ReadOnly bool `json:"read_only,omitempty" tf:"force_new,computed"`
}
|
||
// sensitiveOptions lists connection option keys that hold credentials. The
// API does not return their values, so the Read handler preserves the values
// already present in Terraform state instead of overwriting them.
var sensitiveOptions = []string{"user", "password", "personalAccessToken", "access_token", "client_secret", "OAuthPvtKey"}
|
||
func ResourceConnection() *schema.Resource { | ||
s := common.StructToSchema(ConnectionInfo{}, | ||
func(m map[string]*schema.Schema) map[string]*schema.Schema { | ||
return m | ||
}) | ||
Comment on lines
+41
to
+43
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think that we have multiple places like this, so it makes sense to move this to the |
||
pi := common.NewPairID("metastore_id", "name").Schema( | ||
func(m map[string]*schema.Schema) map[string]*schema.Schema { | ||
return s | ||
}) | ||
return common.Resource{ | ||
Schema: s, | ||
Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { | ||
w, err := c.WorkspaceClient() | ||
if err != nil { | ||
return err | ||
} | ||
var createConnectionRequest catalog.CreateConnection | ||
common.DataToStructPointer(d, s, &createConnectionRequest) | ||
conn, err := w.Connections.Create(ctx, createConnectionRequest) | ||
if err != nil { | ||
return err | ||
} | ||
d.Set("metastore_id", conn.MetastoreId) | ||
pi.Pack(d) | ||
return nil | ||
}, | ||
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { | ||
w, err := c.WorkspaceClient() | ||
if err != nil { | ||
return err | ||
} | ||
_, connName, err := pi.Unpack(d) | ||
if err != nil { | ||
return err | ||
} | ||
conn, err := w.Connections.GetByNameArg(ctx, connName) | ||
if err != nil { | ||
return err | ||
} | ||
// We need to preserve original sensitive options as API doesn't return them | ||
var cOrig catalog.CreateConnection | ||
common.DataToStructPointer(d, s, &cOrig) | ||
for key, element := range cOrig.Options { | ||
if slices.Contains(sensitiveOptions, key) { | ||
conn.Options[key] = element | ||
} | ||
} | ||
return common.StructToData(conn, s, d) | ||
}, | ||
Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { | ||
w, err := c.WorkspaceClient() | ||
if err != nil { | ||
return err | ||
} | ||
var updateConnectionRequest catalog.UpdateConnection | ||
common.DataToStructPointer(d, s, &updateConnectionRequest) | ||
_, connName, err := pi.Unpack(d) | ||
updateConnectionRequest.NameArg = connName | ||
if err != nil { | ||
return err | ||
} | ||
conn, err := w.Connections.Update(ctx, updateConnectionRequest) | ||
if err != nil { | ||
return err | ||
} | ||
// We need to repack the Id as the name may have changed | ||
d.Set("name", conn.Name) | ||
pi.Pack(d) | ||
return nil | ||
}, | ||
Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { | ||
w, err := c.WorkspaceClient() | ||
if err != nil { | ||
return err | ||
} | ||
_, connName, err := pi.Unpack(d) | ||
if err != nil { | ||
return err | ||
} | ||
return w.Connections.DeleteByNameArg(ctx, connName) | ||
}, | ||
}.ToResource() | ||
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
CatalogInfo also has options - our open api spec needs updating.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
cc @Doug-Luce
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@nkvuong Can you address this one as well? Thanks!
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@andrewli81 how would the options be split between connections & catalogs? and would there be sensitive options to be handled here as well?
once this is fixed in OpenAPI, we just need to regenerate the SDK, so should not take long
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@nkvuong
The options of catalogs are mainly used to identify databases. This is because database systems like PostgreSQL use 3-layer namespaces (same as UC), so customers will need to specify which database to import into a UC catalog.
https://docs.databricks.com/en/query-federation/postgresql.html#language-SQL:~:text=Run%20the%20following%20SQL%20command%20in%20a%20notebook%20or%20Databricks%20SQL%20editor.%20Items%20in%20brackets%20are%20optional.%20Replace%20the%20placeholder%20values%3A
The open API spec has been fixed: https://docs.databricks.com/api/workspace/connections/create
Can we also fix it here?