From 4457d1112c3ac52f086d337c285d6d52d42546f6 Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 18 Aug 2023 13:54:41 +0530 Subject: [PATCH 1/8] Update table names --- databricks/plugin.go | 4 +-- .../table_databricks_catalog_catalog.go | 36 +++++++++---------- databricks/table_databricks_catalog_schema.go | 2 +- .../table_databricks_workspace_workspace.go | 12 +++---- docs/tables/databricks_catalog_catalog.md | 18 +++++----- docs/tables/databricks_catalog_schema.md | 2 +- docs/tables/databricks_catalog_table.md | 2 +- docs/tables/databricks_workspace_workspace.md | 12 +++---- 8 files changed, 44 insertions(+), 44 deletions(-) diff --git a/databricks/plugin.go b/databricks/plugin.go index 4283368..c7d76fc 100644 --- a/databricks/plugin.go +++ b/databricks/plugin.go @@ -23,7 +23,7 @@ func Plugin(ctx context.Context) *plugin.Plugin { Schema: ConfigSchema, }, TableMap: map[string]*plugin.Table{ - "databricks_catalog_catalog": tableDatabricksCatalogCatalog(ctx), + "databricks_catalog": tableDatabricksCatalog(ctx), "databricks_catalog_connection": tableDatabricksCatalogConnection(ctx), "databricks_catalog_external_location": tableDatabricksCatalogExternalLocation(ctx), "databricks_catalog_function": tableDatabricksCatalogFunction(ctx), @@ -73,7 +73,7 @@ func Plugin(ctx context.Context) *plugin.Plugin { "databricks_workspace_repo": tableDatabricksWorkspaceRepo(ctx), "databricks_workspace_scope": tableDatabricksWorkspaceScope(ctx), "databricks_workspace_secret": tableDatabricksWorkspaceSecret(ctx), - "databricks_workspace_workspace": tableDatabricksWorkspaceWorkspace(ctx), + "databricks_workspace": tableDatabricksWorkspace(ctx), }, } diff --git a/databricks/table_databricks_catalog_catalog.go b/databricks/table_databricks_catalog_catalog.go index e2f68aa..9f71eb3 100644 --- a/databricks/table_databricks_catalog_catalog.go +++ b/databricks/table_databricks_catalog_catalog.go @@ -11,17 +11,17 @@ import ( //// TABLE DEFINITION -func tableDatabricksCatalogCatalog(_ context.Context) *plugin.Table { +func tableDatabricksCatalog(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_catalog_catalog", + Name: "databricks_catalog", Description: "Gets an array of catalogs in the metastore.", List: &plugin.ListConfig{ - Hydrate: listCatalogCatalogs, + Hydrate: listCatalogs, }, Get: &plugin.GetConfig{ KeyColumns: plugin.SingleColumn("name"), ShouldIgnoreError: isNotFoundError([]string{"CATALOG_DOES_NOT_EXIST"}), - Hydrate: getCatalogCatalog, + Hydrate: getCatalog, }, Columns: databricksAccountColumns([]*plugin.Column{ { @@ -141,7 +141,7 @@ func tableDatabricksCatalogCatalog(_ context.Context) *plugin.Table { Name: "workspace_bindings", Description: "Array of workspace bindings.", Type: proto.ColumnType_JSON, - Hydrate: getCatalogCatalogWorkspaceBindings, + Hydrate: getCatalogWorkspaceBindings, Transform: transform.FromValue(), }, @@ -158,19 +158,19 @@ func tableDatabricksCatalogCatalog(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listCatalogCatalogs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listCatalogs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_catalog_catalog.listCatalogCatalogs", "connection_error", err) + logger.Error("databricks_catalog.listCatalogs", "connection_error", err) return nil, err } catalogs, err := 
client.Catalogs.ListAll(ctx) if err != nil { - logger.Error("databricks_catalog_catalog.listCatalogCatalogs", "api_error", err) + logger.Error("databricks_catalog.listCatalogs", "api_error", err) return nil, err } @@ -188,7 +188,7 @@ func listCatalogCatalogs(ctx context.Context, d *plugin.QueryData, h *plugin.Hyd //// HYDRATE FUNCTIONS -func getCatalogCatalog(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { +func getCatalog(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) name := d.EqualsQualString("name") @@ -200,33 +200,33 @@ func getCatalogCatalog(ctx context.Context, d *plugin.QueryData, _ *plugin.Hydra // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogCatalog", "connection_error", err) + logger.Error("databricks_catalog.getCatalog", "connection_error", err) return nil, err } catalog, err := client.Catalogs.GetByName(ctx, name) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogCatalog", "api_error", err) + logger.Error("databricks_catalog.getCatalog", "api_error", err) return nil, err } return *catalog, nil } -func getCatalogCatalogWorkspaceBindings(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func getCatalogWorkspaceBindings(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) name := h.Item.(catalog.CatalogInfo).Name // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogCatalogWorkspaceBindings", "connection_error", err) + logger.Error("databricks_catalog.getCatalogWorkspaceBindings", "connection_error", err) return nil, err } bindings, err := client.WorkspaceBindings.GetByName(ctx, name) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogCatalogWorkspaceBindings", "api_error", err) + logger.Error("databricks_catalog.getCatalogWorkspaceBindings", "api_error", err) return nil, err } @@ -240,13 +240,13 @@ func getCatalogPermissions(ctx context.Context, d *plugin.QueryData, h *plugin.H // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogPermissions", "connection_error", err) + logger.Error("databricks_catalog.getCatalogPermissions", "connection_error", err) return nil, err } permission, err := client.Grants.GetBySecurableTypeAndFullName(ctx, catalog.SecurableTypeCatalog, name) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogPermissions", "api_error", err) + logger.Error("databricks_catalog.getCatalogPermissions", "api_error", err) return nil, err } return permission.PrivilegeAssignments, nil @@ -259,13 +259,13 @@ func getCatalogEffectivePermissions(ctx context.Context, d *plugin.QueryData, h // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogEffectivePermissions", "connection_error", err) + logger.Error("databricks_catalog.getCatalogEffectivePermissions", "connection_error", err) return nil, err } permission, err := client.Grants.GetEffectiveBySecurableTypeAndFullName(ctx, catalog.SecurableTypeCatalog, name) if err != nil { - logger.Error("databricks_catalog_catalog.getCatalogEffectivePermissions", "api_error", err) + logger.Error("databricks_catalog.getCatalogEffectivePermissions", 
"api_error", err) return nil, err } return permission.PrivilegeAssignments, nil diff --git a/databricks/table_databricks_catalog_schema.go b/databricks/table_databricks_catalog_schema.go index 1dd5e09..c312bc1 100644 --- a/databricks/table_databricks_catalog_schema.go +++ b/databricks/table_databricks_catalog_schema.go @@ -16,7 +16,7 @@ func tableDatabricksCatalogSchema(_ context.Context) *plugin.Table { Name: "databricks_catalog_schema", Description: "List schemas for a catalog in the metastore.", List: &plugin.ListConfig{ - ParentHydrate: listCatalogCatalogs, + ParentHydrate: listCatalogs, Hydrate: listCatalogSchemas, KeyColumns: plugin.OptionalColumns([]string{"catalog_name"}), }, diff --git a/databricks/table_databricks_workspace_workspace.go b/databricks/table_databricks_workspace_workspace.go index 4ac737e..12f7fe2 100644 --- a/databricks/table_databricks_workspace_workspace.go +++ b/databricks/table_databricks_workspace_workspace.go @@ -11,12 +11,12 @@ import ( //// TABLE DEFINITION -func tableDatabricksWorkspaceWorkspace(_ context.Context) *plugin.Table { +func tableDatabricksWorkspace(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_workspace_workspace", + Name: "databricks_workspace", Description: "List all secret workspaces available in the workspace.", List: &plugin.ListConfig{ - Hydrate: listWorkspaceWorkspaces, + Hydrate: listWorkspaces, ShouldIgnoreError: isNotFoundError([]string{"RESOURCE_DOES_NOT_EXIST"}), KeyColumns: plugin.OptionalColumns([]string{"path"}), }, @@ -72,7 +72,7 @@ func tableDatabricksWorkspaceWorkspace(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listWorkspaceWorkspaces(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listWorkspaces(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) path := "/" @@ -87,13 +87,13 @@ func listWorkspaceWorkspaces(ctx context.Context, d *plugin.QueryData, h *plugin // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_workspace_workspace.listWorkspaceWorkspaces", "connection_error", err) + logger.Error("databricks_workspace.listWorkspaces", "connection_error", err) return nil, err } workspaces, err := client.Workspace.ListAll(ctx, request) if err != nil { - logger.Error("databricks_workspace_workspace.listWorkspaceWorkspaces", "api_error", err) + logger.Error("databricks_workspace.listWorkspaces", "api_error", err) return nil, err } diff --git a/docs/tables/databricks_catalog_catalog.md b/docs/tables/databricks_catalog_catalog.md index db77837..bbda0f2 100644 --- a/docs/tables/databricks_catalog_catalog.md +++ b/docs/tables/databricks_catalog_catalog.md @@ -1,4 +1,4 @@ -# Table: databricks_catalog_catalog +# Table: databricks_catalog A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. 
@@ -18,7 +18,7 @@ select metastore_id, account_id from - databricks_catalog_catalog; + databricks_catalog; ``` ### List catalogs modified in the last 7 days @@ -33,7 +33,7 @@ select metastore_id, account_id from - databricks_catalog_catalog + databricks_catalog where updated_at >= now() - interval '7 days'; ``` @@ -50,7 +50,7 @@ select metastore_id, account_id from - databricks_catalog_catalog + databricks_catalog where enable_auto_maintenance = 'ENABLE'; ``` @@ -67,7 +67,7 @@ select metastore_id, account_id from - databricks_catalog_catalog + databricks_catalog where isolation_mode = 'OPEN'; ``` @@ -80,7 +80,7 @@ select p ->> 'principal' as principal_name, p ->> 'privileges' as permissions from - databricks_catalog_catalog, + databricks_catalog, jsonb_array_elements(catalog_effective_permissions) p; ``` @@ -91,7 +91,7 @@ select catalog_type, count(*) as total_catalogs from - databricks_catalog_catalog + databricks_catalog group by catalog_type; ``` @@ -104,7 +104,7 @@ select catalog_type, updated_at from - databricks_catalog_catalog + databricks_catalog order by updated_at desc limit 1; @@ -118,7 +118,7 @@ select count(*) as total_catalogs, (count(*) * 100.0 / sum(count(*)) over ()) as ownership_percentage from - databricks_catalog_catalog + databricks_catalog group by owner; ``` \ No newline at end of file diff --git a/docs/tables/databricks_catalog_schema.md b/docs/tables/databricks_catalog_schema.md index c14ec2d..a6a28fa 100644 --- a/docs/tables/databricks_catalog_schema.md +++ b/docs/tables/databricks_catalog_schema.md @@ -97,7 +97,7 @@ from ( c.catalog_type, count(s.full_name) as schema_count from - databricks_catalog_catalog as c + databricks_catalog as c left join databricks_catalog_schema as s on c.name = s.catalog_name group by c.catalog_type diff --git a/docs/tables/databricks_catalog_table.md b/docs/tables/databricks_catalog_table.md index 2cd8880..3bd288d 100644 --- a/docs/tables/databricks_catalog_table.md +++ b/docs/tables/databricks_catalog_table.md @@ -162,7 +162,7 @@ select c.account_id from databricks_catalog_table as t - left join databricks_catalog_catalog as c on t.catalog_name = c.name + left join databricks_catalog as c on t.catalog_name = c.name where full_name = '__catalog_name__.__schema_name__.__table_name__'; ``` \ No newline at end of file diff --git a/docs/tables/databricks_workspace_workspace.md b/docs/tables/databricks_workspace_workspace.md index 4359ada..ca9443a 100644 --- a/docs/tables/databricks_workspace_workspace.md +++ b/docs/tables/databricks_workspace_workspace.md @@ -1,4 +1,4 @@ -# Table: databricks_workspace_workspace +# Table: databricks_workspace Workspace manages the notebooks and folders in databricks. A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text. 
@@ -16,7 +16,7 @@ select size, account_id from - databricks_workspace_workspace + databricks_workspace where path = '/Users/user@turbot.com/NotebookDev'; ``` @@ -33,7 +33,7 @@ select size, account_id from - databricks_workspace_workspace + databricks_workspace where created_at >= now() - interval '7' day; ``` @@ -50,7 +50,7 @@ select size, account_id from - databricks_workspace_workspace + databricks_workspace where modified_at >= now() - interval '30' day; ``` @@ -62,7 +62,7 @@ select object_type, count(*) as total_objects from - databricks_workspace_workspace + databricks_workspace group by object_type; ``` @@ -74,7 +74,7 @@ select language, count(*) as total_notebooks from - databricks_workspace_workspace + databricks_workspace where object_type = 'NOTEBOOK' group by From 67f8251613f08cd06abead76da9eba522d9706ff Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 18 Aug 2023 14:03:16 +0530 Subject: [PATCH 2/8] Update table names --- ..._databricks_catalog_catalog.go => table_databricks_catalog.go} | 0 ...ricks_workspace_workspace.go => table_databricks_workspace.go} | 0 .../{databricks_catalog_catalog.md => databricks_catalog.md} | 0 ...{databricks_workspace_workspace.md => databricks_workspace.md} | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename databricks/{table_databricks_catalog_catalog.go => table_databricks_catalog.go} (100%) rename databricks/{table_databricks_workspace_workspace.go => table_databricks_workspace.go} (100%) rename docs/tables/{databricks_catalog_catalog.md => databricks_catalog.md} (100%) rename docs/tables/{databricks_workspace_workspace.md => databricks_workspace.md} (100%) diff --git a/databricks/table_databricks_catalog_catalog.go b/databricks/table_databricks_catalog.go similarity index 100% rename from databricks/table_databricks_catalog_catalog.go rename to databricks/table_databricks_catalog.go diff --git a/databricks/table_databricks_workspace_workspace.go b/databricks/table_databricks_workspace.go similarity index 100% rename from databricks/table_databricks_workspace_workspace.go rename to databricks/table_databricks_workspace.go diff --git a/docs/tables/databricks_catalog_catalog.md b/docs/tables/databricks_catalog.md similarity index 100% rename from docs/tables/databricks_catalog_catalog.md rename to docs/tables/databricks_catalog.md diff --git a/docs/tables/databricks_workspace_workspace.md b/docs/tables/databricks_workspace.md similarity index 100% rename from docs/tables/databricks_workspace_workspace.md rename to docs/tables/databricks_workspace.md From d9ee44ca2f344a560fc4c81a4f692c8c13e83551 Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Tue, 22 Aug 2023 12:02:47 +0530 Subject: [PATCH 3/8] Update docs --- databricks/utils.go | 32 ++++++++++++++++++-------------- docs/index.md | 20 ++++++++++---------- 2 files changed, 28 insertions(+), 24 deletions(-) diff --git a/databricks/utils.go b/databricks/utils.go index 5b92b9f..ce94019 100644 --- a/databricks/utils.go +++ b/databricks/utils.go @@ -11,12 +11,14 @@ import ( func isNotFoundError(notFoundErrors []string) plugin.ErrorPredicate { return func(err error) bool { - errMsg := err.(*apierr.APIError) - for _, msg := range notFoundErrors { - if strings.Contains(errMsg.ErrorCode, msg) { - return true - } else if strings.Contains(strconv.Itoa(errMsg.StatusCode), msg) { - return true + switch err := err.(type) { + case *apierr.APIError: + for _, msg := range notFoundErrors { + if strings.Contains(err.ErrorCode, msg) { + return true + } else if 
strings.Contains(strconv.Itoa(err.StatusCode), msg) { + return true + } } } return false @@ -25,14 +27,16 @@ func isNotFoundError(notFoundErrors []string) plugin.ErrorPredicate { func shouldRetryError(retryErrors []string) plugin.ErrorPredicateWithContext { return func(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData, err error) bool { - errMsg := err.(*apierr.APIError) - for _, msg := range retryErrors { - if strings.Contains(errMsg.ErrorCode, msg) { - plugin.Logger(ctx).Error("databricks_errors.shouldRetryError", "rate_limit_error", err) - return true - } else if strings.Contains(strconv.Itoa(errMsg.StatusCode), msg) { - plugin.Logger(ctx).Error("databricks_errors.shouldRetryError", "rate_limit_error", err) - return true + switch err := err.(type) { + case *apierr.APIError: + for _, msg := range retryErrors { + if strings.Contains(err.ErrorCode, msg) { + plugin.Logger(ctx).Error("databricks_errors.shouldRetryError", "rate_limit_error", err) + return true + } else if strings.Contains(strconv.Itoa(err.StatusCode), msg) { + plugin.Logger(ctx).Error("databricks_errors.shouldRetryError", "rate_limit_error", err) + return true + } } } return false diff --git a/docs/index.md b/docs/index.md index 4dca939..798aa21 100644 --- a/docs/index.md +++ b/docs/index.md @@ -114,7 +114,7 @@ connection "databricks" { } ``` -By default, all options are commented out in the default connection, thus Steampipe will resolve your credentials using the same mechanism as the Databricks CLI (Databricks environment variables, default profile, etc). This provides a quick way to get started with Steampipe, but you will probably want to customize your experience using configuration options for [querying multiple regions](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). +You can customize your experience using configuration options for [querying multiple accounts](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). 
## Multi-Account Connections @@ -122,19 +122,19 @@ You may create multiple databricks connections: ```hcl connection "databricks_dev" { plugin = "databricks" - profile = "databricks_dev" + config_profile = "databricks_dev" account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 } connection "databricks_qa" { plugin = "databricks" - profile = "databricks_qa" + config_profile = "databricks_qa" account_id = wxyzd0f81-9be0-4425-9e29-3a7d96782373 } connection "databricks_prod" { plugin = "databricks" - profile = "databricks_prod" + config_profile = "databricks_prod" account_id = pqrsd0f81-9be0-4425-9e29-3a7d96782373 } ``` @@ -206,19 +206,19 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-account" { plugin = "databricks" - profile = "user1-account" + config_profile = "user1-account" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } connection "databricks_user1-workspace" { plugin = "databricks" - profile = "user1-workspace" + config_profile = "user1-workspace" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } connection "databricks_user1-basic" { plugin = "databricks" - profile = "user1-basic" + config_profile = "user1-basic" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -241,7 +241,7 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-account" { plugin = "databricks" - profile = "user1-account" + config_profile = "user1-account" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -264,7 +264,7 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-workspace" { plugin = "databricks" - profile = "user1-workspace" + config_profile = "user1-workspace" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -291,7 +291,7 @@ connection "databricks_user1-workspace" { ### Credentials from Environment Variables -Alternatively, you can also use the standard Databricks environment variables to obtain credentials **only if other argument (`profile`, `account_id`, `account_token`/`account_host`/`workspace_token`/`workspace_host`) is not specified** in the connection: +Alternatively, you can also use the standard Databricks environment variables to obtain credentials **only if other argument (`config_profile`, `account_id`, `account_token`/`account_host`/`workspace_token`/`workspace_host`) is not specified** in the connection: ```sh export DATABRICKS_CONFIG_PROFILE=user1-test From 7b950c3505e65dab03f0217da75476c4ee31f28e Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 25 Aug 2023 14:12:32 +0530 Subject: [PATCH 4/8] Update table names --- README.md | 4 +- config/databricks.spc | 4 +- databricks/connection_config.go | 6 +- databricks/plugin.go | 10 +-- databricks/service.go | 13 +--- ...ks_jobs_job.go => table_databricks_job.go} | 30 ++++----- ...job_run.go => table_databricks_job_run.go} | 20 +++--- ...peline.go => table_databricks_pipeline.go} | 62 +++++++++---------- ....go => table_databricks_pipeline_event.go} | 14 ++--- ...go => table_databricks_pipeline_update.go} | 22 +++---- docs/index.md | 24 +++---- ...tabricks_jobs_job.md => databricks_job.md} | 28 ++++----- ..._jobs_job_run.md => databricks_job_run.md} | 16 ++--- ...nes_pipeline.md => databricks_pipeline.md} | 28 ++++----- ..._event.md => databricks_pipeline_event.md} | 16 ++--- ...pdate.md => databricks_pipeline_update.md} | 14 ++--- 16 files changed, 152 insertions(+), 159 deletions(-) rename databricks/{table_databricks_jobs_job.go => table_databricks_job.go} (88%) rename 
databricks/{table_databricks_jobs_job_run.go => table_databricks_job_run.go} (90%) rename databricks/{table_databricks_pipelines_pipeline.go => table_databricks_pipeline.go} (80%) rename databricks/{table_databricks_pipelines_pipeline_event.go => table_databricks_pipeline_event.go} (85%) rename databricks/{table_databricks_pipelines_pipeline_update.go => table_databricks_pipeline_update.go} (81%) rename docs/tables/{databricks_jobs_job.md => databricks_job.md} (92%) rename docs/tables/{databricks_jobs_job_run.md => databricks_job_run.md} (90%) rename docs/tables/{databricks_pipelines_pipeline.md => databricks_pipeline.md} (88%) rename docs/tables/{databricks_pipelines_pipeline_event.md => databricks_pipeline_event.md} (85%) rename docs/tables/{databricks_pipelines_pipeline_update.md => databricks_pipeline_update.md} (83%) diff --git a/README.md b/README.md index 7151a25..5197baa 100644 --- a/README.md +++ b/README.md @@ -29,9 +29,9 @@ connection "databricks" { # A connection profile specified within .databrickscfg to use instead of DEFAULT. # This can also be set via the `DATABRICKS_CONFIG_PROFILE` environment variable. - # config_profile = "databricks-dev" + # profile = "databricks-dev" - # The target Databricks account ID. Required. + # The target Databricks account ID. # This can also be set via the `DATABRICKS_ACCOUNT_ID` environment variable. # See Locate your account ID: https://docs.databricks.com/administration-guide/account-settings/index.html#account-id. # account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" diff --git a/config/databricks.spc b/config/databricks.spc index 4efda52..b1e0ead 100644 --- a/config/databricks.spc +++ b/config/databricks.spc @@ -3,9 +3,9 @@ connection "databricks" { # A connection profile specified within .databrickscfg to use instead of DEFAULT. # This can also be set via the `DATABRICKS_CONFIG_PROFILE` environment variable. - # config_profile = "databricks-dev" + # profile = "databricks-dev" - # The target Databricks account ID. Required. + # The target Databricks account ID. # This can also be set via the `DATABRICKS_ACCOUNT_ID` environment variable. # See Locate your account ID: https://docs.databricks.com/administration-guide/account-settings/index.html#account-id. 
# account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" diff --git a/databricks/connection_config.go b/databricks/connection_config.go index 2670faf..50c7414 100644 --- a/databricks/connection_config.go +++ b/databricks/connection_config.go @@ -11,7 +11,7 @@ type databricksConfig struct { WorkspaceToken *string `cty:"workspace_token"` WorkspaceHost *string `cty:"workspace_host"` AccountId *string `cty:"account_id"` - ConfigProfile *string `cty:"config_profile"` + ConfigProfile *string `cty:"profile"` ConfigFile *string `cty:"config_file"` DataUsername *string `cty:"username"` DataPassword *string `cty:"password"` @@ -35,10 +35,10 @@ var ConfigSchema = map[string]*schema.Attribute{ Type: schema.TypeString, }, "account_id": { - Required: true, + Required: false, Type: schema.TypeString, }, - "config_profile": { + "profile": { Required: false, Type: schema.TypeString, }, diff --git a/databricks/plugin.go b/databricks/plugin.go index c7d76fc..38e0af2 100644 --- a/databricks/plugin.go +++ b/databricks/plugin.go @@ -47,14 +47,14 @@ func Plugin(ctx context.Context) *plugin.Plugin { "databricks_iam_group": tableDatabricksIAMGroup(ctx), "databricks_iam_service_principal": tableDatabricksIAMServicePrincipal(ctx), "databricks_iam_user": tableDatabricksIAMUser(ctx), - "databricks_jobs_job": tableDatabricksJobsJob(ctx), - "databricks_jobs_job_run": tableDatabricksJobsJobRun(ctx), + "databricks_job": tableDatabricksJob(ctx), + "databricks_job_run": tableDatabricksJobRun(ctx), "databricks_ml_experiment": tableDatabricksMLExperiment(ctx), "databricks_ml_model": tableDatabricksMLModel(ctx), "databricks_ml_webhook": tableDatabricksMLWebhook(ctx), - "databricks_pipelines_pipeline": tableDatabricksPipelinesPipeline(ctx), - "databricks_pipelines_pipeline_event": tableDatabricksPipelinesPipelineEvent(ctx), - "databricks_pipelines_pipeline_update": tableDatabricksPipelinesPipelineUpdate(ctx), + "databricks_pipeline": tableDatabricksPipeline(ctx), + "databricks_pipeline_event": tableDatabricksPipelineEvent(ctx), + "databricks_pipeline_update": tableDatabricksPipelineUpdate(ctx), "databricks_serving_serving_endpoint": tableDatabricksServingServingEndpoint(ctx), "databricks_settings_ip_access_list": tableDatabricksSettingsIpAccessList(ctx), "databricks_settings_token": tableDatabricksSettingsToken(ctx), diff --git a/databricks/service.go b/databricks/service.go index fa05975..15a819b 100644 --- a/databricks/service.go +++ b/databricks/service.go @@ -2,7 +2,6 @@ package databricks import ( "context" - "errors" "fmt" "os" @@ -38,21 +37,15 @@ func connectDatabricksAccount(ctx context.Context, d *plugin.QueryData) (*databr } if databricksConfig.DataPassword != nil { os.Setenv("DATABRICKS_PASSWORD", *databricksConfig.DataPassword) - } else if os.Getenv("DATABRICKS_PASSWORD") == "" || os.Getenv("DATABRICKS_USERNAME") == "" { - return nil, errors.New("account_token or username and password must be configured") } } if databricksConfig.AccountHost != nil { os.Setenv("DATABRICKS_HOST", *databricksConfig.AccountHost) - } else if os.Getenv("DATABRICKS_HOST") == "" { - return nil, errors.New("account_host must be configured") } if databricksConfig.AccountId != nil { os.Setenv("DATABRICKS_ACCOUNT_ID", *databricksConfig.AccountId) - } else if os.Getenv("DATABRICKS_ACCOUNT_ID") == "" { - return nil, errors.New("account_id must be configured") } } @@ -96,20 +89,20 @@ func connectDatabricksWorkspace(ctx context.Context, d *plugin.QueryData) (*data if databricksConfig.DataPassword != nil { os.Setenv("DATABRICKS_PASSWORD", 
*databricksConfig.DataPassword) } else if os.Getenv("DATABRICKS_PASSWORD") == "" || os.Getenv("DATABRICKS_USERNAME") == "" { - return nil, errors.New("workspace_token or username and password must be configured") + // return nil, errors.New("workspace_token or username and password must be configured") } } if databricksConfig.WorkspaceHost != nil { os.Setenv("DATABRICKS_HOST", *databricksConfig.WorkspaceHost) } else if os.Getenv("DATABRICKS_HOST") == "" { - return nil, errors.New("workspace_host must be configured") + // return nil, errors.New("workspace_host must be configured") } if databricksConfig.AccountId != nil { os.Setenv("DATABRICKS_ACCOUNT_ID", *databricksConfig.AccountId) } else if os.Getenv("DATABRICKS_ACCOUNT_ID") == "" { - return nil, errors.New("account_id must be configured") + // return nil, errors.New("account_id must be configured") } } diff --git a/databricks/table_databricks_jobs_job.go b/databricks/table_databricks_job.go similarity index 88% rename from databricks/table_databricks_jobs_job.go rename to databricks/table_databricks_job.go index 980df19..3c5dcc6 100644 --- a/databricks/table_databricks_jobs_job.go +++ b/databricks/table_databricks_job.go @@ -13,17 +13,17 @@ import ( //// TABLE DEFINITION -func tableDatabricksJobsJob(_ context.Context) *plugin.Table { +func tableDatabricksJob(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_jobs_job", + Name: "databricks_job", Description: "Get details for all the jobs associated with a Databricks workspace.", List: &plugin.ListConfig{ - Hydrate: listJobsJobs, + Hydrate: listJobs, KeyColumns: plugin.OptionalColumns([]string{"name"}), }, Get: &plugin.GetConfig{ KeyColumns: plugin.SingleColumn("job_id"), - Hydrate: getJobsJob, + Hydrate: getJob, }, Columns: databricksAccountColumns([]*plugin.Column{ { @@ -107,7 +107,7 @@ func tableDatabricksJobsJob(_ context.Context) *plugin.Table { Name: "job_permissions", Description: "A list of job-level permissions.", Type: proto.ColumnType_JSON, - Hydrate: getJobsJobPermissions, + Hydrate: getJobPermissions, Transform: transform.FromValue(), }, { @@ -156,7 +156,7 @@ func tableDatabricksJobsJob(_ context.Context) *plugin.Table { Name: "trigger_history", Description: "History of the file arrival trigger associated with the job.", Type: proto.ColumnType_JSON, - Hydrate: getJobsJob, + Hydrate: getJob, }, { Name: "webhook_notifications", @@ -178,7 +178,7 @@ func tableDatabricksJobsJob(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listJobsJobs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listJobs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) // Limiting the results @@ -201,14 +201,14 @@ func listJobsJobs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateDat // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_jobs_job.listJobsJobs", "connection_error", err) + logger.Error("databricks_job.listJobs", "connection_error", err) return nil, err } for { response, err := client.Jobs.Impl().List(ctx, request) if err != nil { - logger.Error("databricks_jobs_job.listJobsJobs", "api_error", err) + logger.Error("databricks_job.listJobs", "api_error", err) return nil, err } @@ -231,7 +231,7 @@ func listJobsJobs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateDat //// HYDRATE FUNCTIONS -func getJobsJob(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) 
(interface{}, error) { +func getJob(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) var id int64 if h.Item != nil { @@ -248,27 +248,27 @@ func getJobsJob(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_jobs_job.getJobsJob", "connection_error", err) + logger.Error("databricks_job.getJob", "connection_error", err) return nil, err } job, err := client.Jobs.GetByJobId(ctx, id) if err != nil { - logger.Error("databricks_jobs_job.getJobsJob", "api_error", err) + logger.Error("databricks_job.getJob", "api_error", err) return nil, err } return *job, nil } -func getJobsJobPermissions(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func getJobPermissions(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) id := getJobId(h.Item) // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_jobs_job.getJobsJobPermissions", "connection_error", err) + logger.Error("databricks_job.getJobPermissions", "connection_error", err) return nil, err } @@ -279,7 +279,7 @@ func getJobsJobPermissions(ctx context.Context, d *plugin.QueryData, h *plugin.H permission, err := client.Permissions.Get(ctx, request) if err != nil { - logger.Error("databricks_jobs_job.getJobsJobPermissions", "api_error", err) + logger.Error("databricks_job.getJobPermissions", "api_error", err) return nil, err } return permission, nil diff --git a/databricks/table_databricks_jobs_job_run.go b/databricks/table_databricks_job_run.go similarity index 90% rename from databricks/table_databricks_jobs_job_run.go rename to databricks/table_databricks_job_run.go index cb525e5..02f71e9 100644 --- a/databricks/table_databricks_jobs_job_run.go +++ b/databricks/table_databricks_job_run.go @@ -11,17 +11,17 @@ import ( //// TABLE DEFINITION -func tableDatabricksJobsJobRun(_ context.Context) *plugin.Table { +func tableDatabricksJobRun(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_jobs_job_run", + Name: "databricks_job_run", Description: "List details for all the job runs.", List: &plugin.ListConfig{ KeyColumns: plugin.OptionalColumns([]string{"job_id", "run_type"}), - Hydrate: listJobsJobRuns, + Hydrate: listJobRuns, }, Get: &plugin.GetConfig{ KeyColumns: plugin.SingleColumn("run_id"), - Hydrate: getJobsJobRun, + Hydrate: getJobRun, }, Columns: databricksAccountColumns([]*plugin.Column{ { @@ -177,7 +177,7 @@ func tableDatabricksJobsJobRun(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listJobsJobRuns(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listJobRuns(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) // Limiting the results @@ -203,14 +203,14 @@ func listJobsJobRuns(ctx context.Context, d *plugin.QueryData, h *plugin.Hydrate // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_jobs_job_run.listJobsJobRuns", "connection_error", err) + logger.Error("databricks_job_run.listJobRuns", "connection_error", err) return nil, err } for { response, err := client.Jobs.Impl().ListRuns(ctx, request) if err != nil { - logger.Error("databricks_jobs_job_run.listJobsJobRuns", "api_error", err) + 
logger.Error("databricks_job_run.listJobRuns", "api_error", err) return nil, err } @@ -233,7 +233,7 @@ func listJobsJobRuns(ctx context.Context, d *plugin.QueryData, h *plugin.Hydrate //// HYDRATE FUNCTIONS -func getJobsJobRun(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { +func getJobRun(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) id := d.EqualsQuals["run_id"].GetInt64Value() @@ -245,7 +245,7 @@ func getJobsJobRun(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateDa // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_jobs_job_run.getJobsJobRun", "connection_error", err) + logger.Error("databricks_job_run.getJobRun", "connection_error", err) return nil, err } @@ -255,7 +255,7 @@ func getJobsJobRun(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateDa run, err := client.Jobs.GetRun(ctx, request) if err != nil { - logger.Error("databricks_jobs_job_run.getJobsJobRun", "api_error", err) + logger.Error("databricks_job_run.getJobRun", "api_error", err) return nil, err } diff --git a/databricks/table_databricks_pipelines_pipeline.go b/databricks/table_databricks_pipeline.go similarity index 80% rename from databricks/table_databricks_pipelines_pipeline.go rename to databricks/table_databricks_pipeline.go index baa3b4e..4ff9d76 100644 --- a/databricks/table_databricks_pipelines_pipeline.go +++ b/databricks/table_databricks_pipeline.go @@ -12,16 +12,16 @@ import ( //// TABLE DEFINITION -func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { +func tableDatabricksPipeline(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_pipelines_pipeline", + Name: "databricks_pipeline", Description: "List pipelines defined in the Delta Live Tables system.", List: &plugin.ListConfig{ - Hydrate: listPipelinesPipelines, + Hydrate: listPipelines, }, Get: &plugin.GetConfig{ KeyColumns: plugin.SingleColumn("pipeline_id"), - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, }, Columns: databricksAccountColumns([]*plugin.Column{ { @@ -38,20 +38,20 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "catalog", Description: "A catalog in Unity Catalog to publish data from this pipeline to.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Catalog"), }, { Name: "cause", Description: "An optional message detailing the cause of the pipeline state.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, }, { Name: "channel", Description: "DLT Release Channel that specifies which version to use.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Channel"), }, { @@ -63,7 +63,7 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "continuous", Description: "Whether the pipeline is continuous or triggered.", Type: proto.ColumnType_BOOL, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Continuous"), }, { @@ -75,34 +75,34 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "development", Description: "Whether the pipeline is in Development mode.", Type: proto.ColumnType_BOOL, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Development"), }, 
{ Name: "edition", Description: "Pipeline product edition.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Edition"), }, { Name: "health", Description: "The health of the pipeline.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, }, { Name: "last_modified", Description: "The last time the pipeline settings were modified or created.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromGo().Transform(transform.UnixMsToTimestamp), }, { Name: "photon", Description: "Whether photon is enabled for this pipeline.", Type: proto.ColumnType_BOOL, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Photon"), }, { @@ -114,7 +114,7 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "serverless", Description: "Whether serverless compute is enabled for this pipeline.", Type: proto.ColumnType_BOOL, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Serverless"), }, { @@ -126,14 +126,14 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "storage", Description: "DBFS root directory for storing checkpoints and tables.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Storage"), }, { Name: "target", Description: "Target schema (database) to add tables in this pipeline to.", Type: proto.ColumnType_STRING, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Target"), }, @@ -142,21 +142,21 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "clusters", Description: "Cluster settings for this pipeline deployment.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Clusters"), }, { Name: "configuration", Description: "String-String configuration for this pipeline execution.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Configuration"), }, { Name: "filters", Description: "Filters on which Pipeline packages to include in the deployed graph.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Filters"), }, { @@ -168,21 +168,21 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { Name: "libraries", Description: "Libraries or code needed by this deployment.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Libraries"), }, { Name: "pipeline_permissions", Description: "Permissions for this pipeline.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipelinePermissions, + Hydrate: getPipelinePermissions, Transform: transform.FromValue(), }, { Name: "trigger", Description: "Which pipeline trigger to use.", Type: proto.ColumnType_JSON, - Hydrate: getPipelinesPipeline, + Hydrate: getPipeline, Transform: transform.FromField("Spec.Trigger"), }, @@ -199,7 +199,7 @@ func tableDatabricksPipelinesPipeline(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listPipelinesPipelines(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listPipelines(ctx context.Context, d *plugin.QueryData, h 
*plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) // Limiting the results @@ -218,14 +218,14 @@ func listPipelinesPipelines(ctx context.Context, d *plugin.QueryData, h *plugin. // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline.listPipelinesPipelines", "connection_error", err) + logger.Error("databricks_pipeline.listPipelines", "connection_error", err) return nil, err } for { response, err := client.Pipelines.Impl().ListPipelines(ctx, request) if err != nil { - logger.Error("databricks_pipelines_pipeline.listPipelinesPipelines", "api_error", err) + logger.Error("databricks_pipeline.listPipelines", "api_error", err) return nil, err } @@ -247,7 +247,7 @@ func listPipelinesPipelines(ctx context.Context, d *plugin.QueryData, h *plugin. //// HYDRATE FUNCTIONS -func getPipelinesPipeline(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func getPipeline(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) var id string if h.Item != nil { @@ -264,26 +264,26 @@ func getPipelinesPipeline(ctx context.Context, d *plugin.QueryData, h *plugin.Hy // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline.getPipelinesPipeline", "connection_error", err) + logger.Error("databricks_pipeline.getPipeline", "connection_error", err) return nil, err } pipeline, err := client.Pipelines.GetByPipelineId(ctx, id) if err != nil { - logger.Error("databricks_pipelines_pipeline.getPipelinesPipeline", "api_error", err) + logger.Error("databricks_pipeline.getPipeline", "api_error", err) return nil, err } return *pipeline, nil } -func getPipelinesPipelinePermissions(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func getPipelinePermissions(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) id := getPipelineId(h.Item) // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline.getPipelinesPipelinePermissions", "connection_error", err) + logger.Error("databricks_pipeline.getPipelinePermissions", "connection_error", err) return nil, err } @@ -294,7 +294,7 @@ func getPipelinesPipelinePermissions(ctx context.Context, d *plugin.QueryData, h permission, err := client.Permissions.Get(ctx, request) if err != nil { - logger.Error("databricks_pipelines_pipeline.getPipelinesPipelinePermissions", "api_error", err) + logger.Error("databricks_pipeline.getPipelinePermissions", "api_error", err) return nil, err } return permission, nil diff --git a/databricks/table_databricks_pipelines_pipeline_event.go b/databricks/table_databricks_pipeline_event.go similarity index 85% rename from databricks/table_databricks_pipelines_pipeline_event.go rename to databricks/table_databricks_pipeline_event.go index bc8e1b1..939ada8 100644 --- a/databricks/table_databricks_pipelines_pipeline_event.go +++ b/databricks/table_databricks_pipeline_event.go @@ -11,13 +11,13 @@ import ( //// TABLE DEFINITION -func tableDatabricksPipelinesPipelineEvent(_ context.Context) *plugin.Table { +func tableDatabricksPipelineEvent(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_pipelines_pipeline_event", + Name: "databricks_pipeline_event", Description: "Retrieves events for a pipeline.", List: 
&plugin.ListConfig{ - ParentHydrate: listPipelinesPipelines, - Hydrate: listPipelinesPipelineEvents, + ParentHydrate: listPipelines, + Hydrate: listPipelineEvents, KeyColumns: plugin.OptionalColumns([]string{"pipeline_id"}), }, Columns: databricksAccountColumns([]*plugin.Column{ @@ -92,7 +92,7 @@ type pipelineEventInfo struct { //// LIST FUNCTION -func listPipelinesPipelineEvents(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listPipelineEvents(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) pipelineId := h.Item.(pipelines.PipelineStateInfo).PipelineId @@ -117,14 +117,14 @@ func listPipelinesPipelineEvents(ctx context.Context, d *plugin.QueryData, h *pl // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline_event.listPipelinesPipelineEvents", "connection_error", err) + logger.Error("databricks_pipeline_event.listPipelineEvents", "connection_error", err) return nil, err } for { response, err := client.Pipelines.Impl().ListPipelineEvents(ctx, request) if err != nil { - logger.Error("databricks_pipelines_pipeline_event.listPipelinesPipelineEvents", "api_error", err) + logger.Error("databricks_pipeline_event.listPipelineEvents", "api_error", err) return nil, err } diff --git a/databricks/table_databricks_pipelines_pipeline_update.go b/databricks/table_databricks_pipeline_update.go similarity index 81% rename from databricks/table_databricks_pipelines_pipeline_update.go rename to databricks/table_databricks_pipeline_update.go index 32219ec..bc914d3 100644 --- a/databricks/table_databricks_pipelines_pipeline_update.go +++ b/databricks/table_databricks_pipeline_update.go @@ -11,18 +11,18 @@ import ( //// TABLE DEFINITION -func tableDatabricksPipelinesPipelineUpdate(_ context.Context) *plugin.Table { +func tableDatabricksPipelineUpdate(_ context.Context) *plugin.Table { return &plugin.Table{ - Name: "databricks_pipelines_pipeline_update", + Name: "databricks_pipeline_update", Description: "List updates for an active pipeline.", List: &plugin.ListConfig{ - ParentHydrate: listPipelinesPipelines, - Hydrate: listPipelinesPipelineUpdates, + ParentHydrate: listPipelines, + Hydrate: listPipelineUpdates, KeyColumns: plugin.OptionalColumns([]string{"pipeline_id"}), }, Get: &plugin.GetConfig{ KeyColumns: plugin.AllColumns([]string{"pipeline_id", "update_id"}), - Hydrate: getPipelinesPipelineUpdate, + Hydrate: getPipelineUpdate, }, Columns: databricksAccountColumns([]*plugin.Column{ { @@ -92,7 +92,7 @@ func tableDatabricksPipelinesPipelineUpdate(_ context.Context) *plugin.Table { //// LIST FUNCTION -func listPipelinesPipelineUpdates(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { +func listPipelineUpdates(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) pipelineId := h.Item.(pipelines.PipelineStateInfo).PipelineId @@ -117,14 +117,14 @@ func listPipelinesPipelineUpdates(ctx context.Context, d *plugin.QueryData, h *p // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline_update.listPipelinesPipelineUpdates", "connection_error", err) + logger.Error("databricks_pipeline_update.listPipelineUpdates", "connection_error", err) return nil, err } for { response, err := client.Pipelines.Impl().ListUpdates(ctx, request) if err != nil { - 
logger.Error("databricks_pipelines_pipeline_update.listPipelinesPipelineUpdates", "api_error", err) + logger.Error("databricks_pipeline_update.listPipelineUpdates", "api_error", err) return nil, err } @@ -146,7 +146,7 @@ func listPipelinesPipelineUpdates(ctx context.Context, d *plugin.QueryData, h *p //// HYDRATE FUNCTIONS -func getPipelinesPipelineUpdate(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { +func getPipelineUpdate(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { logger := plugin.Logger(ctx) pipelineId := d.EqualsQualString("pipeline_id") updateId := d.EqualsQualString("update_id") @@ -159,13 +159,13 @@ func getPipelinesPipelineUpdate(ctx context.Context, d *plugin.QueryData, _ *plu // Create client client, err := connectDatabricksWorkspace(ctx, d) if err != nil { - logger.Error("databricks_pipelines_pipeline_update.getPipelinesPipelineUpdate", "connection_error", err) + logger.Error("databricks_pipeline_update.getPipelineUpdate", "connection_error", err) return nil, err } update, err := client.Pipelines.GetUpdateByPipelineIdAndUpdateId(ctx, pipelineId, updateId) if err != nil { - logger.Error("databricks_pipelines_pipeline_update.getPipelinesPipelineUpdate", "api_error", err) + logger.Error("databricks_pipeline_update.getPipelineUpdate", "api_error", err) return nil, err } return *update.Update, nil diff --git a/docs/index.md b/docs/index.md index 798aa21..fc49982 100644 --- a/docs/index.md +++ b/docs/index.md @@ -74,9 +74,9 @@ connection "databricks" { # A connection profile specified within .databrickscfg to use instead of DEFAULT. # This can also be set via the `DATABRICKS_CONFIG_PROFILE` environment variable. - # config_profile = "databricks-dev" + # profile = "databricks-dev" - # The target Databricks account ID. Required. + # The target Databricks account ID. # This can also be set via the `DATABRICKS_ACCOUNT_ID` environment variable. # See Locate your account ID: https://docs.databricks.com/administration-guide/account-settings/index.html#account-id. # account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" @@ -114,7 +114,7 @@ connection "databricks" { } ``` -You can customize your experience using configuration options for [querying multiple accounts](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). +By default, all options are commented out in the default connection, thus Steampipe will resolve your credentials using the same mechanism as the Databricks CLI (Databricks environment variables, default profile, etc). This provides a quick way to get started with Steampipe, but you will probably want to customize your experience using configuration options for [querying multiple accounts](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). 
## Multi-Account Connections @@ -122,19 +122,19 @@ You may create multiple databricks connections: ```hcl connection "databricks_dev" { plugin = "databricks" - config_profile = "databricks_dev" + profile = "databricks_dev" account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 } connection "databricks_qa" { plugin = "databricks" - config_profile = "databricks_qa" + profile = "databricks_qa" account_id = wxyzd0f81-9be0-4425-9e29-3a7d96782373 } connection "databricks_prod" { plugin = "databricks" - config_profile = "databricks_prod" + profile = "databricks_prod" account_id = pqrsd0f81-9be0-4425-9e29-3a7d96782373 } ``` @@ -206,19 +206,19 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-account" { plugin = "databricks" - config_profile = "user1-account" + profile = "user1-account" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } connection "databricks_user1-workspace" { plugin = "databricks" - config_profile = "user1-workspace" + profile = "user1-workspace" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } connection "databricks_user1-basic" { plugin = "databricks" - config_profile = "user1-basic" + profile = "user1-basic" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -241,7 +241,7 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-account" { plugin = "databricks" - config_profile = "user1-account" + profile = "user1-account" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -264,7 +264,7 @@ account_id = abcdd0f81-9be0-4425-9e29-3a7d96782373 ```hcl connection "databricks_user1-workspace" { plugin = "databricks" - config_profile = "user1-workspace" + profile = "user1-workspace" account_id = "abcdd0f81-9be0-4425-9e29-3a7d96782373" } ``` @@ -291,7 +291,7 @@ connection "databricks_user1-workspace" { ### Credentials from Environment Variables -Alternatively, you can also use the standard Databricks environment variables to obtain credentials **only if other argument (`config_profile`, `account_id`, `account_token`/`account_host`/`workspace_token`/`workspace_host`) is not specified** in the connection: +Alternatively, you can also use the standard Databricks environment variables to obtain credentials **only if other argument (`profile`, `account_id`, `account_token`/`account_host`/`workspace_token`/`workspace_host`) is not specified** in the connection: ```sh export DATABRICKS_CONFIG_PROFILE=user1-test diff --git a/docs/tables/databricks_jobs_job.md b/docs/tables/databricks_job.md similarity index 92% rename from docs/tables/databricks_jobs_job.md rename to docs/tables/databricks_job.md index 533ae9e..83e3352 100644 --- a/docs/tables/databricks_jobs_job.md +++ b/docs/tables/databricks_job.md @@ -1,4 +1,4 @@ -# Table: databricks_jobs_job +# Table: databricks_job You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. 
@@ -16,7 +16,7 @@ select format, account_id from - databricks_jobs_job; + databricks_job; ``` ### Get compute requirements for each job @@ -29,7 +29,7 @@ select compute ->> 'spec' as compute_spec, account_id from - databricks_jobs_job; + databricks_job; ``` ### List all continuous jobs @@ -42,7 +42,7 @@ select continuous ->> 'pause_status' as pause_status, account_id from - databricks_jobs_job + databricks_job where continuous is not null; ``` @@ -59,7 +59,7 @@ select email_notifications ->> 'no_alert_for_skipped_runs' as no_alert_for_skipped_runs, account_id from - databricks_jobs_job; + databricks_job; ``` ### Get git settings for each job @@ -76,7 +76,7 @@ select git_source ->> 'git_url' as git_url, account_id from - databricks_jobs_job + databricks_job where git_source is not null; ``` @@ -92,7 +92,7 @@ select jc -> 'new_cluster' ->> 'cluster_source' as new_cluster_source, account_id from - databricks_jobs_job, + databricks_job, jsonb_array_elements(job_clusters) as jc where job_clusters is not null; @@ -109,7 +109,7 @@ select schedule ->> 'timezone_id' as timezone_id, account_id from - databricks_jobs_job + databricks_job where schedule is not null; ``` @@ -131,7 +131,7 @@ select t ->> 'depends_on' as depends_on, account_id from - databricks_jobs_job, + databricks_job, jsonb_array_elements(tasks) as t where tasks is not null; @@ -147,7 +147,7 @@ select trigger ->> 'pause_status' as pause_status, account_id from - databricks_jobs_job + databricks_job where trigger is not null; ``` @@ -163,7 +163,7 @@ select trigger_history ->> 'last_triggered' as last_triggered, account_id from - databricks_jobs_job + databricks_job where trigger_history is not null; ``` @@ -178,7 +178,7 @@ select acl ->> 'group_name' as principal_group_name, acl ->> 'all_permissions' as permission_level from - databricks_jobs_job, + databricks_job, jsonb_array_elements(job_permissions -> 'access_control_list') as acl; ``` @@ -189,7 +189,7 @@ select account_id, count(*) as job_count from - databricks_jobs_job + databricks_job group by account_id order by @@ -205,5 +205,5 @@ select name, jsonb_pretty(webhook_notifications) as notification_ids from - databricks_jobs_job; + databricks_job; ``` \ No newline at end of file diff --git a/docs/tables/databricks_jobs_job_run.md b/docs/tables/databricks_job_run.md similarity index 90% rename from docs/tables/databricks_jobs_job_run.md rename to docs/tables/databricks_job_run.md index 86afd8d..67fb9dd 100644 --- a/docs/tables/databricks_jobs_job_run.md +++ b/docs/tables/databricks_job_run.md @@ -1,4 +1,4 @@ -# Table: databricks_jobs_job_run +# Table: databricks_job_run You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Job run is an instance of a job that is triggered by a scheduler or manually. 
@@ -17,7 +17,7 @@ select run_duration as run_duration_ms, account_id from - databricks_jobs_job_run; + databricks_job_run; ``` ### Get total runs per job @@ -27,7 +27,7 @@ select job_id, count(*) as total_runs from - databricks_jobs_job_run + databricks_job_run group by job_id; ``` @@ -40,7 +40,7 @@ select date_trunc('day', start_time) as day, count(*) as total_runs from - databricks_jobs_job_run + databricks_job_run group by job_id, day @@ -61,7 +61,7 @@ select state ->> 'result_state', account_id from - databricks_jobs_job_run + databricks_job_run order by attempt_number desc limit 1; @@ -85,7 +85,7 @@ select t ->> 'state' as state, account_id from - databricks_jobs_job_run, + databricks_job_run, jsonb_array_elements(tasks) as t where tasks is not null; @@ -104,7 +104,7 @@ select run_duration as run_duration_ms, account_id from - databricks_jobs_job_run + databricks_job_run where state ->> 'life_cycle_state' = 'WAITING_FOR_RETRY'; ``` @@ -122,7 +122,7 @@ select run_duration as run_duration_ms, account_id from - databricks_jobs_job_run + databricks_job_run where job_id = '572473586420586' and original_attempt_run_id <> run_id; diff --git a/docs/tables/databricks_pipelines_pipeline.md b/docs/tables/databricks_pipeline.md similarity index 88% rename from docs/tables/databricks_pipelines_pipeline.md rename to docs/tables/databricks_pipeline.md index 6b6cdb1..a02ad95 100644 --- a/docs/tables/databricks_pipelines_pipeline.md +++ b/docs/tables/databricks_pipeline.md @@ -1,4 +1,4 @@ -# Table: databricks_pipelines_pipeline +# Table: databricks_pipeline Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. 
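As an extra illustration of the renamed table, continuous and triggered pipelines can be counted using only columns already used in the examples below:

```sql
select
  continuous,
  count(*) as pipeline_count
from
  databricks_pipeline
group by
  continuous;
```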
@@ -16,7 +16,7 @@ select edition, account_id from - databricks_pipelines_pipeline; + databricks_pipeline; ``` ### List pipelines that failed to start @@ -30,7 +30,7 @@ select state, account_id from - databricks_pipelines_pipeline + databricks_pipeline where state = 'FAILED'; ``` @@ -49,7 +49,7 @@ select c.runtime_engine, c.account_id from - databricks_pipelines_pipeline p, + databricks_pipeline p, databricks_compute_cluster c where p.cluster_id = c.cluster_id @@ -68,7 +68,7 @@ select u ->> 'update_id' as update_id, account_id from - databricks_pipelines_pipeline p, + databricks_pipeline p, jsonb_array_elements(p.latest_updates) as u where u ->> 'state' = 'COMPLETED' @@ -88,7 +88,7 @@ select u ->> 'update_id' as update_id, account_id from - databricks_pipelines_pipeline p, + databricks_pipeline p, jsonb_array_elements(p.latest_updates) as u where u ->> 'state' = 'FAILED' @@ -108,7 +108,7 @@ select state, account_id from - databricks_pipelines_pipeline + databricks_pipeline where catalog is not null; ``` @@ -124,7 +124,7 @@ select state, account_id from - databricks_pipelines_pipeline + databricks_pipeline where not continuous; ``` @@ -140,7 +140,7 @@ select state, account_id from - databricks_pipelines_pipeline + databricks_pipeline where health = 'UNHEALTHY'; ``` @@ -155,7 +155,7 @@ select state, account_id from - databricks_pipelines_pipeline + databricks_pipeline where development; ``` @@ -170,7 +170,7 @@ select acl ->> 'group_name' as principal_group_name, acl ->> 'all_permissions' as permission_level from - databricks_pipelines_pipeline, + databricks_pipeline, jsonb_array_elements(pipeline_permissions -> 'access_control_list') as acl; ``` @@ -187,7 +187,7 @@ select l -> 'file' ->> 'path' as file_path, account_id from - databricks_pipelines_pipeline, + databricks_pipeline, jsonb_array_elements(libraries) as l; ``` @@ -201,7 +201,7 @@ select trigger ->> 'manual' as is_manual, account_id from - databricks_pipelines_pipeline + databricks_pipeline where trigger is not null; ``` @@ -219,6 +219,6 @@ select c ->> 'policy_id' as policy_id, account_id from - databricks_pipelines_pipeline, + databricks_pipeline, jsonb_array_elements(clusters) as c; ``` \ No newline at end of file diff --git a/docs/tables/databricks_pipelines_pipeline_event.md b/docs/tables/databricks_pipeline_event.md similarity index 85% rename from docs/tables/databricks_pipelines_pipeline_event.md rename to docs/tables/databricks_pipeline_event.md index d2ffcec..29cb01c 100644 --- a/docs/tables/databricks_pipelines_pipeline_event.md +++ b/docs/tables/databricks_pipeline_event.md @@ -1,4 +1,4 @@ -# Table: databricks_pipelines_pipeline_event +# Table: databricks_pipeline_event Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. Events are the pipeline event logs. 
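A simple way to gauge pipeline health from this renamed table is to count events per severity level; this sketch uses only the `level` column already shown below:

```sql
select
  level,
  count(*) as event_count
from
  databricks_pipeline_event
group by
  level
order by
  event_count desc;
```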
@@ -16,7 +16,7 @@ select message, account_id from - databricks_pipelines_pipeline_event; + databricks_pipeline_event; ``` ### List events between a specific time range @@ -31,7 +31,7 @@ select message, account_id from - databricks_pipelines_pipeline_event + databricks_pipeline_event where timestamp between '2023-07-27T02:00:00' and '2023-07-27T22:00:00'; ``` @@ -49,7 +49,7 @@ select error ->> 'fatal' as fatal, account_id from - databricks_pipelines_pipeline_event + databricks_pipeline_event where level = 'ERROR'; ``` @@ -70,7 +70,7 @@ select origin ->> 'update_id' as origin_update_id, account_id from - databricks_pipelines_pipeline_event; + databricks_pipeline_event; ``` ### List all events caused due to user actions @@ -85,7 +85,7 @@ select message, account_id from - databricks_pipelines_pipeline_event + databricks_pipeline_event where event_type = 'user_action'; ``` @@ -102,7 +102,7 @@ select message, account_id from - databricks_pipelines_pipeline_event + databricks_pipeline_event where maturity_level = 'STABLE'; ``` @@ -114,7 +114,7 @@ select account_id, count(*) as event_count from - databricks_pipelines_pipeline_event + databricks_pipeline_event group by account_id order by diff --git a/docs/tables/databricks_pipelines_pipeline_update.md b/docs/tables/databricks_pipeline_update.md similarity index 83% rename from docs/tables/databricks_pipelines_pipeline_update.md rename to docs/tables/databricks_pipeline_update.md index 381a5b4..08ec358 100644 --- a/docs/tables/databricks_pipelines_pipeline_update.md +++ b/docs/tables/databricks_pipeline_update.md @@ -1,4 +1,4 @@ -# Table: databricks_pipelines_pipeline_update +# Table: databricks_pipeline_update Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. Pipeline updates are the pipeline update logs. 
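The filters shown below can also be combined; for example, failed updates created in the last 7 days:

```sql
select
  cause,
  creation_time,
  account_id
from
  databricks_pipeline_update
where
  state = 'FAILED'
  and creation_time >= now() - interval '7' day;
```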
@@ -15,7 +15,7 @@ select creation_time, account_id from - databricks_pipelines_pipeline_update; + databricks_pipeline_update; ``` ### List updates created in the last 7 days @@ -29,7 +29,7 @@ select creation_time, account_id from - databricks_pipelines_pipeline_update + databricks_pipeline_update where creation_time >= now() - interval '7' day; ``` @@ -45,7 +45,7 @@ select creation_time, account_id from - databricks_pipelines_pipeline_update + databricks_pipeline_update where cause = 'API_CALL'; ``` @@ -61,7 +61,7 @@ select creation_time, account_id from - databricks_pipelines_pipeline_update + databricks_pipeline_update where state = 'FAILED'; ``` @@ -78,7 +78,7 @@ select full_refresh_selection, account_id from - databricks_pipelines_pipeline_update + databricks_pipeline_update where full_refresh; ``` @@ -90,7 +90,7 @@ select account_id, count(*) as update_count from - databricks_pipelines_pipeline_update + databricks_pipeline_update group by account_id order by From 9dc4e2054b05e360a6817700a3477561247aa2fa Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 25 Aug 2023 14:16:10 +0530 Subject: [PATCH 5/8] Update table names --- databricks/service.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/databricks/service.go b/databricks/service.go index 15a819b..e3f536a 100644 --- a/databricks/service.go +++ b/databricks/service.go @@ -88,21 +88,15 @@ func connectDatabricksWorkspace(ctx context.Context, d *plugin.QueryData) (*data } if databricksConfig.DataPassword != nil { os.Setenv("DATABRICKS_PASSWORD", *databricksConfig.DataPassword) - } else if os.Getenv("DATABRICKS_PASSWORD") == "" || os.Getenv("DATABRICKS_USERNAME") == "" { - // return nil, errors.New("workspace_token or username and password must be configured") } } if databricksConfig.WorkspaceHost != nil { os.Setenv("DATABRICKS_HOST", *databricksConfig.WorkspaceHost) - } else if os.Getenv("DATABRICKS_HOST") == "" { - // return nil, errors.New("workspace_host must be configured") } if databricksConfig.AccountId != nil { os.Setenv("DATABRICKS_ACCOUNT_ID", *databricksConfig.AccountId) - } else if os.Getenv("DATABRICKS_ACCOUNT_ID") == "" { - // return nil, errors.New("account_id must be configured") } } From 0d1ad27d033496dde8c9d39fde453344781ae725 Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 25 Aug 2023 16:31:16 +0530 Subject: [PATCH 6/8] Update table names --- databricks/connection_config.go | 2 +- databricks/service.go | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/databricks/connection_config.go b/databricks/connection_config.go index 50c7414..31b3b79 100644 --- a/databricks/connection_config.go +++ b/databricks/connection_config.go @@ -35,7 +35,7 @@ var ConfigSchema = map[string]*schema.Attribute{ Type: schema.TypeString, }, "account_id": { - Required: false, + Required: true, Type: schema.TypeString, }, "profile": { diff --git a/databricks/service.go b/databricks/service.go index e3f536a..06154e9 100644 --- a/databricks/service.go +++ b/databricks/service.go @@ -2,6 +2,7 @@ package databricks import ( "context" + "errors" "fmt" "os" @@ -46,6 +47,8 @@ func connectDatabricksAccount(ctx context.Context, d *plugin.QueryData) (*databr if databricksConfig.AccountId != nil { os.Setenv("DATABRICKS_ACCOUNT_ID", *databricksConfig.AccountId) + } else if os.Getenv("DATABRICKS_ACCOUNT_ID") == "" { + return nil, errors.New("account_id must be configured") } } @@ -97,6 +100,8 @@ func connectDatabricksWorkspace(ctx context.Context, d *plugin.QueryData) (*data if databricksConfig.AccountId != nil { 
os.Setenv("DATABRICKS_ACCOUNT_ID", *databricksConfig.AccountId) + } else if os.Getenv("DATABRICKS_ACCOUNT_ID") == "" { + return nil, errors.New("account_id must be configured") } } From 15bb748a515640176ac26f788ced55e0c2164565 Mon Sep 17 00:00:00 2001 From: Karan Popat Date: Fri, 25 Aug 2023 22:29:05 +0530 Subject: [PATCH 7/8] rename config_file to config_file_path --- README.md | 2 +- config/databricks.spc | 2 +- databricks/connection_config.go | 6 +++--- databricks/service.go | 16 ++++++++-------- docs/index.md | 2 +- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 5197baa..489a150 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ connection "databricks" { # A non-default location of the Databricks CLI credentials file. # This can also be set via the `DATABRICKS_CONFIG_FILE` environment variable. - # config_file = "/Users/username/.databrickscfg" + # config_file_path = "/Users/username/.databrickscfg" } ``` diff --git a/config/databricks.spc b/config/databricks.spc index b1e0ead..5ac24d0 100644 --- a/config/databricks.spc +++ b/config/databricks.spc @@ -39,5 +39,5 @@ connection "databricks" { # A non-default location of the Databricks CLI credentials file. # This can also be set via the `DATABRICKS_CONFIG_FILE` environment variable. - # config_file = "/Users/username/.databrickscfg" + # config_file_path = "/Users/username/.databrickscfg" } diff --git a/databricks/connection_config.go b/databricks/connection_config.go index 31b3b79..9c683b8 100644 --- a/databricks/connection_config.go +++ b/databricks/connection_config.go @@ -11,8 +11,8 @@ type databricksConfig struct { WorkspaceToken *string `cty:"workspace_token"` WorkspaceHost *string `cty:"workspace_host"` AccountId *string `cty:"account_id"` - ConfigProfile *string `cty:"profile"` - ConfigFile *string `cty:"config_file"` + Profile *string `cty:"profile"` + ConfigFilePath *string `cty:"config_file_path"` DataUsername *string `cty:"username"` DataPassword *string `cty:"password"` } @@ -42,7 +42,7 @@ var ConfigSchema = map[string]*schema.Attribute{ Required: false, Type: schema.TypeString, }, - "config_file": { + "config_file_path": { Required: false, Type: schema.TypeString, }, diff --git a/databricks/service.go b/databricks/service.go index 06154e9..5477b2e 100644 --- a/databricks/service.go +++ b/databricks/service.go @@ -23,10 +23,10 @@ func connectDatabricksAccount(ctx context.Context, d *plugin.QueryData) (*databr // Default to using env vars (#2) // But prefer the config (#1) - if databricksConfig.ConfigProfile != nil { - os.Setenv("DATABRICKS_CONFIG_PROFILE", *databricksConfig.ConfigProfile) - if databricksConfig.ConfigFile != nil { - os.Setenv("DATABRICKS_CONFIG_FILE", *databricksConfig.ConfigFile) + if databricksConfig.Profile != nil { + os.Setenv("DATABRICKS_CONFIG_PROFILE", *databricksConfig.Profile) + if databricksConfig.ConfigFilePath != nil { + os.Setenv("DATABRICKS_CONFIG_FILE", *databricksConfig.ConfigFilePath) } } else if os.Getenv("DATABRICKS_CONFIG_PROFILE") == "" { if databricksConfig.AccountToken != nil { @@ -76,10 +76,10 @@ func connectDatabricksWorkspace(ctx context.Context, d *plugin.QueryData) (*data // Default to using env vars (#2) // But prefer the config (#1) - if databricksConfig.ConfigProfile != nil { - os.Setenv("DATABRICKS_CONFIG_PROFILE", *databricksConfig.ConfigProfile) - if databricksConfig.ConfigFile != nil { - os.Setenv("DATABRICKS_CONFIG_FILE", *databricksConfig.ConfigFile) + if databricksConfig.Profile != nil { + 
os.Setenv("DATABRICKS_CONFIG_PROFILE", *databricksConfig.Profile) + if databricksConfig.ConfigFilePath != nil { + os.Setenv("DATABRICKS_CONFIG_FILE", *databricksConfig.ConfigFilePath) } } else if os.Getenv("DATABRICKS_CONFIG_PROFILE") == "" { if databricksConfig.WorkspaceToken != nil { diff --git a/docs/index.md b/docs/index.md index fc49982..20402ac 100644 --- a/docs/index.md +++ b/docs/index.md @@ -110,7 +110,7 @@ connection "databricks" { # A non-default location of the Databricks CLI credentials file. # This can also be set via the `DATABRICKS_CONFIG_FILE` environment variable. - # config_file = "/Users/username/.databrickscfg" + # config_file_path = "/Users/username/.databrickscfg" } ``` From 03b2e4183de529dbc005dee15ff8713cd14b503c Mon Sep 17 00:00:00 2001 From: cbruno10 Date: Mon, 28 Aug 2023 12:51:21 -0400 Subject: [PATCH 8/8] Update DEFAULT profile name in index doc --- docs/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.md b/docs/index.md index 20402ac..4c880fa 100644 --- a/docs/index.md +++ b/docs/index.md @@ -114,7 +114,7 @@ connection "databricks" { } ``` -By default, all options are commented out in the default connection, thus Steampipe will resolve your credentials using the same mechanism as the Databricks CLI (Databricks environment variables, default profile, etc). This provides a quick way to get started with Steampipe, but you will probably want to customize your experience using configuration options for [querying multiple accounts](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). +By default, all options are commented out in the default connection, thus Steampipe will resolve your credentials using the same mechanism as the Databricks CLI (Databricks environment variables, DEFAULT profile, etc). This provides a quick way to get started with Steampipe, but you will probably want to customize your experience using configuration options for [querying multiple accounts](#multi-account-connections), [configuring credentials](#configuring-databricks-credentials) from your [Databricks Profiles](#databricks-profile-credentials). ## Multi-Account Connections