
Prepare version 0.2.5
Co-authored-by: Serge Smertin <serge.smertin@databricks.com>
nfx committed Sep 11, 2020
1 parent f2929df commit e50f0c6
Showing 11 changed files with 45 additions and 91 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -44,7 +44,7 @@ terraform {
required_providers {
databricks = {
source = "databrickslabs/databricks"
-version = ... # replace dots with latest version
+version = "0.2.5"
}
}
}
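Note: with the Terraform 0.13-style required_providers pinning shown above, a provider block is still needed. The sketch below is illustrative only and not part of this commit; the host and token variables are placeholders.

```hcl
# Illustrative sketch (not part of this commit): using the pinned provider.
provider "databricks" {
  host  = var.databricks_host  # placeholder variable
  token = var.databricks_token # placeholder variable
}

variable "databricks_host" {
  type = string
}

variable "databricks_token" {
  type = string
}
```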
8 changes: 2 additions & 6 deletions access/acceptance/secret_acl_test.go
@@ -10,6 +10,7 @@ import (

"github.com/databrickslabs/databricks-terraform/common"
"github.com/databrickslabs/databricks-terraform/internal/acceptance"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
"github.com/stretchr/testify/assert"
@@ -22,12 +23,7 @@ func TestAccSecretAclResource(t *testing.T) {
}
//var secretScope Secre
var secretACL ACLItem
-// generate a random name for each tokenInfo test run, to avoid
-// collisions from multiple concurrent tests.
-// the acctest package includes many helpers such as RandStringFromCharSet
-// See https://godoc.org/github.com/hashicorp/terraform-plugin-sdk/helper/acctest
-//scope := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
-scope := "terraform_acc_test_acl"
+scope := fmt.Sprintf("tf-scope-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
principal := "users"
permission := "READ"

3 changes: 2 additions & 1 deletion access/acceptance/secret_scope_test.go
@@ -10,6 +10,7 @@ import (

"github.com/databrickslabs/databricks-terraform/common"
"github.com/databrickslabs/databricks-terraform/internal/acceptance"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
"github.com/stretchr/testify/assert"
@@ -22,7 +23,7 @@ func TestAccSecretScopeResource(t *testing.T) {
}
var secretScope SecretScope

scope := "terraform_acc_test_scope"
scope := fmt.Sprintf("tf-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))

acceptance.AccTest(t, resource.TestCase{
CheckDestroy: testSecretScopeResourceDestroy,
24 changes: 0 additions & 24 deletions compute/acceptance/cluster_policy_test.go
@@ -40,30 +40,6 @@ func TestAccClusterPolicyResourceFullLifecycle(t *testing.T) {
Check: resource.TestCheckResourceAttr("databricks_cluster_policy.external_metastore",
"name", fmt.Sprintf("Terraform policy %s", randomName+": UPDATED")),
},
{
Config: testExternalMetastore(randomName + ": UPDATED"),
Destroy: true,
Check: acceptance.ResourceCheck("databricks_cluster_policy.external_metastore",
func(client *common.DatabricksClient, id string) error {
resp, err := NewClusterPoliciesAPI(client).Get(id)
if err == nil {
return fmt.Errorf("Resource must have been deleted but: %v", resp)
}
return nil
}),
},
{
// and create it again
Config: testExternalMetastore(randomName + ": UPDATED"),
Check: acceptance.ResourceCheck("databricks_cluster_policy.external_metastore",
func(client *common.DatabricksClient, id string) error {
_, err := NewClusterPoliciesAPI(client).Get(id)
if err != nil {
return err
}
return nil
}),
},
},
})
}
21 changes: 7 additions & 14 deletions compute/acceptance/cluster_test.go
@@ -299,25 +299,18 @@ func TestAwsAccClusterResource_CreateClusterViaInstancePool(t *testing.T) {
}

func TestAzureAccClusterResource_CreateClusterViaInstancePool(t *testing.T) {
-randomInstancePoolName := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
-randomInstancePoolInterpolation := fmt.Sprintf("databricks_instance_pool.%s.id", randomInstancePoolName)
-randomClusterSuffix := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
-randomClusterName := fmt.Sprintf("cluster-%s", randomClusterSuffix)
-randomClusterId := fmt.Sprintf("databricks_cluster.%s", randomClusterName)
-defaultAzureInstancePoolClusterTest :=
-newInstancePoolHCLBuilder(randomInstancePoolName).
-withCloudEnv().
-build() +
-newClusterHCLBuilder(randomClusterName).
-withInstancePool(randomInstancePoolInterpolation).
-build()

+randomInstancePoolName := fmt.Sprintf("pool_%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
+randomClusterName := fmt.Sprintf("cluster_%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
+defaultAzureInstancePoolClusterTest := newInstancePoolHCLBuilder(randomInstancePoolName).withCloudEnv().build() +
+newClusterHCLBuilder(randomClusterName).withInstancePool(
+fmt.Sprintf("databricks_instance_pool.%s.id", randomInstancePoolName)).build()
acceptance.AccTest(t, resource.TestCase{
Steps: []resource.TestStep{
{
Config: defaultAzureInstancePoolClusterTest,
Check: resource.ComposeTestCheckFunc(
-testClusterCheckAndTerminateForFutureTests(randomClusterId, t),
+testClusterCheckAndTerminateForFutureTests(
+fmt.Sprintf("databricks_cluster.%s", randomClusterName), t),
),
},
},
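Note: for readers unfamiliar with the HCL builders used in this test, the generated configuration is roughly of the following shape. This is an illustrative approximation, not the exact output of newInstancePoolHCLBuilder/newClusterHCLBuilder; the node type and Spark version are placeholders.

```hcl
# Rough shape of the configuration the test builders emit (illustrative only).
resource "databricks_instance_pool" "pool_abc123" {
  instance_pool_name                    = "pool_abc123"
  min_idle_instances                    = 0
  max_capacity                          = 5
  node_type_id                          = "Standard_DS3_v2" # placeholder Azure node type
  idle_instance_autotermination_minutes = 10
}

resource "databricks_cluster" "cluster_abc123" {
  cluster_name            = "cluster_abc123"
  spark_version           = "6.6.x-scala2.11" # placeholder runtime version
  instance_pool_id        = databricks_instance_pool.pool_abc123.id
  num_workers             = 1
  autotermination_minutes = 10
}
```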
10 changes: 10 additions & 0 deletions docs/changelog.md
@@ -1,5 +1,15 @@
# Version changelog

## 0.2.5

* Added support for [local disk encryption](https://github.com/databrickslabs/terraform-provider-databricks/pull/313)
* Added more reliable [indication about Azure environment](https://github.com/databrickslabs/terraform-provider-databricks/pull/312) and fixed azure auth issue for Terraform 0.13
* Updated [databricks_aws_crossaccount_policy](https://github.com/databrickslabs/terraform-provider-databricks/pull/311) to latest rules
* Fixed missing importers for [databricks_scim_*](https://github.com/databrickslabs/terraform-provider-databricks/pull/290) resources
* Updated [Terraform Plugin SDK](https://github.com/databrickslabs/terraform-provider-databricks/pull/279) to latest version along with transitive dependencies.
* Added support disclaimers
* Increased code coverage to 65%

## 0.2.4

* Added [Azure CLI authentication](https://github.com/databrickslabs/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-azure-cli) to bridge the gap of local development workflows and let more people use the provider.
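Note: the local disk encryption entry in the changelog above corresponds to a new cluster flag. A minimal sketch follows, assuming the attribute is exposed as enable_local_disk_encryption on databricks_cluster as the linked pull request suggests; all other values are placeholders.

```hcl
# Sketch only: attribute name assumed from the linked pull request, not verified here.
resource "databricks_cluster" "encrypted" {
  cluster_name                 = "local-disk-encryption-example"
  spark_version                = "6.6.x-scala2.11" # placeholder
  node_type_id                 = "i3.xlarge"       # placeholder
  num_workers                  = 1
  enable_local_disk_encryption = true
}
```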
4 changes: 3 additions & 1 deletion docs/data-sources/aws_crossaccount_policy.md
@@ -1,6 +1,8 @@
# databricks_aws_crossaccount_policy Data Source

-This data source constructs necessary AWS cross-account policy for you, which is based on [official documentation](https://docs.databricks.com/administration-guide/account-settings/aws-accounts.html).
+-> **Note** This resource has evolving API, which may change in future versions of provider. Please always consult [latest documentation](https://docs.databricks.com/administration-guide/account-api/iam-role.html#language-Your%C2%A0VPC,%C2%A0default) in case of any questions.
+
+This data source constructs necessary AWS cross-account policy for you, which is based on [official documentation](https://docs.databricks.com/administration-guide/account-api/iam-role.html#language-Your%C2%A0VPC,%C2%A0default).

## Example Usage

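Note: the Example Usage section of this page is collapsed in this view. A typical invocation might look like the sketch below; the json attribute and the aws_iam_policy wiring are assumptions based on common usage, not taken from this commit.

```hcl
# Illustrative sketch: attach the generated policy document to an IAM policy.
data "databricks_aws_crossaccount_policy" "this" {}

resource "aws_iam_policy" "cross_account" {
  name   = "databricks-cross-account-policy"                  # placeholder name
  policy = data.databricks_aws_crossaccount_policy.this.json  # 'json' attribute assumed
}
```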
6 changes: 6 additions & 0 deletions internal/sanity/utils_test.go
@@ -135,6 +135,12 @@ func TestAccMissingResourcesInWorkspace(t *testing.T) {
return err
},
},
{
Name: "Cluster Policies Delete",
ReadFunc: func() error {
return compute.NewClusterPoliciesAPI(client).Delete(randStringID)
},
},
{
Name: "Jobs",
ReadFunc: func() error {
10 changes: 10 additions & 0 deletions scripts/run.sh
@@ -124,6 +124,16 @@ function go_test {
if [[ $@ == *"--tee"* ]]; then
go_test $2 2>&1 | tee out.log
echo "✓ To output of existing tests: less $PWD/out.log"

FAILURES=$(grep "\-\-\- FAIL" out.log | sed 's/--- FAIL: /\* \[ \]/g' | sort)
PASSES=$(grep PASS out.log | grep Test | sort | sed 's/PASS/ \* \[x\]/')

cat <<-EOF > test-report.log
$1
---
${FAILURES}
${PASSES}
EOF
else
go_test $2
fi
40 changes: 0 additions & 40 deletions storage/acceptance/adls_gen2_test.go
@@ -2,7 +2,6 @@ package acceptance

import (
"os"
"regexp"
"testing"

"github.com/databrickslabs/databricks-terraform/common"
@@ -45,42 +44,3 @@ func TestAzureAccAdlsGen2Mount_correctly_mounts(t *testing.T) {
},
})
}

func TestAzureAccAdlsGen2Mount_capture_error(t *testing.T) {
if _, ok := os.LookupEnv("CLOUD_ENV"); !ok {
t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set")
}
client := common.CommonEnvironmentClient()
if !client.AzureAuth.IsClientSecretSet() {
t.Skip("Test is meant only for SP Azure")
}
acceptance.AccTest(t, resource.TestCase{
Steps: []resource.TestStep{
{
Config: qa.EnvironmentTemplate(t, `
resource "databricks_secret_scope" "terraform" {
name = "terraform-{var.RANDOM}"
initial_manage_principal = "users"
}
resource "databricks_secret" "client_secret" {
key = "datalake_sp_secret"
string_value = "{env.ARM_CLIENT_SECRET}"
scope = databricks_secret_scope.terraform.name
}
resource "databricks_azure_adls_gen2_mount" "mount" {
storage_account_name = "{env.TEST_STORAGE_V2_ACCOUNT}"
container_name = "{env.TEST_STORAGE_V2_ABFSS}"
mount_name = "localdir{var.RANDOM}"
tenant_id = "{env.ARM_TENANT_ID}"
client_id = "{env.ARM_CLIENT_ID}"
client_secret_scope = databricks_secret_scope.terraform.name
client_secret_key = "SECRET_KEY_WRONG_ON_PURPOSE"
initialize_file_system = true
}`),
ExpectNonEmptyPlan: true,
ExpectError: regexp.MustCompile("Secret does not exist with scope"),
Destroy: false,
},
},
})
}
8 changes: 4 additions & 4 deletions storage/mounts.go
@@ -156,19 +156,19 @@ func mountCluster(tpl interface{}, d *schema.ResourceData, m interface{},

// returns resource create mount for object store on workspace
func mountCreate(tpl interface{}, r *schema.Resource) func(*schema.ResourceData, interface{}) error {
-return func(d *schema.ResourceData, m interface{}) (err error) {
+return func(d *schema.ResourceData, m interface{}) error {
mountConfig, mountPoint, err := mountCluster(tpl, d, m, r)
if err != nil {
-return
+return err
}
log.Printf("[INFO] Mounting %s at /mnt/%s", mountConfig.Source(), d.Id())
source, err := mountPoint.Mount(mountConfig)
if err != nil {
-return
+return err
}
err = d.Set("source", source)
if err != nil {
-return
+return err
}
return readMountSource(mountPoint, d)
}
