Skip to content

Commit

Permalink
go-fmt cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
nfx committed Jun 14, 2020
1 parent 3f56ba9 commit fb4a680
Show file tree
Hide file tree
Showing 6 changed files with 61 additions and 61 deletions.
24 changes: 12 additions & 12 deletions databricks/mounts_test.go
Expand Up @@ -8,16 +8,16 @@ import (

func TestValidateMountDirectory(t *testing.T) {
testCases := []struct {
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")
assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")

assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
}
2 changes: 1 addition & 1 deletion databricks/resource_databricks_azure_adls_gen1_mount.go
Expand Up @@ -39,7 +39,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down
8 changes: 4 additions & 4 deletions databricks/resource_databricks_azure_adls_gen2_mount.go
Expand Up @@ -32,10 +32,10 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
ForceNew: true,
},
"directory": {
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down
2 changes: 1 addition & 1 deletion databricks/resource_databricks_azure_blob_mount.go
Expand Up @@ -37,7 +37,7 @@ func resourceAzureBlobMount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down
52 changes: 26 additions & 26 deletions databricks/resource_databricks_job_aws_test.go
Expand Up @@ -114,30 +114,30 @@ func testAwsJobValuesNewCluster(t *testing.T, job *model.Job) resource.TestCheck
}

// testAwsJobResourceNewCluster returns HCL for a databricks_job fixture
// backed by a new autoscaling AWS cluster. The template contains no format
// verbs, so it is returned as a plain raw string — wrapping it in
// fmt.Sprintf is unnecessary (staticcheck S1039).
func testAwsJobResourceNewCluster() string {
	return `
resource "databricks_job" "my_job" {
  new_cluster {
    autoscale {
      min_workers = 2
      max_workers = 3
    }
    spark_version = "6.4.x-scala2.11"
    aws_attributes {
      availability           = "SPOT"
      zone_id                = "us-east-1a"
      spot_bid_price_percent = "100"
      first_on_demand        = 1
      ebs_volume_type        = "GENERAL_PURPOSE_SSD"
      ebs_volume_count       = 1
      ebs_volume_size        = 32
    }
    node_type_id = "r3.xlarge"
  }
  notebook_path       = "/Users/jane.doe@databricks.com/my-demo-notebook"
  name                = "my-demo-notebook"
  timeout_seconds     = 3600
  max_retries         = 1
  max_concurrent_runs = 1
}
`
}
34 changes: 17 additions & 17 deletions databricks/resource_databricks_job_azure_test.go
Expand Up @@ -106,21 +106,21 @@ func testAzureJobValuesNewCluster(t *testing.T, job *model.Job) resource.TestChe
}

// testAzureJobResourceNewCluster returns HCL for a databricks_job fixture
// backed by a new autoscaling Azure cluster. The template contains no format
// verbs, so it is returned as a plain raw string — wrapping it in
// fmt.Sprintf is unnecessary (staticcheck S1039).
func testAzureJobResourceNewCluster() string {
	return `
resource "databricks_job" "my_job" {
  new_cluster {
    autoscale {
      min_workers = 2
      max_workers = 3
    }
    spark_version = "6.4.x-scala2.11"
    node_type_id  = "Standard_DS3_v2"
  }
  notebook_path       = "/Users/jane.doe@databricks.com/my-demo-notebook"
  name                = "my-demo-notebook"
  timeout_seconds     = 3600
  max_retries         = 1
  max_concurrent_runs = 1
}
`
}

0 comments on commit fb4a680

Please sign in to comment.