Skip to content

Commit

Permalink
Merge pull request #100 from databrickslabs/cleanup-and-tests
Browse files Browse the repository at this point in the history
[CLEANUP] Making VSCode highlight only new issues
  • Loading branch information
stikkireddy committed Jun 15, 2020
2 parents 3f56ba9 + f57c17d commit 95b6e0d
Show file tree
Hide file tree
Showing 39 changed files with 376 additions and 376 deletions.
4 changes: 2 additions & 2 deletions databricks/azure_auth.go
Expand Up @@ -87,10 +87,10 @@ func (a *AzureAuth) getWorkspaceID(config *service.DBApiClientConfig) error {
"Authorization": "Bearer " + a.ManagementToken,
}
type apiVersion struct {
ApiVersion string `url:"api-version"`
APIVersion string `url:"api-version"`
}
uriPayload := apiVersion{
ApiVersion: "2018-04-01",
APIVersion: "2018-04-01",
}
var responseMap map[string]interface{}
resp, err := service.PerformQuery(config, http.MethodGet, url, "2.0", headers, false, true, uriPayload, nil)
Expand Down
8 changes: 4 additions & 4 deletions databricks/data_source_databricks_dbfs_file.go
Expand Up @@ -9,22 +9,22 @@ func dataSourceDBFSFile() *schema.Resource {
return &schema.Resource{
Read: dataSourceDBFSFileRead,
Schema: map[string]*schema.Schema{
"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"limit_file_size": &schema.Schema{
"limit_file_size": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"content": &schema.Schema{
"content": {
Type: schema.TypeString,
Computed: true,
ForceNew: true,
},
"file_size": &schema.Schema{
"file_size": {
Type: schema.TypeInt,
Computed: true,
},
Expand Down
6 changes: 3 additions & 3 deletions databricks/data_source_databricks_dbfs_file_paths.go
Expand Up @@ -9,17 +9,17 @@ func dataSourceDBFSFilePaths() *schema.Resource {
return &schema.Resource{
Read: dataSourceDBFSFilePathsRead,
Schema: map[string]*schema.Schema{
"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"recursive": &schema.Schema{
"recursive": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"path_list": &schema.Schema{
"path_list": {
Type: schema.TypeSet,
Computed: true,
Elem: &schema.Resource{
Expand Down
4 changes: 2 additions & 2 deletions databricks/data_source_databricks_default_user_roles.go
Expand Up @@ -25,12 +25,12 @@ func dataSourceDefaultUserRoles() *schema.Resource {
return err
},
Schema: map[string]*schema.Schema{
"default_username": &schema.Schema{
"default_username": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"roles": &schema.Schema{
"roles": {
Type: schema.TypeList,
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
Expand Down
12 changes: 6 additions & 6 deletions databricks/data_source_databricks_notebook.go
Expand Up @@ -12,12 +12,12 @@ func dataSourceNotebook() *schema.Resource {
Read: dataSourceNotebookRead,
Schema: map[string]*schema.Schema{

"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"format": &schema.Schema{
"format": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Expand All @@ -27,19 +27,19 @@ func dataSourceNotebook() *schema.Resource {
string(model.HTML),
}, false),
},
"content": &schema.Schema{
"content": {
Type: schema.TypeString,
Computed: true,
},
"language": &schema.Schema{
"language": {
Type: schema.TypeString,
Computed: true,
},
"object_type": &schema.Schema{
"object_type": {
Type: schema.TypeString,
Computed: true,
},
"object_id": &schema.Schema{
"object_id": {
Type: schema.TypeInt,
Computed: true,
},
Expand Down
6 changes: 3 additions & 3 deletions databricks/data_source_databricks_notebook_paths.go
Expand Up @@ -12,17 +12,17 @@ func dataSourceNotebookPaths() *schema.Resource {
Read: dataSourceNotebookPathsRead,
Schema: map[string]*schema.Schema{

"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"recursive": &schema.Schema{
"recursive": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"notebook_path_list": &schema.Schema{
"notebook_path_list": {
Type: schema.TypeSet,
Computed: true,
Elem: &schema.Resource{
Expand Down
4 changes: 2 additions & 2 deletions databricks/data_source_databricks_zones.go
Expand Up @@ -24,12 +24,12 @@ func dataSourceClusterZones() *schema.Resource {
return err
},
Schema: map[string]*schema.Schema{
"default_zone": &schema.Schema{
"default_zone": {
Type: schema.TypeString,
Computed: true,
ForceNew: true,
},
"zones": &schema.Schema{
"zones": {
Type: schema.TypeList,
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
Expand Down
24 changes: 12 additions & 12 deletions databricks/mounts_test.go
Expand Up @@ -8,16 +8,16 @@ import (

func TestValidateMountDirectory(t *testing.T) {
testCases := []struct {
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")
assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")

assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
}
16 changes: 8 additions & 8 deletions databricks/provider.go
Expand Up @@ -50,30 +50,30 @@ func Provider(version string) terraform.ResourceProvider {
"databricks_mws_workspaces": resourceMWSWorkspaces(),
},
Schema: map[string]*schema.Schema{
"host": &schema.Schema{
"host": {
Type: schema.TypeString,
Optional: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_HOST", nil),
},
"token": &schema.Schema{
"token": {
Type: schema.TypeString,
Optional: true,
Sensitive: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_TOKEN", nil),
ConflictsWith: []string{"basic_auth"},
},
"basic_auth": &schema.Schema{
"basic_auth": {
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"username": &schema.Schema{
"username": {
Type: schema.TypeString,
Required: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_USERNAME", nil),
},
"password": &schema.Schema{
"password": {
Type: schema.TypeString,
Sensitive: true,
Required: true,
Expand All @@ -83,7 +83,7 @@ func Provider(version string) terraform.ResourceProvider {
},
ConflictsWith: []string{"token"},
},
"config_file": &schema.Schema{
"config_file": {
Type: schema.TypeString,
Optional: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_CONFIG_FILE", "~/.databrickscfg"),
Expand All @@ -92,14 +92,14 @@ func Provider(version string) terraform.ResourceProvider {
"in ~/.databrickscfg. Check https://docs.databricks.com/dev-tools/cli/index.html#set-up-authentication for docs. Config\n" +
"file credentials will only be used when host/token are not provided.",
},
"profile": &schema.Schema{
"profile": {
Type: schema.TypeString,
Optional: true,
Default: "DEFAULT",
Description: "Connection profile specified within ~/.databrickscfg. Please check\n" +
"https://docs.databricks.com/dev-tools/cli/index.html#connection-profiles for documentation.",
},
"azure_auth": &schema.Schema{
"azure_auth": {
Type: schema.TypeMap,
Optional: true,
Elem: &schema.Resource{
Expand Down
2 changes: 1 addition & 1 deletion databricks/resource_databricks_aws_s3_mount.go
Expand Up @@ -12,7 +12,7 @@ func resourceAWSS3Mount() *schema.Resource {
Delete: resourceAWSS3Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Expand Down
4 changes: 2 additions & 2 deletions databricks/resource_databricks_azure_adls_gen1_mount.go
Expand Up @@ -17,7 +17,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
Delete: resourceAzureAdlsGen1Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Expand All @@ -39,7 +39,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down
10 changes: 5 additions & 5 deletions databricks/resource_databricks_azure_adls_gen2_mount.go
Expand Up @@ -16,7 +16,7 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
Delete: resourceAzureAdlsGen2Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Expand All @@ -32,10 +32,10 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
ForceNew: true,
},
"directory": {
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down
10 changes: 5 additions & 5 deletions databricks/resource_databricks_azure_adls_gen2_mount_test.go
Expand Up @@ -14,7 +14,7 @@ import (
)

func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()

resource.Test(t, resource.TestCase{
Providers: testAccProviders,
Expand All @@ -27,7 +27,7 @@ func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
}

func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()
var cluster model.ClusterInfo

resource.Test(t, resource.TestCase{
Expand All @@ -50,7 +50,7 @@ func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
}

func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_capture_error()
terraformToApply := testAccAzureAdlsGen2MountCaptureError()

resource.Test(t, resource.TestCase{
Providers: testAccProviders,
Expand All @@ -65,7 +65,7 @@ func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
})
}

func testAccAzureAdlsGen2Mount_correctly_mounts() string {
func testAccAzureAdlsGen2MountCorrectlyMounts() string {
clientID := os.Getenv("ARM_CLIENT_ID")
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
tenantID := os.Getenv("ARM_TENANT_ID")
Expand Down Expand Up @@ -129,7 +129,7 @@ func testAccAzureAdlsGen2Mount_correctly_mounts() string {
return definition
}

func testAccAzureAdlsGen2Mount_capture_error() string {
func testAccAzureAdlsGen2MountCaptureError() string {
clientID := os.Getenv("ARM_CLIENT_ID")
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
tenantID := os.Getenv("ARM_TENANT_ID")
Expand Down
4 changes: 2 additions & 2 deletions databricks/resource_databricks_azure_blob_mount.go
Expand Up @@ -17,7 +17,7 @@ func resourceAzureBlobMount() *schema.Resource {
Delete: resourceAzureBlobMountDelete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Expand All @@ -37,7 +37,7 @@ func resourceAzureBlobMount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
Expand Down

0 comments on commit 95b6e0d

Please sign in to comment.