[CLEANUP] Making VSCode highlight only new issues #100

Merged · 3 commits · Jun 15, 2020
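This is a lint cleanup with no behavior change: it drops the redundant &schema.Schema element type from map composite literals, capitalizes the Go initialism in ApiVersion, renames underscore-style helper functions to camelCase, and fixes indentation, so the Go tooling surfaced in VSCode highlights only newly introduced issues. A minimal sketch of the composite-literal simplification, using a local stand-in struct and a hypothetical attribute so it compiles without the provider's dependencies:

package main

import "fmt"

// Schema stands in for the Terraform SDK's schema.Schema; only the literal
// syntax matters for this sketch.
type Schema struct {
	Type     string
	Required bool
}

func main() {
	// Before the cleanup each entry repeated the element type:
	//   "path": &Schema{Type: "string", Required: true},
	// The type (and the &) are implied by the map's value type, so the entry
	// can be written as below; this is the same rewrite gofmt -s performs.
	attrs := map[string]*Schema{
		"path": {Type: "string", Required: true},
	}
	fmt.Println(attrs["path"].Type)
}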
4 changes: 2 additions & 2 deletions databricks/azure_auth.go
@@ -87,10 +87,10 @@ func (a *AzureAuth) getWorkspaceID(config *service.DBApiClientConfig) error {
"Authorization": "Bearer " + a.ManagementToken,
}
type apiVersion struct {
ApiVersion string `url:"api-version"`
APIVersion string `url:"api-version"`
}
uriPayload := apiVersion{
ApiVersion: "2018-04-01",
APIVersion: "2018-04-01",
}
var responseMap map[string]interface{}
resp, err := service.PerformQuery(config, http.MethodGet, url, "2.0", headers, false, true, uriPayload, nil)
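The azure_auth.go hunk shows the second recurring fix: Go convention keeps initialisms such as API fully capitalized, so the field ApiVersion (which golint flags) becomes APIVersion. The url struct tag is untouched, so the encoded "api-version" query parameter does not change. A self-contained sketch of the renamed struct, with the print statement purely illustrative:

package main

import "fmt"

// apiVersion mirrors the struct in azure_auth.go after the rename: the field
// follows the Go initialism convention while the url tag still maps it to the
// "api-version" query parameter.
type apiVersion struct {
	APIVersion string `url:"api-version"`
}

func main() {
	fmt.Printf("%+v\n", apiVersion{APIVersion: "2018-04-01"})
}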
8 changes: 4 additions & 4 deletions databricks/data_source_databricks_dbfs_file.go
@@ -9,22 +9,22 @@ func dataSourceDBFSFile() *schema.Resource {
return &schema.Resource{
Read: dataSourceDBFSFileRead,
Schema: map[string]*schema.Schema{
"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"limit_file_size": &schema.Schema{
"limit_file_size": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"content": &schema.Schema{
"content": {
Type: schema.TypeString,
Computed: true,
ForceNew: true,
},
"file_size": &schema.Schema{
"file_size": {
Type: schema.TypeInt,
Computed: true,
},
6 changes: 3 additions & 3 deletions databricks/data_source_databricks_dbfs_file_paths.go
@@ -9,17 +9,17 @@ func dataSourceDBFSFilePaths() *schema.Resource {
return &schema.Resource{
Read: dataSourceDBFSFilePathsRead,
Schema: map[string]*schema.Schema{
"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"recursive": &schema.Schema{
"recursive": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"path_list": &schema.Schema{
"path_list": {
Type: schema.TypeSet,
Computed: true,
Elem: &schema.Resource{
4 changes: 2 additions & 2 deletions databricks/data_source_databricks_default_user_roles.go
@@ -25,12 +25,12 @@ func dataSourceDefaultUserRoles() *schema.Resource {
return err
},
Schema: map[string]*schema.Schema{
"default_username": &schema.Schema{
"default_username": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"roles": &schema.Schema{
"roles": {
Type: schema.TypeList,
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
12 changes: 6 additions & 6 deletions databricks/data_source_databricks_notebook.go
@@ -12,12 +12,12 @@ func dataSourceNotebook() *schema.Resource {
Read: dataSourceNotebookRead,
Schema: map[string]*schema.Schema{

"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"format": &schema.Schema{
"format": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
@@ -27,19 +27,19 @@ func dataSourceNotebook() *schema.Resource {
string(model.HTML),
}, false),
},
"content": &schema.Schema{
"content": {
Type: schema.TypeString,
Computed: true,
},
"language": &schema.Schema{
"language": {
Type: schema.TypeString,
Computed: true,
},
"object_type": &schema.Schema{
"object_type": {
Type: schema.TypeString,
Computed: true,
},
"object_id": &schema.Schema{
"object_id": {
Type: schema.TypeInt,
Computed: true,
},
6 changes: 3 additions & 3 deletions databricks/data_source_databricks_notebook_paths.go
@@ -12,17 +12,17 @@ func dataSourceNotebookPaths() *schema.Resource {
Read: dataSourceNotebookPathsRead,
Schema: map[string]*schema.Schema{

"path": &schema.Schema{
"path": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"recursive": &schema.Schema{
"recursive": {
Type: schema.TypeBool,
Required: true,
ForceNew: true,
},
"notebook_path_list": &schema.Schema{
"notebook_path_list": {
Type: schema.TypeSet,
Computed: true,
Elem: &schema.Resource{
4 changes: 2 additions & 2 deletions databricks/data_source_databricks_zones.go
@@ -24,12 +24,12 @@ func dataSourceClusterZones() *schema.Resource {
return err
},
Schema: map[string]*schema.Schema{
"default_zone": &schema.Schema{
"default_zone": {
Type: schema.TypeString,
Computed: true,
ForceNew: true,
},
"zones": &schema.Schema{
"zones": {
Type: schema.TypeList,
Computed: true,
Elem: &schema.Schema{Type: schema.TypeString},
24 changes: 12 additions & 12 deletions databricks/mounts_test.go
@@ -8,16 +8,16 @@ import (

func TestValidateMountDirectory(t *testing.T) {
testCases := []struct {
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")
assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
directory string
errorCount int
}{
{"", 0},
{"/directory", 0},
{"directory", 1},
}
for _, tc := range testCases {
_, errs := ValidateMountDirectory(tc.directory, "key")

assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
}
}
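The mounts_test.go hunk is essentially whitespace-only: the table-driven test body is re-indented and a blank line is added before the assert.Lenf call, which is why the removed and added blocks read almost identically in this flattened view; the test logic itself is unchanged.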
16 changes: 8 additions & 8 deletions databricks/provider.go
@@ -50,30 +50,30 @@ func Provider(version string) terraform.ResourceProvider {
"databricks_mws_workspaces": resourceMWSWorkspaces(),
},
Schema: map[string]*schema.Schema{
"host": &schema.Schema{
"host": {
Type: schema.TypeString,
Optional: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_HOST", nil),
},
"token": &schema.Schema{
"token": {
Type: schema.TypeString,
Optional: true,
Sensitive: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_TOKEN", nil),
ConflictsWith: []string{"basic_auth"},
},
"basic_auth": &schema.Schema{
"basic_auth": {
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"username": &schema.Schema{
"username": {
Type: schema.TypeString,
Required: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_USERNAME", nil),
},
"password": &schema.Schema{
"password": {
Type: schema.TypeString,
Sensitive: true,
Required: true,
@@ -83,7 +83,7 @@ func Provider(version string) terraform.ResourceProvider {
},
ConflictsWith: []string{"token"},
},
"config_file": &schema.Schema{
"config_file": {
Type: schema.TypeString,
Optional: true,
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_CONFIG_FILE", "~/.databrickscfg"),
@@ -92,14 +92,14 @@ func Provider(version string) terraform.ResourceProvider {
"in ~/.databrickscfg. Check https://docs.databricks.com/dev-tools/cli/index.html#set-up-authentication for docs. Config\n" +
"file credetials will only be used when host/token are not provided.",
},
"profile": &schema.Schema{
"profile": {
Type: schema.TypeString,
Optional: true,
Default: "DEFAULT",
Description: "Connection profile specified within ~/.databrickscfg. Please check\n" +
"https://docs.databricks.com/dev-tools/cli/index.html#connection-profiles for documentation.",
},
"azure_auth": &schema.Schema{
"azure_auth": {
Type: schema.TypeMap,
Optional: true,
Elem: &schema.Resource{
2 changes: 1 addition & 1 deletion databricks/resource_databricks_aws_s3_mount.go
@@ -12,7 +12,7 @@ func resourceAWSS3Mount() *schema.Resource {
Delete: resourceAWSS3Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
4 changes: 2 additions & 2 deletions databricks/resource_databricks_azure_adls_gen1_mount.go
@@ -17,7 +17,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
Delete: resourceAzureAdlsGen1Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
@@ -39,7 +39,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
10 changes: 5 additions & 5 deletions databricks/resource_databricks_azure_adls_gen2_mount.go
@@ -16,7 +16,7 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
Delete: resourceAzureAdlsGen2Delete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
@@ -32,10 +32,10 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
ForceNew: true,
},
"directory": {
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {
10 changes: 5 additions & 5 deletions databricks/resource_databricks_azure_adls_gen2_mount_test.go
@@ -14,7 +14,7 @@ import (
)

func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()

resource.Test(t, resource.TestCase{
Providers: testAccProviders,
@@ -27,7 +27,7 @@ func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
}

func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()
var cluster model.ClusterInfo

resource.Test(t, resource.TestCase{
@@ -50,7 +50,7 @@ func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
}

func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
terraformToApply := testAccAzureAdlsGen2Mount_capture_error()
terraformToApply := testAccAzureAdlsGen2MountCaptureError()

resource.Test(t, resource.TestCase{
Providers: testAccProviders,
@@ -65,7 +65,7 @@ func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
})
}

func testAccAzureAdlsGen2Mount_correctly_mounts() string {
func testAccAzureAdlsGen2MountCorrectlyMounts() string {
clientID := os.Getenv("ARM_CLIENT_ID")
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
tenantID := os.Getenv("ARM_TENANT_ID")
@@ -129,7 +129,7 @@ func testAccAzureAdlsGen2Mount_correctly_mounts() string {
return definition
}

func testAccAzureAdlsGen2Mount_capture_error() string {
func testAccAzureAdlsGen2MountCaptureError() string {
clientID := os.Getenv("ARM_CLIENT_ID")
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
tenantID := os.Getenv("ARM_TENANT_ID")
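In this test file only the unexported Terraform-config helpers are renamed (testAccAzureAdlsGen2Mount_correctly_mounts becomes testAccAzureAdlsGen2MountCorrectlyMounts, and the capture_error helper likewise), addressing golint's "don't use underscores in Go names" warning; the exported TestAcc... functions keep their underscored names and their bodies are untouched apart from calling the renamed helpers.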
4 changes: 2 additions & 2 deletions databricks/resource_databricks_azure_blob_mount.go
@@ -17,7 +17,7 @@ func resourceAzureBlobMount() *schema.Resource {
Delete: resourceAzureBlobMountDelete,

Schema: map[string]*schema.Schema{
"cluster_id": &schema.Schema{
"cluster_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
@@ -37,7 +37,7 @@ func resourceAzureBlobMount() *schema.Resource {
Optional: true,
Computed: true,
//Default: "/",
ForceNew: true,
ForceNew: true,
ValidateFunc: ValidateMountDirectory,
},
"mount_name": {