Merge pull request #96 from databrickslabs/provider-auth-fix
fixed order of precedence for basic auth
stikkireddy committed Jun 12, 2020 · 2 parents b8b4d86 + b312c13 · commit 34e68d5
Showing 4 changed files with 45 additions and 27 deletions.
16 changes: 10 additions & 6 deletions databricks/provider.go
@@ -286,7 +286,7 @@ func providerConfigure(d *schema.ResourceData, providerVersion string) (interfac
config.Setup()

//version information from go-releaser using -ldflags to tell the golang linker to send semver info
config.UserAgent = fmt.Sprintf("databricks-tf-provider-%s", providerVersion)
config.UserAgent = fmt.Sprintf("databricks-tf-provider/%s", providerVersion)

if _, ok := d.GetOk("azure_auth"); !ok {
if host, ok := d.GetOk("host"); ok {
@@ -295,11 +295,7 @@ func providerConfigure(d *schema.ResourceData, providerVersion string) (interfac
if token, ok := d.GetOk("token"); ok {
config.Token = token.(string)
}
if config.Host == "" || config.Token == "" {
if err := tryDatabricksCliConfigFile(d, &config); err != nil {
return nil, fmt.Errorf("failed to get credentials from config file; error msg: %w", err)
}
}

// Basic authentication setup via username and password
if _, ok := d.GetOk("basic_auth"); ok {
username, userOk := d.GetOk("basic_auth.0.username")
@@ -310,6 +306,14 @@ func providerConfigure(d *schema.ResourceData, providerVersion string) (interfac
config.AuthType = service.BasicAuth
}
}

// Final catch all in case basic_auth/token + host is not setup
if config.Host == "" || config.Token == "" {
if err := tryDatabricksCliConfigFile(d, &config); err != nil {
return nil, fmt.Errorf("failed to get credentials from config file; error msg: %w", err)
}
}

} else {
// Abstracted logic to another function that returns a interface{}, error to inject directly
// for the providers during cloud integration testing
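The net effect of the provider.go hunks above is a changed resolution order: explicit host/token first, then basic_auth, and only then the Databricks CLI config file as a final catch-all. Below is a minimal, illustrative sketch of that precedence in plain Go; the `inputs` struct and `tryCliConfigFile` callback are simplified stand-ins for the provider's schema.ResourceData lookups and its tryDatabricksCliConfigFile helper, not the provider's actual API.

```go
package main

import (
	"encoding/base64"
	"fmt"
)

// inputs is a hypothetical stand-in for the values the provider reads from
// schema.ResourceData; it is not part of the real provider code.
type inputs struct {
	Host, Token, Username, Password string
}

// resolved is a simplified stand-in for the provider's auth config.
type resolved struct {
	Host, Token, AuthType string
}

// resolve mirrors the precedence established by this commit:
// 1) explicit host/token, 2) basic_auth credentials, and only
// 3) the Databricks CLI config file when host or token is still missing.
func resolve(in inputs, tryCliConfigFile func(c *resolved) error) (*resolved, error) {
	c := &resolved{Host: in.Host, Token: in.Token}

	// basic_auth now takes precedence over the CLI config file fallback:
	// the token becomes base64("username:password").
	if in.Username != "" && in.Password != "" {
		c.Token = base64.StdEncoding.EncodeToString([]byte(in.Username + ":" + in.Password))
		c.AuthType = "BASIC"
	}

	// Final catch-all: consult the CLI config file only if something is missing.
	if c.Host == "" || c.Token == "" {
		if err := tryCliConfigFile(c); err != nil {
			return nil, fmt.Errorf("failed to get credentials from config file; error msg: %w", err)
		}
	}
	return c, nil
}

func main() {
	cfg, err := resolve(
		inputs{Host: "https://example.cloud.databricks.com", Username: "user", Password: "pass"},
		func(c *resolved) error { return nil }, // no-op fallback for the demo
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.Host, cfg.AuthType, cfg.Token)
}
```

With both basic_auth and a config file present (as in the new test below), the basic_auth token wins because the config-file fallback is no longer reached once host and token are set.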
14 changes: 14 additions & 0 deletions databricks/provider_test.go
@@ -128,6 +128,20 @@ func TestProvider_HostTokensTakePrecedence(t *testing.T) {
assert.Equal(t, "configured", client.Token)
}

func TestProvider_BasicAuthTakePrecedence(t *testing.T) {
var raw = make(map[string]interface{})
raw["host"] = "foo"
raw["basic_auth"] = []interface{}{map[string]interface{}{"username": "user", "password": "pass"}}
raw["config_file"] = "testdata/.databrickscfg"
err := testAccProvider.Configure(terraform.NewResourceConfigRaw(raw))
assert.Nil(t, err)

// Basic auth convention
expectedToken := base64.StdEncoding.EncodeToString([]byte("user:pass"))
client := testAccProvider.Meta().(*service.DBApiClient).Config
assert.Equal(t, expectedToken, client.Token)
}

func TestProvider_MissingEnvMakesConfigRead(t *testing.T) {
var raw = make(map[string]interface{})
raw["token"] = "configured"
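The new TestProvider_BasicAuthTakePrecedence test pins down the standard HTTP basic-auth convention: the client token is base64(username:password), which is what ultimately travels in an `Authorization: Basic <token>` header. A short illustrative snippet of that convention, assuming a placeholder request URL rather than anything the provider calls in this diff:

```go
package main

import (
	"encoding/base64"
	"fmt"
	"net/http"
)

func main() {
	// Same convention the test asserts: base64-encode "user:pass".
	token := base64.StdEncoding.EncodeToString([]byte("user:pass"))
	fmt.Println(token) // prints dXNlcjpwYXNz

	// Illustrative only: how such a token is typically attached to a request.
	req, err := http.NewRequest("GET", "https://example.cloud.databricks.com/api/2.0/clusters/list", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Basic "+token)
	fmt.Println(req.Header.Get("Authorization"))
}
```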
28 changes: 14 additions & 14 deletions databricks/resource_databricks_job_aws_test.go
@@ -95,20 +95,20 @@ func testAwsJobValuesNewCluster(t *testing.T, job *model.Job) resource.TestCheck
assert.NotNil(t, job.Settings.NotebookTask)
assert.Equal(t, 2, int(job.Settings.NewCluster.Autoscale.MinWorkers))
assert.Equal(t, 3, int(job.Settings.NewCluster.Autoscale.MaxWorkers))
assert.Equal(t, "6.4.x-scala2.11", job.Settings.NewCluster.SparkVersion)
assert.Equal(t, model.AwsAvailability(model.AwsAvailabilitySpot), job.Settings.NewCluster.AwsAttributes.Availability)
assert.Equal(t, "us-east-1a", job.Settings.NewCluster.AwsAttributes.ZoneID)
assert.Equal(t, 100, int(job.Settings.NewCluster.AwsAttributes.SpotBidPricePercent))
assert.Equal(t, 1, int(job.Settings.NewCluster.AwsAttributes.FirstOnDemand))
assert.Equal(t, model.EbsVolumeType(model.EbsVolumeTypeGeneralPurposeSsd), job.Settings.NewCluster.AwsAttributes.EbsVolumeType)
assert.Equal(t, 1, int(job.Settings.NewCluster.AwsAttributes.EbsVolumeCount))
assert.Equal(t, 32, int(job.Settings.NewCluster.AwsAttributes.EbsVolumeSize))
assert.Equal(t, "r3.xlarge", job.Settings.NewCluster.NodeTypeID)
assert.Equal(t, "/Users/jane.doe@databricks.com/my-demo-notebook", job.Settings.NotebookTask.NotebookPath)
assert.Equal(t, "my-demo-notebook", job.Settings.Name)
assert.Equal(t, 3600, int(job.Settings.TimeoutSeconds))
assert.Equal(t, 1, int(job.Settings.MaxRetries))
assert.Equal(t, 1, int(job.Settings.MaxConcurrentRuns))
assert.Equal(t, "6.4.x-scala2.11", job.Settings.NewCluster.SparkVersion)
assert.Equal(t, model.AwsAvailability(model.AwsAvailabilitySpot), job.Settings.NewCluster.AwsAttributes.Availability)
assert.Equal(t, "us-east-1a", job.Settings.NewCluster.AwsAttributes.ZoneID)
assert.Equal(t, 100, int(job.Settings.NewCluster.AwsAttributes.SpotBidPricePercent))
assert.Equal(t, 1, int(job.Settings.NewCluster.AwsAttributes.FirstOnDemand))
assert.Equal(t, model.EbsVolumeType(model.EbsVolumeTypeGeneralPurposeSsd), job.Settings.NewCluster.AwsAttributes.EbsVolumeType)
assert.Equal(t, 1, int(job.Settings.NewCluster.AwsAttributes.EbsVolumeCount))
assert.Equal(t, 32, int(job.Settings.NewCluster.AwsAttributes.EbsVolumeSize))
assert.Equal(t, "r3.xlarge", job.Settings.NewCluster.NodeTypeID)
assert.Equal(t, "/Users/jane.doe@databricks.com/my-demo-notebook", job.Settings.NotebookTask.NotebookPath)
assert.Equal(t, "my-demo-notebook", job.Settings.Name)
assert.Equal(t, 3600, int(job.Settings.TimeoutSeconds))
assert.Equal(t, 1, int(job.Settings.MaxRetries))
assert.Equal(t, 1, int(job.Settings.MaxConcurrentRuns))
return nil
}
}
14 changes: 7 additions & 7 deletions databricks/resource_databricks_job_azure_test.go
@@ -94,13 +94,13 @@ func testAzureJobValuesNewCluster(t *testing.T, job *model.Job) resource.TestChe
assert.NotNil(t, job.Settings.NotebookTask)
assert.Equal(t, 2, int(job.Settings.NewCluster.Autoscale.MinWorkers))
assert.Equal(t, 3, int(job.Settings.NewCluster.Autoscale.MaxWorkers))
assert.Equal(t, "6.4.x-scala2.11", job.Settings.NewCluster.SparkVersion)
assert.Equal(t, "Standard_DS3_v2", job.Settings.NewCluster.NodeTypeID)
assert.Equal(t, "/Users/jane.doe@databricks.com/my-demo-notebook", job.Settings.NotebookTask.NotebookPath)
assert.Equal(t, "my-demo-notebook", job.Settings.Name)
assert.Equal(t, 3600, int(job.Settings.TimeoutSeconds))
assert.Equal(t, 1, int(job.Settings.MaxRetries))
assert.Equal(t, 1, int(job.Settings.MaxConcurrentRuns))
assert.Equal(t, "6.4.x-scala2.11", job.Settings.NewCluster.SparkVersion)
assert.Equal(t, "Standard_DS3_v2", job.Settings.NewCluster.NodeTypeID)
assert.Equal(t, "/Users/jane.doe@databricks.com/my-demo-notebook", job.Settings.NotebookTask.NotebookPath)
assert.Equal(t, "my-demo-notebook", job.Settings.Name)
assert.Equal(t, 3600, int(job.Settings.TimeoutSeconds))
assert.Equal(t, 1, int(job.Settings.MaxRetries))
assert.Equal(t, 1, int(job.Settings.MaxConcurrentRuns))
return nil
}
}
