From 1cfaa13f01c68e202f734f402e205b645e6170bc Mon Sep 17 00:00:00 2001 From: dyindude Date: Wed, 9 Jan 2019 11:02:36 -0600 Subject: [PATCH 1/9] implement workaround for chunked file transfers to ADLS https://github.com/Azure/azure-sdk-for-go/issues/3231 --- azurerm/resource_arm_data_lake_store_file.go | 27 +++++++++++++++----- 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index 19eb6304f45d..fdfcaade5794 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -3,6 +3,7 @@ package azurerm import ( "bytes" "fmt" + "io" "io/ioutil" "log" "net/url" @@ -53,6 +54,7 @@ func resourceArmDataLakeStoreFile() *schema.Resource { func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*ArmClient).dataLakeStoreFilesClient ctx := meta.(*ArmClient).StopContext + bufferSize := 4 * 1024 * 1024 log.Printf("[INFO] preparing arguments for Date Lake Store File creation.") @@ -82,15 +84,28 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} } defer utils.IoCloseAndLogError(file, fmt.Sprintf("Error closing Data Lake Store File %q", localFilePath)) - // Read the file contents - fileContents, err := ioutil.ReadAll(file) + _, err = client.Create(ctx, accountName, remoteFilePath, nil, nil, filesystem.DATA, nil, nil) if err != nil { - return err + return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) } - _, err = client.Create(ctx, accountName, remoteFilePath, ioutil.NopCloser(bytes.NewReader(fileContents)), utils.Bool(false), filesystem.CLOSE, nil, nil) - if err != nil { - return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) + buffer := make([]byte, bufferSize, bufferSize) + for { + n, err := file.Read(buffer) + if err == io.EOF { + break + } + flag := filesystem.DATA + if n < bufferSize { + // last chunk + flag = filesystem.CLOSE + } + chunk := ioutil.NopCloser(bytes.NewReader(buffer)) + + _, err = client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil) + if err != nil { + return fmt.Errorf("Error transferring chunk for Data Lake Store File %q : %+v", remoteFilePath, err) + } } d.SetId(id) From 5c775190626d1e54cb0ff40cf2bdd43a1e3cd11e Mon Sep 17 00:00:00 2001 From: dyindude Date: Thu, 10 Jan 2019 10:41:28 -0600 Subject: [PATCH 2/9] pass only the read n bytes of the buffer to NewReader so that the last chunk is not oversized --- azurerm/resource_arm_data_lake_store_file.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index fdfcaade5794..4f0fcc08d471 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -100,7 +100,7 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} // last chunk flag = filesystem.CLOSE } - chunk := ioutil.NopCloser(bytes.NewReader(buffer)) + chunk := ioutil.NopCloser(bytes.NewReader(buffer[:n])) _, err = client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil) if err != nil { From 35052a734db0f3653c0cfe41f7e3bea5188380c8 Mon Sep 17 00:00:00 2001 From: dyindude Date: Fri, 11 Jan 2019 09:26:00 -0600 Subject: [PATCH 3/9] acceptance test for azurerm_data_lake_store_file that tests large (greater than 4 megabytes) files --- 
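Note on the loop this test exercises: the 12 MB fixture is an exact multiple of the 4 MB chunk size, and with the loop as written in PATCH 1/9 a regular local file of that size reads as three full chunks followed by a (0, io.EOF) read, so the loop breaks without ever sending an Append carrying filesystem.CLOSE; a short read mid-file would send CLOSE early, and a non-EOF read error is never surfaced (the loop neither breaks nor returns on it). Below is a minimal sketch of a stricter loop, not part of this series, reusing the identifiers from PATCH 1/9 (with the chunkSize name that PATCH 4/9 later introduces) and assuming the same client.Append call shape as the diff; whether ADLS strictly requires CLOSE on the final append is not asserted here.

    info, err := file.Stat()
    if err != nil {
        return fmt.Errorf("Error reading file info for Data Lake Store File %q : %+v", localFilePath, err)
    }
    remaining := info.Size()
    buffer := make([]byte, chunkSize)
    for remaining > 0 {
        n, readErr := file.Read(buffer)
        if readErr != nil && readErr != io.EOF {
            // surface read errors instead of silently looping past them
            return fmt.Errorf("Error reading Data Lake Store File %q : %+v", localFilePath, readErr)
        }
        if n == 0 {
            break // file ended before the advertised size; avoid looping forever
        }
        remaining -= int64(n)
        flag := filesystem.DATA
        if remaining <= 0 {
            // last chunk: CLOSE is tied to bytes sent, not to read length
            flag = filesystem.CLOSE
        }
        chunk := ioutil.NopCloser(bytes.NewReader(buffer[:n]))
        if _, err := client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil); err != nil {
            return fmt.Errorf("Error transferring chunk for Data Lake Store File %q : %+v", remoteFilePath, err)
        }
    }

With this shape the final flag depends on the byte count from Stat rather than on the length of an individual Read, so exact-multiple files and short reads both end with a CLOSE append.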
.../resource_arm_data_lake_store_file_test.go | 58 +++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/azurerm/resource_arm_data_lake_store_file_test.go b/azurerm/resource_arm_data_lake_store_file_test.go index 049fbc2dc9cf..d1596cf1e9a6 100644 --- a/azurerm/resource_arm_data_lake_store_file_test.go +++ b/azurerm/resource_arm_data_lake_store_file_test.go @@ -2,6 +2,7 @@ package azurerm import ( "fmt" + "io/ioutil" "net/http" "testing" @@ -38,6 +39,41 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) { }) } +func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { + resourceName := "azurerm_data_lake_store_file.test" + + //"large" in this context is anything greater than 4 megabytes + largeSize := 12 * 1024 * 1024 + data := make([]byte, largeSize, largeSize) + err := ioutil.WriteFile("./testdata/testAccAzureRMDataLakeStoreFile_largefiles.bin", data, 0644) + if err != nil { + t.Errorf("Error creating testdata.") + } + + ri := acctest.RandInt() + rs := acctest.RandString(4) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataLakeStoreFile_largefiles(ri, rs, testLocation()), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataLakeStoreFileExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"local_file_path"}, + }, + }, + }) +} + func TestAccAzureRMDataLakeStoreFile_requiresimport(t *testing.T) { if !requireResourcesToBeImported { t.Skip("Skipping since resources aren't required to be imported") @@ -145,6 +181,28 @@ resource "azurerm_data_lake_store_file" "test" { `, rInt, location, rString, location) } +func testAccAzureRMDataLakeStoreFile_largefiles(rInt int, rString, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "%s" + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + remote_file_path = "/test/testAccAzureRMDataLakeStoreFile_largefiles.bin" + account_name = "${azurerm_data_lake_store.test.name}" + local_file_path = "./testdata/testAccAzureRMDataLakeStoreFile_largefiles.bin" +} +`, rInt, location, rString, location) +} + func testAccAzureRMDataLakeStoreFile_requiresImport(rInt int, rString, location string) string { template := testAccAzureRMDataLakeStoreFile_basic(rInt, rString, location) return fmt.Sprintf(` From f31e8c8ab91b4664577aa7d666f86ff5124bfbd4 Mon Sep 17 00:00:00 2001 From: dyindude Date: Fri, 11 Jan 2019 09:33:38 -0600 Subject: [PATCH 4/9] renamed bufferSize to chunkSize for clarity --- azurerm/resource_arm_data_lake_store_file.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index 4f0fcc08d471..d42b400e2147 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -54,7 +54,7 @@ func resourceArmDataLakeStoreFile() *schema.Resource { func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*ArmClient).dataLakeStoreFilesClient ctx := meta.(*ArmClient).StopContext - 
bufferSize := 4 * 1024 * 1024 + chunkSize := 4 * 1024 * 1024 log.Printf("[INFO] preparing arguments for Date Lake Store File creation.") @@ -89,14 +89,14 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) } - buffer := make([]byte, bufferSize, bufferSize) + buffer := make([]byte, chunkSize, chunkSize) for { n, err := file.Read(buffer) if err == io.EOF { break } flag := filesystem.DATA - if n < bufferSize { + if n < chunkSize { // last chunk flag = filesystem.CLOSE } From f23da35c71db9e3d86a79353d184b643d627887b Mon Sep 17 00:00:00 2001 From: kt Date: Fri, 11 Jan 2019 11:46:17 -0800 Subject: [PATCH 5/9] TestAccAzureRMDataLakeStoreFile_largefiles: use a temp file with random data and remove it afterwards --- .../resource_arm_data_lake_store_file_test.go | 29 +++++++++++++------ 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/azurerm/resource_arm_data_lake_store_file_test.go b/azurerm/resource_arm_data_lake_store_file_test.go index d1596cf1e9a6..70d2ca6dc349 100644 --- a/azurerm/resource_arm_data_lake_store_file_test.go +++ b/azurerm/resource_arm_data_lake_store_file_test.go @@ -3,7 +3,9 @@ package azurerm import ( "fmt" "io/ioutil" + "math/rand" "net/http" + "os" "testing" "github.com/hashicorp/terraform/helper/acctest" @@ -41,17 +43,26 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) { func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { resourceName := "azurerm_data_lake_store_file.test" + ri := acctest.RandInt() + rs := acctest.RandString(4) //"large" in this context is anything greater than 4 megabytes - largeSize := 12 * 1024 * 1024 + largeSize := 12 * 1024 * 1024 //12 mb data := make([]byte, largeSize, largeSize) - err := ioutil.WriteFile("./testdata/testAccAzureRMDataLakeStoreFile_largefiles.bin", data, 0644) + rand.Read(data) //fill with random data + + tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large") if err != nil { - t.Errorf("Error creating testdata.") + t.Errorf("Unable to open a temporary file.") } + defer os.Remove(tmpfile.Name()) - ri := acctest.RandInt() - rs := acctest.RandString(4) + if _, err := tmpfile.Write(data); err != nil { + t.Errorf("Unable to write to temporary file %q: %v", tmpfile.Name(), err) + } + if err := tmpfile.Close(); err != nil { + t.Errorf("Unable to close temporary file %q: %v", tmpfile.Name(), err) + } resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -59,7 +70,7 @@ func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMDataLakeStoreFile_largefiles(ri, rs, testLocation()), + Config: testAccAzureRMDataLakeStoreFile_largefiles(ri, rs, testLocation(), tmpfile.Name()), Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataLakeStoreFileExists(resourceName), ), @@ -181,7 +192,7 @@ resource "azurerm_data_lake_store_file" "test" { `, rInt, location, rString, location) } -func testAccAzureRMDataLakeStoreFile_largefiles(rInt int, rString, location string) string { +func testAccAzureRMDataLakeStoreFile_largefiles(rInt int, rString, location, file string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { name = "acctestRG-%d" @@ -198,9 +209,9 @@ resource "azurerm_data_lake_store" "test" { resource "azurerm_data_lake_store_file" "test" { remote_file_path = 
"/test/testAccAzureRMDataLakeStoreFile_largefiles.bin" account_name = "${azurerm_data_lake_store.test.name}" - local_file_path = "./testdata/testAccAzureRMDataLakeStoreFile_largefiles.bin" + local_file_path = "%s" } -`, rInt, location, rString, location) +`, rInt, location, rString, location, file) } func testAccAzureRMDataLakeStoreFile_requiresImport(rInt int, rString, location string) string { From 62d7005e9959e3b1a4f836f3d3f36e7be42bffbb Mon Sep 17 00:00:00 2001 From: kt Date: Fri, 11 Jan 2019 12:21:52 -0800 Subject: [PATCH 6/9] fix linting issues --- azurerm/resource_arm_data_lake_store_file.go | 2 +- azurerm/resource_arm_data_lake_store_file_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index d42b400e2147..3e3197eb1832 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -89,7 +89,7 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) } - buffer := make([]byte, chunkSize, chunkSize) + buffer := make([]byte, chunkSize) for { n, err := file.Read(buffer) if err == io.EOF { diff --git a/azurerm/resource_arm_data_lake_store_file_test.go b/azurerm/resource_arm_data_lake_store_file_test.go index 70d2ca6dc349..82e14805a9ef 100644 --- a/azurerm/resource_arm_data_lake_store_file_test.go +++ b/azurerm/resource_arm_data_lake_store_file_test.go @@ -48,7 +48,7 @@ func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { //"large" in this context is anything greater than 4 megabytes largeSize := 12 * 1024 * 1024 //12 mb - data := make([]byte, largeSize, largeSize) + data := make([]byte, largeSize) rand.Read(data) //fill with random data tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large") From 4a4f5b3d9dfca15982ba6ccfe16528c4fdf3a0de Mon Sep 17 00:00:00 2001 From: kt Date: Fri, 11 Jan 2019 12:30:35 -0800 Subject: [PATCH 7/9] minor style fix --- azurerm/resource_arm_data_lake_store_file.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index 3e3197eb1832..182cdcad436b 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -84,8 +84,7 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} } defer utils.IoCloseAndLogError(file, fmt.Sprintf("Error closing Data Lake Store File %q", localFilePath)) - _, err = client.Create(ctx, accountName, remoteFilePath, nil, nil, filesystem.DATA, nil, nil) - if err != nil { + if _, err = client.Create(ctx, accountName, remoteFilePath, nil, nil, filesystem.DATA, nil, nil); err != nil { return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) } @@ -102,8 +101,7 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} } chunk := ioutil.NopCloser(bytes.NewReader(buffer[:n])) - _, err = client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil) - if err != nil { + if _, err = client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil); err != nil { return fmt.Errorf("Error transferring chunk for Data Lake Store File %q : %+v", remoteFilePath, err) } } From ef25823fee2783c4ea80e4f2011893438998081e Mon Sep 17 00:00:00 2001 From: kt 
Date: Fri, 11 Jan 2019 13:56:09 -0800 Subject: [PATCH 8/9] acctest.RandInt() -> tf.AccRandTimeInt() --- azurerm/data_source_application_insights_test.go | 2 +- azurerm/data_source_loadbalancer_backend_address_pool_test.go | 2 +- azurerm/data_source_loadbalancer_test.go | 2 +- azurerm/resource_arm_cognitive_account_test.go | 2 +- azurerm/resource_arm_data_lake_store_file_test.go | 2 +- azurerm/resource_arm_kubernetes_cluster_test.go | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/azurerm/data_source_application_insights_test.go b/azurerm/data_source_application_insights_test.go index 374729de2d93..bb9cbd6f0f09 100644 --- a/azurerm/data_source_application_insights_test.go +++ b/azurerm/data_source_application_insights_test.go @@ -10,7 +10,7 @@ import ( func TestAccDataSourceApplicationInsights_basic(t *testing.T) { dataSourceName := "data.azurerm_application_insights.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() location := testLocation() resource.ParallelTest(t, resource.TestCase{ diff --git a/azurerm/data_source_loadbalancer_backend_address_pool_test.go b/azurerm/data_source_loadbalancer_backend_address_pool_test.go index 1c37eb59d3a7..e18b3b0a7957 100644 --- a/azurerm/data_source_loadbalancer_backend_address_pool_test.go +++ b/azurerm/data_source_loadbalancer_backend_address_pool_test.go @@ -10,7 +10,7 @@ import ( func TestAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(t *testing.T) { dataSourceName := "data.azurerm_lb_backend_address_pool.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() location := testLocation() addressPoolName := fmt.Sprintf("%d-address-pool", ri) diff --git a/azurerm/data_source_loadbalancer_test.go b/azurerm/data_source_loadbalancer_test.go index 709ded2ee861..e38c733dc067 100644 --- a/azurerm/data_source_loadbalancer_test.go +++ b/azurerm/data_source_loadbalancer_test.go @@ -10,7 +10,7 @@ import ( func TestAccAzureRMDataSourceLoadBalancer_basic(t *testing.T) { dataSourceName := "data.azurerm_lb.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() location := testLocation() resource.ParallelTest(t, resource.TestCase{ diff --git a/azurerm/resource_arm_cognitive_account_test.go b/azurerm/resource_arm_cognitive_account_test.go index 9629e80838ca..5ac7a917464a 100644 --- a/azurerm/resource_arm_cognitive_account_test.go +++ b/azurerm/resource_arm_cognitive_account_test.go @@ -40,7 +40,7 @@ func TestAccAzureRMCognitiveAccount_basic(t *testing.T) { func TestAccAzureRMCognitiveAccount_speechServices(t *testing.T) { resourceName := "azurerm_cognitive_account.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() config := testAccAzureRMCognitiveAccount_speechServices(ri, testLocation()) resource.ParallelTest(t, resource.TestCase{ diff --git a/azurerm/resource_arm_data_lake_store_file_test.go b/azurerm/resource_arm_data_lake_store_file_test.go index 9ca58130d703..72ad93d63664 100644 --- a/azurerm/resource_arm_data_lake_store_file_test.go +++ b/azurerm/resource_arm_data_lake_store_file_test.go @@ -44,7 +44,7 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) { func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { resourceName := "azurerm_data_lake_store_file.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() rs := acctest.RandString(4) //"large" in this context is anything greater than 4 megabytes diff --git a/azurerm/resource_arm_kubernetes_cluster_test.go b/azurerm/resource_arm_kubernetes_cluster_test.go index e5343a15eb66..0fecf34803b1 100644 --- 
a/azurerm/resource_arm_kubernetes_cluster_test.go +++ b/azurerm/resource_arm_kubernetes_cluster_test.go @@ -311,7 +311,7 @@ func TestAccAzureRMKubernetesCluster_internalNetwork(t *testing.T) { func TestAccAzureRMKubernetesCluster_addonProfileAciConnectorLinux(t *testing.T) { resourceName := "azurerm_kubernetes_cluster.test" - ri := acctest.RandInt() + ri := tf.AccRandTimeInt() clientId := os.Getenv("ARM_CLIENT_ID") clientSecret := os.Getenv("ARM_CLIENT_SECRET") config := testAccAzureRMKubernetesCluster_addonProfileAciConnectorLinux(ri, clientId, clientSecret, testLocation()) From 09fd2b9713e9225142cd9f0bd34ea128edb53036 Mon Sep 17 00:00:00 2001 From: kt Date: Fri, 11 Jan 2019 14:04:25 -0800 Subject: [PATCH 9/9] fix import issues --- azurerm/data_source_application_insights_test.go | 3 ++- azurerm/data_source_loadbalancer_backend_address_pool_test.go | 3 ++- azurerm/data_source_loadbalancer_test.go | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/azurerm/data_source_application_insights_test.go b/azurerm/data_source_application_insights_test.go index bb9cbd6f0f09..e5a3f40bdbf0 100644 --- a/azurerm/data_source_application_insights_test.go +++ b/azurerm/data_source_application_insights_test.go @@ -4,7 +4,8 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform/helper/acctest" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/hashicorp/terraform/helper/resource" ) diff --git a/azurerm/data_source_loadbalancer_backend_address_pool_test.go b/azurerm/data_source_loadbalancer_backend_address_pool_test.go index e18b3b0a7957..35c6835bd49d 100644 --- a/azurerm/data_source_loadbalancer_backend_address_pool_test.go +++ b/azurerm/data_source_loadbalancer_backend_address_pool_test.go @@ -4,7 +4,8 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform/helper/acctest" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/hashicorp/terraform/helper/resource" ) diff --git a/azurerm/data_source_loadbalancer_test.go b/azurerm/data_source_loadbalancer_test.go index e38c733dc067..303a2b0cc5fd 100644 --- a/azurerm/data_source_loadbalancer_test.go +++ b/azurerm/data_source_loadbalancer_test.go @@ -4,7 +4,8 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform/helper/acctest" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/hashicorp/terraform/helper/resource" )
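Note on the fixture in PATCH 5/9: the setup reports failures with t.Errorf, which marks the test failed but does not stop execution, so a failed ioutil.TempFile leaves tmpfile nil and the very next statement, defer os.Remove(tmpfile.Name()), panics on the nil *os.File. Below is a minimal sketch, not part of this series, of the same fixture pulled into a helper that uses t.Fatalf so a failed setup step aborts the test; the helper name and signature are illustrative, and it relies only on imports the test file already has after PATCH 5/9 (testing, io/ioutil, math/rand).

    func createLargeTempFile(t *testing.T, size int) string {
        data := make([]byte, size)
        rand.Read(data) // math/rand: fixture content only needs to be non-trivial, not secure
        tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large")
        if err != nil {
            t.Fatalf("Unable to open a temporary file: %v", err)
        }
        if _, err := tmpfile.Write(data); err != nil {
            t.Fatalf("Unable to write to temporary file %q: %v", tmpfile.Name(), err)
        }
        if err := tmpfile.Close(); err != nil {
            t.Fatalf("Unable to close temporary file %q: %v", tmpfile.Name(), err)
        }
        return tmpfile.Name() // caller owns cleanup of the returned path
    }

A caller would then write localFilePath := createLargeTempFile(t, 12*1024*1024) followed by defer os.Remove(localFilePath), keeping the cleanup at the call site where the path is actually used.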