
Merge pull request #2633 from dyindude/master
Chunked file transfers for azurerm_data_lake_store_file
katbyte committed Jan 11, 2019
2 parents a0bbae7 + 09fd2b9 commit 139aa76
Showing 7 changed files with 100 additions and 15 deletions.
5 changes: 3 additions & 2 deletions azurerm/data_source_application_insights_test.go
@@ -4,13 +4,14 @@ import (
 	"fmt"
 	"testing"
 
-	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+
 	"github.com/hashicorp/terraform/helper/resource"
 )
 
 func TestAccDataSourceApplicationInsights_basic(t *testing.T) {
 	dataSourceName := "data.azurerm_application_insights.test"
-	ri := acctest.RandInt()
+	ri := tf.AccRandTimeInt()
 	location := testLocation()
 
 	resource.ParallelTest(t, resource.TestCase{
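These data-source tests swap acctest.RandInt() for tf.AccRandTimeInt(), the provider's helper for generating test-resource name suffixes that are unlikely to collide across concurrent or back-to-back acceptance runs. As a rough sketch of the idea only (the real implementation in azurerm/helpers/tf may differ), such a helper can combine a timestamp with a random suffix; it assumes a 64-bit int:

package tf

import (
	"fmt"
	"math/rand"
	"strconv"
	"time"
)

// AccRandTimeInt sketches a time-based random integer for naming
// acceptance-test resources: a YYMMDDhhmmss prefix keeps values from
// different runs distinct, and a 5-digit random suffix separates tests
// started within the same second. Illustrative only; the actual helper
// may be implemented differently.
func AccRandTimeInt() int {
	timestamp := time.Now().Format("060102150405") // YYMMDDhhmmss
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	suffix := fmt.Sprintf("%05d", rng.Intn(100000))

	i, err := strconv.Atoi(timestamp + suffix)
	if err != nil {
		panic(err) // input is all digits; only fails if int is 32-bit
	}
	return i
}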
5 changes: 3 additions & 2 deletions azurerm/data_source_loadbalancer_backend_address_pool_test.go
@@ -4,13 +4,14 @@ import (
 	"fmt"
 	"testing"
 
-	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+
 	"github.com/hashicorp/terraform/helper/resource"
 )
 
 func TestAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(t *testing.T) {
 	dataSourceName := "data.azurerm_lb_backend_address_pool.test"
-	ri := acctest.RandInt()
+	ri := tf.AccRandTimeInt()
 	location := testLocation()
 	addressPoolName := fmt.Sprintf("%d-address-pool", ri)
 
5 changes: 3 additions & 2 deletions azurerm/data_source_loadbalancer_test.go
@@ -4,13 +4,14 @@ import (
 	"fmt"
 	"testing"
 
-	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+
 	"github.com/hashicorp/terraform/helper/resource"
 )
 
 func TestAccAzureRMDataSourceLoadBalancer_basic(t *testing.T) {
 	dataSourceName := "data.azurerm_lb.test"
-	ri := acctest.RandInt()
+	ri := tf.AccRandTimeInt()
 	location := testLocation()
 
 	resource.ParallelTest(t, resource.TestCase{
2 changes: 1 addition & 1 deletion azurerm/resource_arm_cognitive_account_test.go
@@ -40,7 +40,7 @@ func TestAccAzureRMCognitiveAccount_basic(t *testing.T) {
 
 func TestAccAzureRMCognitiveAccount_speechServices(t *testing.T) {
 	resourceName := "azurerm_cognitive_account.test"
-	ri := acctest.RandInt()
+	ri := tf.AccRandTimeInt()
 	config := testAccAzureRMCognitiveAccount_speechServices(ri, testLocation())
 
 	resource.ParallelTest(t, resource.TestCase{
27 changes: 20 additions & 7 deletions azurerm/resource_arm_data_lake_store_file.go
@@ -3,6 +3,7 @@ package azurerm
 import (
 	"bytes"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"log"
 	"net/url"
@@ -53,6 +54,7 @@ func resourceArmDataLakeStoreFile() *schema.Resource {
 func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error {
 	client := meta.(*ArmClient).dataLakeStoreFilesClient
 	ctx := meta.(*ArmClient).StopContext
+	chunkSize := 4 * 1024 * 1024
 
 	log.Printf("[INFO] preparing arguments for Date Lake Store File creation.")
 
@@ -82,15 +84,26 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}
 	}
 	defer utils.IoCloseAndLogError(file, fmt.Sprintf("Error closing Data Lake Store File %q", localFilePath))
 
-	// Read the file contents
-	fileContents, err := ioutil.ReadAll(file)
-	if err != nil {
-		return err
+	if _, err = client.Create(ctx, accountName, remoteFilePath, nil, nil, filesystem.DATA, nil, nil); err != nil {
+		return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err)
 	}
 
-	_, err = client.Create(ctx, accountName, remoteFilePath, ioutil.NopCloser(bytes.NewReader(fileContents)), utils.Bool(false), filesystem.CLOSE, nil, nil)
-	if err != nil {
-		return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err)
+	buffer := make([]byte, chunkSize)
+	for {
+		n, err := file.Read(buffer)
+		if err == io.EOF {
+			break
+		}
+		flag := filesystem.DATA
+		if n < chunkSize {
+			// last chunk
+			flag = filesystem.CLOSE
+		}
+		chunk := ioutil.NopCloser(bytes.NewReader(buffer[:n]))
+
+		if _, err = client.Append(ctx, accountName, remoteFilePath, chunk, nil, flag, nil, nil); err != nil {
+			return fmt.Errorf("Error transferring chunk for Data Lake Store File %q : %+v", remoteFilePath, err)
+		}
 	}
 
 	d.SetId(id)
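A note on the loop above: it marks the final chunk by testing n < chunkSize, so a file whose length is an exact multiple of the chunk size hits a zero-byte io.EOF read and breaks out of the loop without ever sending filesystem.CLOSE, and a non-EOF read error is silently dropped rather than returned. Below is a minimal standalone sketch of the same append-in-chunks pattern with those cases handled; uploadChunks, appendFunc, and send are illustrative names, not part of the provider or the Azure SDK:

package main

import (
	"fmt"
	"io"
	"strings"
)

// appendFunc stands in for a remote append call such as the Data Lake
// filesystem client's Append; last maps onto the CLOSE-vs-DATA flag.
type appendFunc func(chunk []byte, last bool) error

// uploadChunks streams r to send in fixed-size chunks, using io.EOF
// rather than a short read to detect the final chunk, so inputs whose
// length is an exact multiple of chunkSize still get a closing call.
func uploadChunks(r io.Reader, chunkSize int, send appendFunc) error {
	buffer := make([]byte, chunkSize)
	for {
		n, err := r.Read(buffer)
		if n > 0 {
			last := err == io.EOF
			if sendErr := send(buffer[:n], last); sendErr != nil {
				return fmt.Errorf("transferring chunk: %+v", sendErr)
			}
			if last {
				return nil
			}
		}
		if err == io.EOF {
			// Zero-byte read at EOF: close the stream with an empty chunk.
			return send(nil, true)
		}
		if err != nil {
			return err // surface non-EOF read errors instead of dropping them
		}
	}
}

func main() {
	// 8 bytes with a 4-byte chunk size: an exact multiple, the case
	// that the n < chunkSize test misses.
	err := uploadChunks(strings.NewReader("12345678"), 4, func(chunk []byte, last bool) error {
		fmt.Printf("append %d bytes, last=%v\n", len(chunk), last)
		return nil
	})
	if err != nil {
		fmt.Println("upload failed:", err)
	}
}

Whether the service accepts an empty closing append is a separate question; the sketch only illustrates the control flow.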
69 changes: 69 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_test.go
@@ -2,7 +2,10 @@ package azurerm
 
 import (
 	"fmt"
+	"io/ioutil"
+	"math/rand"
 	"net/http"
+	"os"
 	"testing"
 
 	"github.com/hashicorp/terraform/helper/acctest"
@@ -39,6 +42,50 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) {
 	})
 }
 
+func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) {
+	resourceName := "azurerm_data_lake_store_file.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+
+	//"large" in this context is anything greater than 4 megabytes
+	largeSize := 12 * 1024 * 1024 //12 mb
+	data := make([]byte, largeSize)
+	rand.Read(data) //fill with random data
+
+	tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large")
+	if err != nil {
+		t.Errorf("Unable to open a temporary file.")
+	}
+	defer os.Remove(tmpfile.Name())
+
+	if _, err := tmpfile.Write(data); err != nil {
+		t.Errorf("Unable to write to temporary file %q: %v", tmpfile.Name(), err)
+	}
+	if err := tmpfile.Close(); err != nil {
+		t.Errorf("Unable to close temporary file %q: %v", tmpfile.Name(), err)
+	}
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMDataLakeStoreFile_largefiles(ri, rs, testLocation(), tmpfile.Name()),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMDataLakeStoreFileExists(resourceName),
+				),
+			},
+			{
+				ResourceName:            resourceName,
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"local_file_path"},
+			},
+		},
+	})
+}
+
 func TestAccAzureRMDataLakeStoreFile_requiresimport(t *testing.T) {
 	if !requireResourcesToBeImported {
 		t.Skip("Skipping since resources aren't required to be imported")
@@ -146,6 +193,28 @@ resource "azurerm_data_lake_store_file" "test" {
 `, rInt, location, rString, location)
 }
 
+func testAccAzureRMDataLakeStoreFile_largefiles(rInt int, rString, location, file string) string {
+	return fmt.Sprintf(`
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-%d"
+  location = "%s"
+}
+
+resource "azurerm_data_lake_store" "test" {
+  name                = "unlikely23exst2acct%s"
+  resource_group_name = "${azurerm_resource_group.test.name}"
+  location            = "%s"
+  firewall_state      = "Disabled"
+}
+
+resource "azurerm_data_lake_store_file" "test" {
+  remote_file_path = "/test/testAccAzureRMDataLakeStoreFile_largefiles.bin"
+  account_name     = "${azurerm_data_lake_store.test.name}"
+  local_file_path  = "%s"
+}
+`, rInt, location, rString, location, file)
+}
+
 func testAccAzureRMDataLakeStoreFile_requiresImport(rInt int, rString, location string) string {
 	template := testAccAzureRMDataLakeStoreFile_basic(rInt, rString, location)
 	return fmt.Sprintf(`
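The fixture setup in TestAccAzureRMDataLakeStoreFile_largefiles reports failures with t.Errorf, which records the error but lets the test proceed to provision real infrastructure against a possibly unusable temp file. A sketch of the same setup that fails fast instead; createLargeTempFile is an illustrative helper name, not part of the provider's test code:

package azurerm

import (
	"io/ioutil"
	"math/rand"
	"os"
	"testing"
)

// createLargeTempFile writes size bytes of math/rand data to a temp file
// and returns its path; t.Fatalf aborts the test immediately if the
// fixture cannot be built. Callers should defer os.Remove on the result.
func createLargeTempFile(t *testing.T, size int) string {
	t.Helper()

	data := make([]byte, size)
	rand.Read(data) // math/rand.Read always fills the slice and never returns an error

	tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large")
	if err != nil {
		t.Fatalf("Unable to open a temporary file: %v", err)
	}

	if _, err := tmpfile.Write(data); err != nil {
		os.Remove(tmpfile.Name())
		t.Fatalf("Unable to write to temporary file %q: %v", tmpfile.Name(), err)
	}
	if err := tmpfile.Close(); err != nil {
		os.Remove(tmpfile.Name())
		t.Fatalf("Unable to close temporary file %q: %v", tmpfile.Name(), err)
	}

	return tmpfile.Name()
}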
2 changes: 1 addition & 1 deletion azurerm/resource_arm_kubernetes_cluster_test.go
@@ -311,7 +311,7 @@ func TestAccAzureRMKubernetesCluster_internalNetwork(t *testing.T) {
 
 func TestAccAzureRMKubernetesCluster_addonProfileAciConnectorLinux(t *testing.T) {
 	resourceName := "azurerm_kubernetes_cluster.test"
-	ri := acctest.RandInt()
+	ri := tf.AccRandTimeInt()
 	clientId := os.Getenv("ARM_CLIENT_ID")
 	clientSecret := os.Getenv("ARM_CLIENT_SECRET")
 	config := testAccAzureRMKubernetesCluster_addonProfileAciConnectorLinux(ri, clientId, clientSecret, testLocation())