azurerm_data_factory_dataset_parquet - Support `azure_blob_fs_location` (#23261)

* `data_factory_dataset_parquet`: support `azure_blob_fs_location`

* support reading `azure_blob_fs_location` back into state

* add acceptance test

* use `ExactlyOneOf` and update docs

* also update `azure_blob_fs_location` for `dataset_delimited_text`
bianyifan committed Sep 20, 2023
1 parent 856d283 commit 7e2a425
Showing 7 changed files with 348 additions and 42 deletions.
42 changes: 16 additions & 26 deletions internal/services/datafactory/data_factory.go
@@ -424,18 +424,14 @@ func expandDataFactoryDatasetAzureBlobFSLocation(d *pluginsdk.ResourceData) data
 
 	props := azureBlobFsLocations[0].(map[string]interface{})
 
-	blobStorageLocation := datafactory.AzureBlobFSLocation{
-		FileSystem: props["file_system"].(string),
+	blobFSLocation := datafactory.AzureBlobFSLocation{
 		Type:       datafactory.TypeBasicDatasetLocationTypeAzureBlobFSLocation,
-	}
-	if path := props["path"].(string); len(path) > 0 {
-		blobStorageLocation.FolderPath = path
-	}
-	if filename := props["filename"].(string); len(filename) > 0 {
-		blobStorageLocation.FileName = filename
+		FileSystem: expandDataFactoryExpressionResultType(props["file_system"].(string), props["dynamic_file_system_enabled"].(bool)),
+		FolderPath: expandDataFactoryExpressionResultType(props["path"].(string), props["dynamic_path_enabled"].(bool)),
+		FileName:   expandDataFactoryExpressionResultType(props["filename"].(string), props["dynamic_filename_enabled"].(bool)),
 	}
 
-	return blobStorageLocation
+	return blobFSLocation
 }
 
 func flattenDataFactoryDatasetHTTPServerLocation(input *datafactory.HTTPServerLocation) []interface{} {
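Note: expandDataFactoryExpressionResultType is a pre-existing helper elsewhere in data_factory.go and is not part of this diff. A minimal sketch of its assumed behavior, modelling the ADF Expression wire shape with a plain map rather than the SDK's datafactory.Expression type:

package datafactory // sketch only, not the provider's actual helper

// expandExpressionResultTypeSketch: a literal is passed through as a plain
// string; a dynamic value is wrapped in an Azure Data Factory Expression
// object, which serializes as {"value": "...", "type": "Expression"}.
func expandExpressionResultTypeSketch(value string, dynamic bool) interface{} {
	if !dynamic {
		return value
	}
	return map[string]interface{}{
		"type":  "Expression",
		"value": value, // e.g. "@concat('foo/', utcnow())"
	}
}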
@@ -490,31 +486,25 @@ func flattenDataFactoryDatasetAzureBlobFSLocation(input *datafactory.AzureBlobFS
 	if input == nil {
 		return []interface{}{}
 	}
+	result := make(map[string]interface{})
 
-	fileSystem, path, fileName := "", "", ""
 	if input.FileSystem != nil {
-		if v, ok := input.FileSystem.(string); ok {
-			fileSystem = v
-		}
+		fileSystem, dynamicFileSystemEnabled := flattenDataFactoryExpressionResultType(input.FileSystem)
+		result["file_system"] = fileSystem
+		result["dynamic_file_system_enabled"] = dynamicFileSystemEnabled
 	}
 	if input.FolderPath != nil {
-		if v, ok := input.FolderPath.(string); ok {
-			path = v
-		}
+		path, dynamicPathEnabled := flattenDataFactoryExpressionResultType(input.FolderPath)
+		result["path"] = path
+		result["dynamic_path_enabled"] = dynamicPathEnabled
 	}
 	if input.FileName != nil {
-		if v, ok := input.FileName.(string); ok {
-			fileName = v
-		}
+		filename, dynamicFilenameEnabled := flattenDataFactoryExpressionResultType(input.FileName)
+		result["filename"] = filename
+		result["dynamic_filename_enabled"] = dynamicFilenameEnabled
 	}
 
-	return []interface{}{
-		map[string]interface{}{
-			"file_system": fileSystem,
-			"path":        path,
-			"filename":    fileName,
-		},
-	}
+	return []interface{}{result}
 }
 
 func flattenDataFactoryDatasetSFTPLocation(input *datafactory.SftpLocation) []interface{} {
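Likewise, flattenDataFactoryExpressionResultType (also pre-existing and not shown in this diff) is assumed to be the inverse: it returns the underlying string plus a bool reporting whether the value arrived as an Expression object. A sketch under that assumption:

package datafactory // sketch only, not the provider's actual helper

// flattenExpressionResultTypeSketch: plain strings flatten to (value, false);
// Expression objects flatten to their inner value plus true, which the
// caller stores in the matching dynamic_*_enabled attribute.
func flattenExpressionResultTypeSketch(input interface{}) (string, bool) {
	switch v := input.(type) {
	case string:
		return v, false
	case map[string]interface{}:
		if v["type"] == "Expression" {
			if value, ok := v["value"].(string); ok {
				return value, true
			}
		}
	}
	return "", false
}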
internal/services/datafactory/data_factory_dataset_delimited_text.go
@@ -147,19 +147,34 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {
 					Schema: map[string]*pluginsdk.Schema{
 						"file_system": {
 							Type:         pluginsdk.TypeString,
-							Required:     true,
+							Optional:     true,
 							ValidateFunc: validation.StringIsNotEmpty,
 						},
+						"dynamic_file_system_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
 						"path": {
 							Type:         pluginsdk.TypeString,
 							Optional:     true,
 							ValidateFunc: validation.StringIsNotEmpty,
 						},
+						"dynamic_path_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
 						"filename": {
 							Type:         pluginsdk.TypeString,
 							Optional:     true,
 							ValidateFunc: validation.StringIsNotEmpty,
 						},
+						"dynamic_filename_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
 					},
 				},
 			},
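The dynamic_*_enabled flags decide whether a value reaches the Data Factory API as a literal or as an expression. For illustration, a small standalone program printing the dataset location JSON produced when dynamic_path_enabled = true (field names follow the ADF REST API; the concrete values are made up):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// folderPath is an Expression object instead of a literal string,
	// so Data Factory evaluates it at runtime.
	location := map[string]interface{}{
		"type":       "AzureBlobFSLocation",
		"fileSystem": "acctest-datalake",
		"folderPath": map[string]interface{}{
			"type":  "Expression",
			"value": "@concat('foo/bar/', formatDateTime(utcnow(), 'yyyy-MM-dd'))",
		},
	}
	out, _ := json.MarshalIndent(location, "", "  ")
	fmt.Println(string(out))
}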
@@ -344,7 +359,7 @@ func resourceDataFactoryDatasetDelimitedTextCreateUpdate(d *pluginsdk.ResourceDa
 
 	location := expandDataFactoryDatasetLocation(d)
 	if location == nil {
-		return fmt.Errorf("one of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
+		return fmt.Errorf("one of `http_server_location`, `azure_blob_fs_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
 	}
 
 	delimited_textDatasetProperties := datafactory.DelimitedTextDatasetTypeProperties{
internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go
@@ -134,6 +134,13 @@ func TestAccDataFactoryDatasetDelimitedText_blobFS(t *testing.T) {
 			),
 		},
 		data.ImportStep(),
+		{
+			Config: r.blobFSDynamicPath(data),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
 	})
 }

@@ -623,6 +630,78 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
 }
 `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
 }
+
+func (DatasetDelimitedTextResource) blobFSDynamicPath(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-df-%d"
+  location = "%s"
+}
+
+resource "azurerm_storage_account" "test" {
+  name                            = "acctestsa%s"
+  resource_group_name             = azurerm_resource_group.test.name
+  location                        = azurerm_resource_group.test.location
+  account_kind                    = "BlobStorage"
+  account_tier                    = "Standard"
+  account_replication_type       = "LRS"
+  is_hns_enabled                  = true
+  allow_nested_items_to_be_public = true
+}
+
+resource "azurerm_storage_data_lake_gen2_filesystem" "test" {
+  name               = "acctest-datalake-%d"
+  storage_account_id = azurerm_storage_account.test.id
+}
+
+resource "azurerm_data_factory" "test" {
+  name                = "acctestdf%d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+
+  identity {
+    type = "SystemAssigned"
+  }
+}
+
+resource "azurerm_role_assignment" "test" {
+  scope                = azurerm_storage_account.test.id
+  role_definition_name = "Storage Blob Data Owner"
+  principal_id         = azurerm_data_factory.test.identity.0.principal_id
+}
+
+resource "azurerm_data_factory_linked_service_data_lake_storage_gen2" "test" {
+  name                 = "acctestDataLakeStorage%d"
+  data_factory_id      = azurerm_data_factory.test.id
+  use_managed_identity = true
+  url                  = azurerm_storage_account.test.primary_dfs_endpoint
+}
+
+resource "azurerm_data_factory_dataset_delimited_text" "test" {
+  name                = "acctestds%d"
+  data_factory_id     = azurerm_data_factory.test.id
+  linked_service_name = azurerm_data_factory_linked_service_data_lake_storage_gen2.test.name
+
+  azure_blob_fs_location {
+    file_system                 = azurerm_storage_data_lake_gen2_filesystem.test.name
+    dynamic_file_system_enabled = true
+    path                        = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
+    dynamic_path_enabled        = true
+  }
+
+  column_delimiter    = ","
+  row_delimiter       = "NEW"
+  encoding            = "UTF-8"
+  quote_character     = "x"
+  escape_character    = "f"
+  first_row_as_header = true
+  null_value          = "NULL"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
+}
 
 func (DatasetDelimitedTextResource) blobDynamicContainer(data acceptance.TestData) string {
 	return fmt.Sprintf(`
internal/services/datafactory/data_factory_dataset_parquet.go
@@ -76,12 +76,54 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource {
 				},
 			},
 
+			// Parquet Specific Field, one option for 'location'
+			"azure_blob_fs_location": {
+				Type:         pluginsdk.TypeList,
+				MaxItems:     1,
+				Optional:     true,
+				ExactlyOneOf: []string{"azure_blob_fs_location", "azure_blob_storage_location", "http_server_location"},
+				Elem: &pluginsdk.Resource{
+					Schema: map[string]*pluginsdk.Schema{
+						"file_system": {
+							Type:         pluginsdk.TypeString,
+							Optional:     true,
+							ValidateFunc: validation.StringIsNotEmpty,
+						},
+						"dynamic_file_system_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
+						"path": {
+							Type:         pluginsdk.TypeString,
+							Optional:     true,
+							ValidateFunc: validation.StringIsNotEmpty,
+						},
+						"dynamic_path_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
+						"filename": {
+							Type:         pluginsdk.TypeString,
+							Optional:     true,
+							ValidateFunc: validation.StringIsNotEmpty,
+						},
+						"dynamic_filename_enabled": {
+							Type:     pluginsdk.TypeBool,
+							Optional: true,
+							Default:  false,
+						},
+					},
+				},
+			},
+
 			// Parquet Specific Field, one option for 'location'
 			"azure_blob_storage_location": {
-				Type:          pluginsdk.TypeList,
-				MaxItems:      1,
-				Optional:      true,
-				ConflictsWith: []string{"http_server_location"},
+				Type:         pluginsdk.TypeList,
+				MaxItems:     1,
+				Optional:     true,
+				ExactlyOneOf: []string{"azure_blob_fs_location", "azure_blob_storage_location", "http_server_location"},
 				Elem: &pluginsdk.Resource{
 					Schema: map[string]*pluginsdk.Schema{
 						"container": {
@@ -156,10 +198,10 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource {
 
 			// Parquet Specific Field, one option for 'location'
 			"http_server_location": {
-				Type:          pluginsdk.TypeList,
-				MaxItems:      1,
-				Optional:      true,
-				ConflictsWith: []string{"azure_blob_storage_location"},
+				Type:         pluginsdk.TypeList,
+				MaxItems:     1,
+				Optional:     true,
+				ExactlyOneOf: []string{"azure_blob_fs_location", "azure_blob_storage_location", "http_server_location"},
 				Elem: &pluginsdk.Resource{
 					Schema: map[string]*pluginsdk.Schema{
 						"relative_url": {
@@ -270,7 +312,7 @@ func resourceDataFactoryDatasetParquetCreateUpdate(d *pluginsdk.ResourceData, me
 
 	location := expandDataFactoryDatasetLocation(d)
 	if location == nil {
-		return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Parquet Dataset")
+		return fmt.Errorf("One of `http_server_location`, `azure_blob_fs_location`, `azure_blob_storage_location` must be specified to create a DataFactory Parquet Dataset")
 	}
 
 	parquetDatasetProperties := datafactory.ParquetDatasetTypeProperties{
@@ -394,6 +436,11 @@ func resourceDataFactoryDatasetParquetRead(d *pluginsdk.ResourceData, meta inter
 			return fmt.Errorf("setting `azure_blob_storage_location` for Data Factory Parquet Dataset %s", err)
 		}
 	}
+	if azureBlobFSLocation, ok := properties.Location.AsAzureBlobFSLocation(); ok {
+		if err := d.Set("azure_blob_fs_location", flattenDataFactoryDatasetAzureBlobFSLocation(azureBlobFSLocation)); err != nil {
+			return fmt.Errorf("setting `azure_blob_fs_location` for Data Factory Parquet Dataset %s", err)
+		}
+	}
 
 	compressionCodec, ok := properties.CompressionCodec.(string)
 	if !ok {
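AsAzureBlobFSLocation comes from the generated Azure SDK, where the dataset location is a discriminated union: each concrete type carries As* accessors that return the concrete value plus an ok flag, as the hunk above relies on. A simplified sketch of that pattern (names trimmed; not the SDK's actual code):

package sketch

// BasicDatasetLocation models the union: only the matching concrete type
// answers (value, true); every other type answers (nil, false).
type BasicDatasetLocation interface {
	AsAzureBlobFSLocation() (*AzureBlobFSLocation, bool)
}

type AzureBlobFSLocation struct {
	FileSystem, FolderPath, FileName interface{} // string or Expression object
}

func (l *AzureBlobFSLocation) AsAzureBlobFSLocation() (*AzureBlobFSLocation, bool) {
	return l, true
}

type HTTPServerLocation struct {
	RelativeURL interface{}
}

func (l *HTTPServerLocation) AsAzureBlobFSLocation() (*AzureBlobFSLocation, bool) {
	return nil, false
}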
