diff --git a/.changelog/6680.txt b/.changelog/6680.txt
new file mode 100644
index 00000000000..72ca889a605
--- /dev/null
+++ b/.changelog/6680.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+bigquery: added support for authorized routines on the `google_bigquery_dataset` and `google_bigquery_dataset_access` resources
+```
diff --git a/google/iam_bigquery_dataset.go b/google/iam_bigquery_dataset.go
index 2ad13d7ddba..e3375d9e9c6 100644
--- a/google/iam_bigquery_dataset.go
+++ b/google/iam_bigquery_dataset.go
@@ -242,6 +242,10 @@ func accessToIamMember(access map[string]interface{}) (string, error) {
// dataset does not map to an IAM member, use access instead
return "", fmt.Errorf("Failed to convert BigQuery Dataset access to IAM member. To use views with a dataset, please use dataset_access")
}
+ if _, ok := access["routine"]; ok {
+ // routine does not map to an IAM member, use access instead
+ return "", fmt.Errorf("Failed to convert BigQuery Dataset access to IAM member. To use routines with a dataset, please use dataset_access")
+ }
if member, ok := access["userByEmail"]; ok {
// service accounts have "gservice" in their email. This is best guess due to lost information
if strings.Contains(member.(string), "gserviceaccount") {
diff --git a/google/resource_bigquery_dataset.go b/google/resource_bigquery_dataset.go
index d1912477c1b..ed8b3107f70 100644
--- a/google/resource_bigquery_dataset.go
+++ b/google/resource_bigquery_dataset.go
@@ -286,6 +286,37 @@ are supported. Predefined roles that have equivalent basic roles
are swapped by the API to their basic counterparts. See
[official docs](https://cloud.google.com/bigquery/docs/access-control).`,
},
+ "routine": {
+ Type: schema.TypeList,
+ Optional: true,
+ Description: `A routine from a different dataset to grant access to. Queries
+executed against that routine will have read access to tables in
+this dataset. The role field is not required when this field is
+set. If that routine is updated by any user, access to the routine
+needs to be granted again via an update operation.`,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "dataset_id": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The ID of the dataset containing this routine.`,
+ },
+ "project_id": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The ID of the project containing this routine.`,
+ },
+ "routine_id": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The ID of the routine. The ID must contain only letters (a-z,
+A-Z), numbers (0-9), or underscores (_). The maximum length
+is 256 characters.`,
+ },
+ },
+ },
+ },
"special_group": {
Type: schema.TypeString,
Optional: true,
@@ -733,6 +764,7 @@ func flattenBigQueryDatasetAccess(v interface{}, d *schema.ResourceData, config
"user_by_email": flattenBigQueryDatasetAccessUserByEmail(original["userByEmail"], d, config),
"view": flattenBigQueryDatasetAccessView(original["view"], d, config),
"dataset": flattenBigQueryDatasetAccessDataset(original["dataset"], d, config),
+ "routine": flattenBigQueryDatasetAccessRoutine(original["routine"], d, config),
})
}
return transformed
@@ -828,6 +860,35 @@ func flattenBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d *schema.Res
return v
}
+func flattenBigQueryDatasetAccessRoutine(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ if v == nil {
+ return nil
+ }
+ original := v.(map[string]interface{})
+ if len(original) == 0 {
+ return nil
+ }
+ transformed := make(map[string]interface{})
+ transformed["dataset_id"] =
+ flattenBigQueryDatasetAccessRoutineDatasetId(original["datasetId"], d, config)
+ transformed["project_id"] =
+ flattenBigQueryDatasetAccessRoutineProjectId(original["projectId"], d, config)
+ transformed["routine_id"] =
+ flattenBigQueryDatasetAccessRoutineRoutineId(original["routineId"], d, config)
+ return []interface{}{transformed}
+}
+func flattenBigQueryDatasetAccessRoutineDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
+func flattenBigQueryDatasetAccessRoutineProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
+func flattenBigQueryDatasetAccessRoutineRoutineId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
func flattenBigQueryDatasetCreationTime(v interface{}, d *schema.ResourceData, config *Config) interface{} {
// Handles the string fixed64 format
if strVal, ok := v.(string); ok {
@@ -1020,6 +1081,13 @@ func expandBigQueryDatasetAccess(v interface{}, d TerraformResourceData, config
transformed["dataset"] = transformedDataset
}
+ transformedRoutine, err := expandBigQueryDatasetAccessRoutine(original["routine"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedRoutine); val.IsValid() && !isEmptyValue(val) {
+ transformed["routine"] = transformedRoutine
+ }
+
req = append(req, transformed)
}
return req, nil
@@ -1154,6 +1222,51 @@ func expandBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d TerraformRes
return v, nil
}
+func expandBigQueryDatasetAccessRoutine(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ l := v.([]interface{})
+ if len(l) == 0 || l[0] == nil {
+ return nil, nil
+ }
+ raw := l[0]
+ original := raw.(map[string]interface{})
+ transformed := make(map[string]interface{})
+
+ transformedDatasetId, err := expandBigQueryDatasetAccessRoutineDatasetId(original["dataset_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+ transformed["datasetId"] = transformedDatasetId
+ }
+
+ transformedProjectId, err := expandBigQueryDatasetAccessRoutineProjectId(original["project_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+ transformed["projectId"] = transformedProjectId
+ }
+
+ transformedRoutineId, err := expandBigQueryDatasetAccessRoutineRoutineId(original["routine_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedRoutineId); val.IsValid() && !isEmptyValue(val) {
+ transformed["routineId"] = transformedRoutineId
+ }
+
+ return transformed, nil
+}
+
+func expandBigQueryDatasetAccessRoutineDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
+func expandBigQueryDatasetAccessRoutineProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
+func expandBigQueryDatasetAccessRoutineRoutineId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
func expandBigQueryDatasetDatasetReference(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
transformed := make(map[string]interface{})
transformedDatasetId, err := expandBigQueryDatasetDatasetReferenceDatasetId(d.Get("dataset_id"), d, config)
diff --git a/google/resource_bigquery_dataset_access.go b/google/resource_bigquery_dataset_access.go
index df9c610c6e1..cd156d3903d 100644
--- a/google/resource_bigquery_dataset_access.go
+++ b/google/resource_bigquery_dataset_access.go
@@ -202,7 +202,7 @@ but additional target types may be added in the future. Possible values: VIEWS`,
},
},
},
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"domain": {
Type: schema.TypeString,
@@ -211,7 +211,7 @@ but additional target types may be added in the future. Possible values: VIEWS`,
DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
Description: `A domain to grant access to. Any users signed in with the
domain specified will be granted the specified access`,
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"group_by_email": {
Type: schema.TypeString,
@@ -219,7 +219,7 @@ domain specified will be granted the specified access`,
ForceNew: true,
DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
Description: `An email address of a Google Group to grant access to.`,
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"iam_member": {
Type: schema.TypeString,
@@ -228,7 +228,7 @@ domain specified will be granted the specified access`,
DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
Description: `Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: 'allUsers'`,
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"role": {
Type: schema.TypeString,
@@ -242,6 +242,42 @@ swapped by the API to their basic counterparts, and will show a diff
post-create. See
[official docs](https://cloud.google.com/bigquery/docs/access-control).`,
},
+ "routine": {
+ Type: schema.TypeList,
+ Optional: true,
+ ForceNew: true,
+ Description: `A routine from a different dataset to grant access to. Queries
+executed against that routine will have read access to tables in
+this dataset. The role field is not required when this field is
+set. If that routine is updated by any user, access to the routine
+needs to be granted again via an update operation.`,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "dataset_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: `The ID of the dataset containing this routine.`,
+ },
+ "project_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: `The ID of the project containing this routine.`,
+ },
+ "routine_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: `The ID of the routine. The ID must contain only letters (a-z,
+A-Z), numbers (0-9), or underscores (_). The maximum length
+is 256 characters.`,
+ },
+ },
+ },
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+ },
"special_group": {
Type: schema.TypeString,
Optional: true,
@@ -260,7 +296,7 @@ post-create. See
* 'allAuthenticatedUsers': All authenticated BigQuery users.`,
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"user_by_email": {
Type: schema.TypeString,
@@ -269,7 +305,7 @@ post-create. See
DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
Description: `An email address of a user to grant access to. For example:
fred@example.com`,
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"view": {
Type: schema.TypeList,
@@ -305,7 +341,7 @@ is 1,024 characters.`,
},
},
},
- ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
+ ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
},
"api_updated_member": {
Type: schema.TypeBool,
@@ -385,6 +421,12 @@ func resourceBigQueryDatasetAccessCreate(d *schema.ResourceData, meta interface{
} else if v, ok := d.GetOkExists("dataset"); !isEmptyValue(reflect.ValueOf(datasetProp)) && (ok || !reflect.DeepEqual(v, datasetProp)) {
obj["dataset"] = datasetProp
}
+ routineProp, err := expandNestedBigQueryDatasetAccessRoutine(d.Get("routine"), d, config)
+ if err != nil {
+ return err
+ } else if v, ok := d.GetOkExists("routine"); !isEmptyValue(reflect.ValueOf(routineProp)) && (ok || !reflect.DeepEqual(v, routineProp)) {
+ obj["routine"] = routineProp
+ }
lockName, err := replaceVars(d, config, "{{dataset_id}}")
if err != nil {
@@ -532,6 +574,9 @@ func resourceBigQueryDatasetAccessRead(d *schema.ResourceData, meta interface{})
if err := d.Set("dataset", flattenNestedBigQueryDatasetAccessDataset(res["dataset"], d, config)); err != nil {
return fmt.Errorf("Error reading DatasetAccess: %s", err)
}
+ if err := d.Set("routine", flattenNestedBigQueryDatasetAccessRoutine(res["routine"], d, config)); err != nil {
+ return fmt.Errorf("Error reading DatasetAccess: %s", err)
+ }
return nil
}
@@ -680,6 +725,35 @@ func flattenNestedBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d *sche
return v
}
+func flattenNestedBigQueryDatasetAccessRoutine(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ if v == nil {
+ return nil
+ }
+ original := v.(map[string]interface{})
+ if len(original) == 0 {
+ return nil
+ }
+ transformed := make(map[string]interface{})
+ transformed["dataset_id"] =
+ flattenNestedBigQueryDatasetAccessRoutineDatasetId(original["datasetId"], d, config)
+ transformed["project_id"] =
+ flattenNestedBigQueryDatasetAccessRoutineProjectId(original["projectId"], d, config)
+ transformed["routine_id"] =
+ flattenNestedBigQueryDatasetAccessRoutineRoutineId(original["routineId"], d, config)
+ return []interface{}{transformed}
+}
+func flattenNestedBigQueryDatasetAccessRoutineDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
+func flattenNestedBigQueryDatasetAccessRoutineProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
+func flattenNestedBigQueryDatasetAccessRoutineRoutineId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+ return v
+}
+
func expandNestedBigQueryDatasetAccessDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
return v, nil
}
@@ -824,6 +898,51 @@ func expandNestedBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d Terraf
return v, nil
}
+func expandNestedBigQueryDatasetAccessRoutine(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ l := v.([]interface{})
+ if len(l) == 0 || l[0] == nil {
+ return nil, nil
+ }
+ raw := l[0]
+ original := raw.(map[string]interface{})
+ transformed := make(map[string]interface{})
+
+ transformedDatasetId, err := expandNestedBigQueryDatasetAccessRoutineDatasetId(original["dataset_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+ transformed["datasetId"] = transformedDatasetId
+ }
+
+ transformedProjectId, err := expandNestedBigQueryDatasetAccessRoutineProjectId(original["project_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+ transformed["projectId"] = transformedProjectId
+ }
+
+ transformedRoutineId, err := expandNestedBigQueryDatasetAccessRoutineRoutineId(original["routine_id"], d, config)
+ if err != nil {
+ return nil, err
+ } else if val := reflect.ValueOf(transformedRoutineId); val.IsValid() && !isEmptyValue(val) {
+ transformed["routineId"] = transformedRoutineId
+ }
+
+ return transformed, nil
+}
+
+func expandNestedBigQueryDatasetAccessRoutineDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
+func expandNestedBigQueryDatasetAccessRoutineProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
+func expandNestedBigQueryDatasetAccessRoutineRoutineId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+ return v, nil
+}
+
func flattenNestedBigQueryDatasetAccess(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) {
var v interface{}
var ok bool
@@ -891,6 +1010,11 @@ func resourceBigQueryDatasetAccessFindNestedObjectInList(d *schema.ResourceData,
return -1, nil, err
}
expectedFlattenedDataset := flattenNestedBigQueryDatasetAccessDataset(expectedDataset, d, meta.(*Config))
+ expectedRoutine, err := expandNestedBigQueryDatasetAccessRoutine(d.Get("routine"), d, meta.(*Config))
+ if err != nil {
+ return -1, nil, err
+ }
+ expectedFlattenedRoutine := flattenNestedBigQueryDatasetAccessRoutine(expectedRoutine, d, meta.(*Config))
// Search list for this resource.
for idx, itemRaw := range items {
@@ -947,6 +1071,12 @@ func resourceBigQueryDatasetAccessFindNestedObjectInList(d *schema.ResourceData,
log.Printf("[DEBUG] Skipping item with dataset= %#v, looking for %#v)", itemDataset, expectedFlattenedDataset)
continue
}
+ itemRoutine := flattenNestedBigQueryDatasetAccessRoutine(item["routine"], d, meta.(*Config))
+ // isEmptyValue check so that if one is nil and the other is "", that's considered a match
+ if !(isEmptyValue(reflect.ValueOf(itemRoutine)) && isEmptyValue(reflect.ValueOf(expectedFlattenedRoutine))) && !reflect.DeepEqual(itemRoutine, expectedFlattenedRoutine) {
+ log.Printf("[DEBUG] Skipping item with routine= %#v, looking for %#v)", itemRoutine, expectedFlattenedRoutine)
+ continue
+ }
log.Printf("[DEBUG] Found item for resource %q: %#v)", d.Id(), item)
return idx, item, nil
}
diff --git a/google/resource_bigquery_dataset_access_test.go b/google/resource_bigquery_dataset_access_test.go
index 0aa0010cb2a..bb1b5a8b836 100644
--- a/google/resource_bigquery_dataset_access_test.go
+++ b/google/resource_bigquery_dataset_access_test.go
@@ -100,6 +100,42 @@ func TestAccBigQueryDatasetAccess_authorizedDataset(t *testing.T) {
})
}
+func TestAccBigQueryDatasetAccess_authorizedRoutine(t *testing.T) {
+ // Multiple fine-grained resources
+ skipIfVcr(t)
+ t.Parallel()
+
+ context := map[string]interface{}{
+ "public_dataset": fmt.Sprintf("tf_test_public_dataset_%s", randString(t, 10)),
+ "public_routine": fmt.Sprintf("tf_test_public_routine_%s", randString(t, 10)),
+ "private_dataset": fmt.Sprintf("tf_test_private_dataset_%s", randString(t, 10)),
+ }
+
+ expected := map[string]interface{}{
+ "routine": map[string]interface{}{
+ "projectId": getTestProjectFromEnv(),
+ "datasetId": context["public_dataset"],
+ "routineId": context["public_routine"],
+ },
+ }
+
+ vcrTest(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
+ Providers: testAccProviders,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccBigQueryDatasetAccess_authorizedRoutine(context),
+ Check: testAccCheckBigQueryDatasetAccessPresent(t, "google_bigquery_dataset.private", expected),
+ },
+ {
+ // Destroy step instead of CheckDestroy so we can check the access is removed without deleting the dataset
+ Config: testAccBigQueryDatasetAccess_destroy(context["private_dataset"].(string), "private"),
+ Check: testAccCheckBigQueryDatasetAccessAbsent(t, "google_bigquery_dataset.private", expected),
+ },
+ },
+ })
+}
+
func TestAccBigQueryDatasetAccess_multiple(t *testing.T) {
// Multiple fine-grained resources
skipIfVcr(t)
@@ -358,6 +394,47 @@ resource "google_bigquery_dataset" "public" {
`, datasetID, datasetID2)
}
+func testAccBigQueryDatasetAccess_authorizedRoutine(context map[string]interface{}) string {
+ return Nprintf(`
+resource "google_bigquery_dataset" "public" {
+ dataset_id = "%{public_dataset}"
+ description = "This dataset is public"
+}
+
+resource "google_bigquery_routine" "public" {
+ dataset_id = google_bigquery_dataset.public.dataset_id
+ routine_id = "%{public_routine}"
+ routine_type = "TABLE_VALUED_FUNCTION"
+ language = "SQL"
+ definition_body = <<-EOS
+ SELECT 1 + value AS value
+ EOS
+ arguments {
+ name = "value"
+ argument_kind = "FIXED_TYPE"
+ data_type = jsonencode({ "typeKind" = "INT64" })
+ }
+ return_table_type = jsonencode({ "columns" = [
+ { "name" = "value", "type" = { "typeKind" = "INT64" } },
+ ] })
+}
+
+resource "google_bigquery_dataset" "private" {
+ dataset_id = "%{private_dataset}"
+ description = "This dataset is private"
+}
+
+resource "google_bigquery_dataset_access" "authorized_routine" {
+ dataset_id = google_bigquery_dataset.private.dataset_id
+ routine {
+ project_id = google_bigquery_routine.public.project
+ dataset_id = google_bigquery_routine.public.dataset_id
+ routine_id = google_bigquery_routine.public.routine_id
+ }
+}
+`, context)
+}
+
func testAccBigQueryDatasetAccess_multiple(datasetID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset_access" "access" {
diff --git a/google/resource_bigquery_dataset_generated_test.go b/google/resource_bigquery_dataset_generated_test.go
index 8759a3b606d..776a258c5fd 100644
--- a/google/resource_bigquery_dataset_generated_test.go
+++ b/google/resource_bigquery_dataset_generated_test.go
@@ -218,6 +218,74 @@ resource "google_service_account" "bqowner" {
`, context)
}
+func TestAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(t *testing.T) {
+ t.Parallel()
+
+ context := map[string]interface{}{
+ "service_account": getTestServiceAccountFromEnv(t),
+ "random_suffix": randString(t, 10),
+ }
+
+ vcrTest(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
+ Providers: testAccProviders,
+ CheckDestroy: testAccCheckBigQueryDatasetDestroyProducer(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(context),
+ },
+ {
+ ResourceName: "google_bigquery_dataset.private",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
+func testAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(context map[string]interface{}) string {
+ return Nprintf(`
+resource "google_bigquery_dataset" "public" {
+ dataset_id = "tf_test_public_dataset%{random_suffix}"
+ description = "This dataset is public"
+}
+
+resource "google_bigquery_routine" "public" {
+ dataset_id = google_bigquery_dataset.public.dataset_id
+ routine_id = "tf_test_public_routine%{random_suffix}"
+ routine_type = "TABLE_VALUED_FUNCTION"
+ language = "SQL"
+ definition_body = <<-EOS
+ SELECT 1 + value AS value
+ EOS
+ arguments {
+ name = "value"
+ argument_kind = "FIXED_TYPE"
+ data_type = jsonencode({ "typeKind" = "INT64" })
+ }
+ return_table_type = jsonencode({ "columns" = [
+ { "name" = "value", "type" = { "typeKind" = "INT64" } },
+ ] })
+}
+
+resource "google_bigquery_dataset" "private" {
+ dataset_id = "tf_test_private_dataset%{random_suffix}"
+ description = "This dataset is private"
+ access {
+ role = "OWNER"
+ user_by_email = "%{service_account}"
+ }
+ access {
+ routine {
+ project_id = google_bigquery_routine.public.project
+ dataset_id = google_bigquery_routine.public.dataset_id
+ routine_id = google_bigquery_routine.public.routine_id
+ }
+ }
+}
+`, context)
+}
+
func testAccCheckBigQueryDatasetDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
diff --git a/website/docs/r/bigquery_dataset.html.markdown b/website/docs/r/bigquery_dataset.html.markdown
index d66c83d01ba..c231bcb6097 100644
--- a/website/docs/r/bigquery_dataset.html.markdown
+++ b/website/docs/r/bigquery_dataset.html.markdown
@@ -161,6 +161,49 @@ resource "google_service_account" "bqowner" {
account_id = "bqowner"
}
```
+## Example Usage - Bigquery Dataset Authorized Routine
+
+
+```hcl
+resource "google_bigquery_dataset" "public" {
+ dataset_id = "public_dataset"
+ description = "This dataset is public"
+}
+
+resource "google_bigquery_routine" "public" {
+ dataset_id = google_bigquery_dataset.public.dataset_id
+ routine_id = "public_routine"
+ routine_type = "TABLE_VALUED_FUNCTION"
+ language = "SQL"
+ definition_body = <<-EOS
+ SELECT 1 + value AS value
+ EOS
+ arguments {
+ name = "value"
+ argument_kind = "FIXED_TYPE"
+ data_type = jsonencode({ "typeKind" = "INT64" })
+ }
+ return_table_type = jsonencode({ "columns" = [
+ { "name" = "value", "type" = { "typeKind" = "INT64" } },
+ ] })
+}
+
+resource "google_bigquery_dataset" "private" {
+ dataset_id = "private_dataset"
+ description = "This dataset is private"
+ access {
+ role = "OWNER"
+ user_by_email = "my@service-account.com"
+ }
+ access {
+ routine {
+ project_id = google_bigquery_routine.public.project
+ dataset_id = google_bigquery_routine.public.dataset_id
+ routine_id = google_bigquery_routine.public.routine_id
+ }
+ }
+}
+```
## Argument Reference
@@ -309,6 +352,15 @@ destroying the resource will fail if tables are present.
Grants all resources of particular types in a particular dataset read access to the current dataset.
Structure is [documented below](#nested_dataset).
+* `routine` -
+ (Optional)
+ A routine from a different dataset to grant access to. Queries
+ executed against that routine will have read access to tables in
+ this dataset. The role field is not required when this field is
+ set. If that routine is updated by any user, access to the routine
+ needs to be granted again via an update operation.
+ Structure is [documented below](#nested_routine).
+
The `view` block supports:
@@ -349,6 +401,22 @@ destroying the resource will fail if tables are present.
(Required)
The ID of the project containing this table.
+The `routine` block supports:
+
+* `dataset_id` -
+ (Required)
+ The ID of the dataset containing this routine.
+
+* `project_id` -
+ (Required)
+ The ID of the project containing this routine.
+
+* `routine_id` -
+ (Required)
+ The ID of the routine. The ID must contain only letters (a-z,
+ A-Z), numbers (0-9), or underscores (_). The maximum length
+ is 256 characters.
+
The `default_encryption_configuration` block supports:
* `kms_key_name` -
diff --git a/website/docs/r/bigquery_dataset_access.html.markdown b/website/docs/r/bigquery_dataset_access.html.markdown
index f14de7b273b..ac262e19aff 100644
--- a/website/docs/r/bigquery_dataset_access.html.markdown
+++ b/website/docs/r/bigquery_dataset_access.html.markdown
@@ -112,6 +112,47 @@ resource "google_bigquery_dataset" "public" {
dataset_id = "public"
}
```
+## Example Usage - Bigquery Dataset Access Authorized Routine
+
+
+```hcl
+resource "google_bigquery_dataset" "public" {
+ dataset_id = "public_dataset"
+ description = "This dataset is public"
+}
+
+resource "google_bigquery_routine" "public" {
+ dataset_id = google_bigquery_dataset.public.dataset_id
+ routine_id = "public_routine"
+ routine_type = "TABLE_VALUED_FUNCTION"
+ language = "SQL"
+ definition_body = <<-EOS
+ SELECT 1 + value AS value
+ EOS
+ arguments {
+ name = "value"
+ argument_kind = "FIXED_TYPE"
+ data_type = jsonencode({ "typeKind" = "INT64" })
+ }
+ return_table_type = jsonencode({ "columns" = [
+ { "name" = "value", "type" = { "typeKind" = "INT64" } },
+ ] })
+}
+
+resource "google_bigquery_dataset" "private" {
+ dataset_id = "private_dataset"
+ description = "This dataset is private"
+}
+
+resource "google_bigquery_dataset_access" "authorized_routine" {
+ dataset_id = google_bigquery_dataset.private.dataset_id
+ routine {
+ project_id = google_bigquery_routine.public.project
+ dataset_id = google_bigquery_routine.public.dataset_id
+ routine_id = google_bigquery_routine.public.routine_id
+ }
+}
+```
## Argument Reference
@@ -182,6 +223,15 @@ The following arguments are supported:
Grants all resources of particular types in a particular dataset read access to the current dataset.
Structure is [documented below](#nested_dataset).
+* `routine` -
+ (Optional)
+ A routine from a different dataset to grant access to. Queries
+ executed against that routine will have read access to tables in
+ this dataset. The role field is not required when this field is
+ set. If that routine is updated by any user, access to the routine
+ needs to be granted again via an update operation.
+ Structure is [documented below](#nested_routine).
+
* `project` - (Optional) The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
@@ -225,6 +275,22 @@ The following arguments are supported:
(Required)
The ID of the project containing this table.
+The `routine` block supports:
+
+* `dataset_id` -
+ (Required)
+ The ID of the dataset containing this routine.
+
+* `project_id` -
+ (Required)
+ The ID of the project containing this routine.
+
+* `routine_id` -
+ (Required)
+ The ID of the routine. The ID must contain only letters (a-z,
+ A-Z), numbers (0-9), or underscores (_). The maximum length
+ is 256 characters.
+
## Attributes Reference
In addition to the arguments listed above, the following computed attributes are exported: